text stringlengths 8 4.13M |
|---|
use super::{utils::*, CommandCounter, ShardManagerContainer, TrackOwner};
use serenity::{
builder::CreateMessage,
client::{bridge::gateway::ShardId, Context},
framework::standard::{macros::command, CommandResult},
model::{channel::Message, id},
};
use std::time::Instant;
#[command]
#[aliases("s")]
#[description = "Data on the bot"]
// TODO: Display correctly cache size
/// Replies with an embed of bot statistics: gateway/REST latency, the three
/// most-used commands, cache counts and shard/guild information.
pub async fn stats(ctx: &Context, msg: &Message) -> CommandResult {
    // Measure time elapsed while sending a message (REST latency)
    let now = Instant::now();
    let mut sand = msg.channel_id.say(&ctx, "Measuring REST latency").await?;
    let http_latency = format!("{}ms", now.elapsed().as_millis());
    let data = ctx.data.read().await;
    // Get WS latency from the ShardManagerContainer
    let ws_latency = {
        let mutex = data.get::<ShardManagerContainer>().unwrap().clone();
        let manager = mutex.lock().await;
        let runners = manager.runners.lock().await;
        let runner = runners.get(&ShardId(ctx.shard_id));
        // The shard runner may not have measured a heartbeat yet; fall back to "?ms".
        runner
            .and_then(|r| r.latency.map(|l| format!("{}ms", l.as_millis())))
            .unwrap_or_else(|| "?ms".to_owned())
    };
    // Three most-invoked commands, one "count - name" line each.
    let top_commands = {
        let map = data.get::<CommandCounter>().unwrap().clone();
        let mut count: Vec<(&String, &u64)> = map.iter().collect();
        count.sort_by(|a, b| b.1.cmp(a.1));
        let lines: Vec<String> = count
            .iter()
            .take(3)
            .map(|(cmd, count)| format!("{} - {}", count, cmd))
            .collect();
        lines.join("\n")
    };
    let cache_stats = {
        let mut out = String::new();
        out.push_str(&format!(
            "Cached guilds: {}\n",
            ctx.cache.guilds().await.len()
        ));
        out.push_str(&format!(
            "Cached channels: {}\n",
            ctx.cache.guild_channel_count().await
        ));
        out.push_str(&format!("Cached users: {}\n", ctx.cache.user_count().await));
        // out.push_str(&format!("Cache size: {}B\n", size_of_val(&ctx.cache)));
        out
    };
    // Prefer the caller's guild nickname over their account name.
    let author = msg
        .author_nick(&ctx)
        .await
        .unwrap_or_else(|| msg.author.name.clone());
    let shard_count = ctx.cache.shard_count().await;
    let guild_count = format!("{}", ctx.cache.guilds().await.len());
    let colour = cached_colour(ctx, msg.guild(&ctx.cache).await).await;
    // Replace the latency-probe message with the final stats embed.
    sand.edit(&ctx, |m| {
        m.content("").embed(|e| {
            e.title("Stats")
                .description(format!("Called by {}", author))
                .fields(vec![
                    (
                        "Latency",
                        format!("Gateway: {}\nREST API: {}", ws_latency, http_latency),
                        true,
                    ),
                    ("Top commands", top_commands, true),
                    ("Cache", cache_stats, false),
                ])
                .footer(|f| {
                    f.text(format!(
                        "Shard: {}/{}, {} guilds",
                        ctx.shard_id + 1,
                        shard_count,
                        guild_count
                    ))
                })
                .colour(colour)
        })
    })
    .await?;
    Ok(())
}
#[command]
#[aliases("q")]
#[only_in(guilds)]
#[description = "Print song queue"]
/// Prints up to the first 16 tracks of the guild's queue, with title, source
/// link, duration and the user who requested each track.
pub async fn queue(ctx: &Context, msg: &Message) -> CommandResult {
    let guild_id = msg.guild_id.unwrap();
    let manager = songbird::get(ctx).await.unwrap().clone();
    if let Some(lock) = manager.get(guild_id) {
        let call = lock.lock().await;
        let queue = call.queue().current_queue();
        let text = {
            let mut out = Vec::with_capacity(queue.len());
            // Only render the first 16 entries to stay within embed limits.
            for (i, e) in queue.iter().enumerate().take(16) {
                let meta = e.metadata().clone();
                // Resolve the requester stored in the track's typemap;
                // fall back to "?" if the user lookup fails.
                let owner = if let Ok(o) = {
                    let read = e.typemap().read().await;
                    let user_id = read.get::<TrackOwner>().unwrap();
                    user_id.to_user(&ctx).await
                } {
                    o.nick_in(&ctx, guild_id).await.unwrap_or(o.name)
                } else {
                    "?".to_owned()
                };
                out.push(format!(
                    "`{}`: [{}]({}) {}\nRequested by {}",
                    i,
                    meta.title.unwrap_or_else(|| "?".to_owned()),
                    meta.source_url.unwrap_or_else(|| "?".to_owned()),
                    match meta.duration {
                        Some(d) => {
                            let s = d.as_secs();
                            // Zero-pad seconds so 125s renders as 2:05, not 2:5.
                            format!("{}:{:02}", s / 60, s % 60)
                        }
                        None => "?".to_owned(),
                    },
                    owner
                ))
            }
            out.join("\n")
        };
        let colour = cached_colour(ctx, msg.guild(&ctx.cache).await).await;
        handle_message(
            msg.channel_id
                .send_message(&ctx, |m| {
                    m.embed(|e| e.title("Queue").description(text).colour(colour))
                })
                .await,
        );
    } else {
        handle_message(msg.channel_id.say(&ctx, "Not in a voice channel").await);
    }
    Ok(())
}
#[command]
#[aliases("n")]
#[only_in(guilds)]
#[description = "Show details on the current song"]
// TODO: Check if already playing
/// Sends an embed describing the track currently playing in this guild's
/// voice call, or a short notice when there is no call / no track.
async fn np(ctx: &Context, msg: &Message) -> CommandResult {
    let guild_id = msg.guild_id.unwrap();
    let manager = songbird::get(ctx).await.unwrap().clone();
    match manager.get(guild_id) {
        Some(lock) => {
            let call = lock.lock().await;
            match call.queue().current() {
                Some(track) => {
                    let meta = track.metadata().clone();
                    // The requesting user's id is stored in the track's typemap.
                    let owner = {
                        let map = track.typemap().read().await;
                        *map.get::<TrackOwner>().unwrap()
                    };
                    let state = track.get_info().await.unwrap();
                    let mut reply = format_metadata(&ctx, guild_id, meta, owner, state).await;
                    handle_message(msg.channel_id.send_message(&ctx, |_| &mut reply).await);
                }
                None => {
                    handle_message(msg.channel_id.say(&ctx.http, "No song playing").await);
                }
            }
        }
        None => {
            handle_message(msg.channel_id.say(&ctx, "Not in a voice channel").await);
        }
    }
    Ok(())
}
/// Builds the "now playing" message for a track: title, source link, optional
/// artist/date fields, thumbnail, a 30-slot text progress bar and the play
/// state, with the requester's name in the footer.
async fn format_metadata<'a>(
    ctx: &Context,
    gid: id::GuildId,
    meta: songbird::input::Metadata,
    author_id: id::UserId,
    state: Box<songbird::tracks::TrackState>,
) -> CreateMessage<'a> {
    let title = format!("Now playing: {}", meta.title.unwrap_or_else(|| "".to_owned()));
    let thumb = meta.thumbnail;
    // Prefer the requester's guild nickname, then username; "?" if lookup fails.
    let owner = if let Ok(o) = author_id.to_user(&ctx).await {
        o.nick_in(&ctx, gid).await.unwrap_or(o.name)
    } else {
        "?".to_owned()
    };
    let foot = format!("Requested by: {}", owner);
    // Optional embed fields: artist/channel and upload date.
    let mut fields = None;
    {
        let mut out = Vec::new();
        if let Some(a) = meta.artist {
            out.push(("Artist/Channel", a, true));
        }
        if let Some(a) = meta.date {
            // assumes date is "YYYYMMDD" -> "YYYY/MM/DD"; inserting at index 6
            // first keeps the index 4 insertion valid.
            let mut d = a;
            d.insert(6, '/');
            d.insert(4, '/');
            out.push(("Date", d, true));
        }
        if !out.is_empty() {
            fields = Some(out)
        }
    }
    let colour = cached_colour(ctx, ctx.cache.guild(gid).await).await;
    // Progress text: `elapsed` `====>-----` `total`; None when no duration known.
    let progress_bar = {
        if let Some(d) = meta.duration {
            // Render seconds as m:ss with zero-padded seconds.
            fn as_mins(s: u64) -> String {
                format!("{}:{:02}", s / 60, s % 60)
            }
            let p = state.position;
            let d_int = d.as_secs();
            let p_int = p.as_secs();
            // Clamp into 0..=30 so a position past the end (or a zero duration,
            // whose NaN ratio casts to 0) cannot make `30 - ratio` underflow.
            let ratio = (p_int as f32 / d_int as f32 * 30.0).round().clamp(0.0, 30.0) as u8;
            let mut bar = String::with_capacity(31);
            for _ in 1..ratio {
                bar.push('=')
            }
            bar.push('>');
            for _ in 0..30 - ratio {
                bar.push('-')
            }
            Some(format!(
                "`{}` `{}` `{}`",
                as_mins(p_int),
                bar,
                as_mins(d_int)
            ))
        } else {
            None
        }
    };
    // Description: source URL, optional progress bar, play status, loop count.
    let desc = {
        use songbird::tracks::{LoopState, PlayMode};
        let mut out = String::new();
        out.push_str(&meta.source_url.unwrap_or_else(|| "".to_owned()));
        if let Some(s) = progress_bar {
            out.push('\n');
            out.push_str(&s);
            out.push('\n');
        } else {
            out.push('\n');
        }
        out.push_str("Status: ");
        out.push_str(match state.playing {
            PlayMode::Play => "Playing",
            PlayMode::Pause => "Paused",
            PlayMode::Stop => "Stopped",
            PlayMode::End => "Ended",
            _ => "?",
        });
        if let LoopState::Finite(l) = state.loops {
            if l != 0 {
                out.push_str(&format!("; {} loops left", l))
            }
        }
        out
    };
    let mut message = CreateMessage::default();
    message.embed(|e| {
        if let Some(f) = fields {
            e.fields(f);
        }
        if let Some(t) = thumb {
            e.thumbnail(t);
        }
        e.title(title)
            .description(desc)
            .footer(|f| f.text(foot))
            .colour(colour)
    });
    message
}
|
/// ANSI foreground colour codes (SGR parameters 30-37, 39 and 90-97).
/// The discriminant is the SGR parameter written by the `Display` impl.
#[repr(u8)]
#[derive(Debug, Clone, Copy)]
pub enum Fg {
    Black = 30,
    Red,
    Green,
    Yellow,
    Blue,
    Magenta,
    Cyan,
    White,
    /// Reset the foreground to the terminal default.
    Reset = 39,
    BrightBlack = 90,
    BrightRed,
    BrightGreen,
    // BrightYellow (93) was missing, which shifted every later bright colour
    // one code down (BrightBlue emitted 93, i.e. bright yellow).
    BrightYellow,
    BrightBlue,
    BrightMagenta,
    BrightCyan,
    BrightWhite,
}
impl std::fmt::Display for Fg {
    /// Emits the SGR escape sequence selecting this foreground colour.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let code = *self as u8;
        f.write_fmt(format_args!("\x1b[{}m", code))
    }
}
/// ANSI background colour codes (SGR parameters 40-47, 49 and 100-107).
/// The discriminant is the SGR parameter written by the `Display` impl.
#[repr(u8)]
#[derive(Debug, Clone, Copy)]
pub enum Bg {
    Black = 40,
    Red,
    Green,
    Yellow,
    Blue,
    Magenta,
    Cyan,
    White,
    /// Reset the background to the terminal default.
    Reset = 49,
    BrightBlack = 100,
    BrightRed,
    BrightGreen,
    // BrightYellow (103) was missing, which shifted every later bright colour
    // one code down (BrightBlue emitted 103, i.e. bright yellow background).
    BrightYellow,
    BrightBlue,
    BrightMagenta,
    BrightCyan,
    BrightWhite,
}
impl std::fmt::Display for Bg {
    /// Emits the SGR escape sequence selecting this background colour.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let code = *self as u8;
        f.write_fmt(format_args!("\x1b[{}m", code))
    }
}
|
use kompact::component::AbstractComponent;
use kompact::prelude::*;
use time::*;
use crate::control::Control;
use crate::data::*;
use crate::prelude::*;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::ops::ControlFlow;
use std::ops::FromResidual;
use std::ops::Try;
use std::sync::Arc;
use std::time::Duration;
/// Bounded in-process channel realised as a kompact component. Producer
/// (`Pushable`) and consumer (`Pullable`) handles talk to it via actor
/// messages; draining and shutdown logic lives in `receive_local`.
#[derive(ComponentDefinition)]
pub(crate) struct Channel<T: DynSharable> {
    ctx: ComponentContext<Self>,
    // Pushes waiting for buffer space; each is answered with `()` once accepted.
    push_queue: VecDeque<Ask<T, ()>>,
    // Buffered elements; this queue's capacity acts as the channel's bound.
    data_queue: VecDeque<T>,
    // Pulls waiting for data to become available.
    pull_queue: VecDeque<Ask<(), T>>,
    // Live handle counts, maintained by Add*/Del* messages; used for shutdown.
    pushers: usize,
    pullers: usize,
}
impl<T: DynSharable> Channel<T> {
    /// Creates an uninitialised channel component with one registered pusher
    /// and one registered puller, and pre-sized request/data queues.
    pub fn new() -> Self {
        Self {
            ctx: ComponentContext::uninitialised(),
            push_queue: VecDeque::with_capacity(100),
            data_queue: VecDeque::with_capacity(10),
            pull_queue: VecDeque::with_capacity(100),
            pushers: 1,
            pullers: 1,
        }
    }
}
/// Creates a new `Channel` component in `ctx`'s system, starts it, and
/// returns the connected producer/consumer handles (one pusher, one puller —
/// matching the initial counts in `Channel::new`).
pub fn channel<T: DynSharable>(ctx: &mut Context) -> (Pushable<T>, Pullable<T>) {
    let chan = ctx.component.system().create(Channel::new);
    ctx.component.system().start(&chan);
    (Pushable(chan.actor_ref()), Pullable(chan.actor_ref()))
}
// Default lifecycle hooks suffice; the channel only reacts to actor messages.
impl<T: DynSharable> ComponentLifecycle for Channel<T> {}
/// Protocol between the `Pushable`/`Pullable` handles and the `Channel`
/// component.
#[derive(Debug)]
pub(crate) enum Message<T: DynSharable> {
    /// Enqueue a value; answered with `()` once it is buffered.
    PushRequest(Ask<T, ()>),
    /// Request one value; answered when data is available.
    PullRequest(Ask<(), T>),
    // Handle-count bookkeeping, sent from the handles' Clone/Drop impls.
    AddPusher,
    AddPuller,
    DelPusher,
    DelPuller,
}
impl<T: DynSharable> Actor for Channel<T> {
    type Message = Message<T>;
    /// Applies one protocol message, then drains as much work as possible:
    /// pending pushes are admitted while the buffer has spare capacity, and
    /// pending pulls are answered while buffered data remains. The component
    /// dies once no producer can ever deliver again, or no consumer remains.
    fn receive_local(&mut self, msg: Self::Message) -> Handled {
        match msg {
            Message::PushRequest(ask) => self.push_queue.push_back(ask),
            Message::PullRequest(ask) => self.pull_queue.push_back(ask),
            Message::AddPusher => self.pushers += 1,
            Message::AddPuller => self.pullers += 1,
            Message::DelPusher => self.pushers -= 1,
            Message::DelPuller => self.pullers -= 1,
        }
        // Admit queued pushes while the data buffer has spare capacity.
        while !self.push_queue.is_empty() && self.data_queue.len() < self.data_queue.capacity() {
            let (promise, data) = self.push_queue.pop_front().unwrap().take();
            promise.fulfil(()).unwrap();
            self.data_queue.push_back(data);
        }
        // Serve queued pulls while buffered data is available.
        while !self.pull_queue.is_empty() && !self.data_queue.is_empty() {
            // The pull request payload is `()`; bind it as `_` rather than an
            // unused name (the original bound it as `id`, triggering a warning).
            let (promise, _) = self.pull_queue.pop_front().unwrap().take();
            let data = self.data_queue.pop_front().unwrap();
            promise.fulfil(data).unwrap();
        }
        // Shut down when the buffer is drained with no pushers left, or when
        // nobody can pull anymore. Parentheses added for clarity only; `&&`
        // already binds tighter than `||`.
        if (self.pushers == 0 && self.data_queue.is_empty()) || self.pullers == 0 {
            Handled::DieNow
        } else {
            Handled::Ok
        }
    }
    /// Network delivery is not supported for this in-process channel.
    fn receive_network(&mut self, _msg: NetMessage) -> Handled {
        todo!()
    }
}
/// Producer handle for a `Channel`; cloning registers an extra pusher.
pub struct Pushable<T: DynSharable>(pub(crate) ActorRef<Message<T>>);
/// Consumer handle for a `Channel`.
pub struct Pullable<T: DynSharable>(pub(crate) ActorRef<Message<T>>);
impl<T: DynSharable> Clone for Pushable<T> {
    // Register the extra producer with the channel before handing out the
    // clone, so the pusher count always matches the number of live handles
    // (Drop sends the matching DelPusher).
    fn clone(&self) -> Self {
        self.0.tell(Message::AddPusher);
        Pushable(self.0.clone())
    }
}
impl<T: DynSharable> Clone for Pullable<T> {
fn clone(&self) -> Self {
Pullable(self.0.clone())
}
}
impl<T: DynSharable> Drop for Pushable<T> {
    // Deregister this producer; the channel dies once all pushers are gone
    // and its buffer is drained.
    fn drop(&mut self) {
        self.0.tell(Message::DelPusher);
    }
}
impl<T: DynSharable> Drop for Pullable<T> {
    // Deregister this consumer; the channel dies once no pullers remain.
    fn drop(&mut self) {
        self.0.tell(Message::DelPuller);
    }
}
impl<T: DynSharable> Pushable<T> {
    /// Sends `data` into the channel, waiting until it has been buffered.
    /// Resolves to `Control::Finished` if the channel died before answering.
    pub async fn push(&self, data: T) -> Control<()> {
        let reply = self
            .0
            .ask_with(|promise| Message::PushRequest(Ask::new(promise, data)))
            .await;
        match reply {
            Ok(accepted) => Control::Continue(accepted),
            Err(_) => Control::Finished,
        }
    }
}
impl<T: DynSharable> Pullable<T> {
    /// Receives one element from the channel, waiting until data is available.
    /// Resolves to `Control::Finished` if the channel died before answering.
    pub async fn pull(&self) -> Control<T> {
        let reply = self
            .0
            .ask_with(|promise| Message::PullRequest(Ask::new(promise, ())))
            .await;
        match reply {
            Ok(item) => Control::Continue(item),
            Err(_) => Control::Finished,
        }
    }
}
|
use crate::utils::input_validator;
use crate::Result;
use reqwest::Client;
use serde_json::Value;
/// Checks for consistency of given hashes, not part of the public api
///
/// Validates every hash locally, then POSTs a `checkConsistency` command to
/// the IOTA node at `uri` and returns the parsed JSON response.
pub async fn check_consistency(client: Client, uri: String, hashes: Vec<String>) -> Result<Value> {
    // Reject malformed hashes before touching the network.
    for hash in &hashes {
        ensure!(
            input_validator::is_hash(hash),
            "Provided hash is not valid: {:?}",
            hash
        );
    }
    let body = json!({
        "command": "checkConsistency",
        "tails": hashes,
    });
    Ok(client
        .post(&uri)
        // Bug fix: "ContentType" is not a valid HTTP header name; the
        // standard header is "Content-Type" (RFC 7231 §3.1.1.5).
        .header("Content-Type", "application/json")
        .header("X-IOTA-API-Version", "1")
        .body(body.to_string())
        .send()?
        .json()?)
}
|
use carmen_core::gridstore::*;
use test_utils::*;
use fixedbitset::FixedBitSet;
// Language bitfield with every bit set: matches grids tagged with any language.
const ALL_LANGUAGES: u128 = u128::max_value();
/// All four grids share relev/score, so result ordering must come purely from
/// distance to the proximity point; each quadrant is tried in turn, and the
/// tree-based coalesce is checked against the plain implementation every time.
#[test]
fn coalesce_single_test_proximity_quadrants() {
    let directory: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut builder = GridStoreBuilder::new(directory.path()).unwrap();
    let key = GridKey { phrase_id: 1, lang_set: 1 };
    // One entry in each quadrant around the proximity points used below.
    let entries = vec![
        GridEntry { id: 1, x: 200, y: 200, relev: 1., score: 1, source_phrase_hash: 0 }, // ne
        GridEntry { id: 2, x: 200, y: 0, relev: 1., score: 1, source_phrase_hash: 0 }, // se
        GridEntry { id: 3, x: 0, y: 0, relev: 1., score: 1, source_phrase_hash: 0 }, // sw
        GridEntry { id: 4, x: 0, y: 200, relev: 1., score: 1, source_phrase_hash: 0 }, // nw
    ];
    builder.insert(&key, entries).expect("Unable to insert record");
    builder.finish().unwrap();
    let store =
        GridStore::new_with_options(directory.path(), 14, 1, 200., global_bbox_for_zoom(14), 1.0)
            .unwrap();
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    println!("Coalesce single - NE proximity");
    let match_opts = MatchOpts {
        zoom: 14,
        proximity: Some([110, 115]), // NE proximity point
        ..MatchOpts::default()
    };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    let result_ids: Vec<u32> =
        result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    let result_distances: Vec<f64> =
        result.iter().map(|context| round(context.entries[0].distance, 0)).collect();
    assert_eq!(result_ids, [1, 4, 2, 3], "Results are in the order ne, nw, se, sw");
    assert_eq!(result_distances, [124.0, 139.0, 146.0, 159.0], "Result distances are correct");
    println!("Coalesce single - SE proximity");
    let match_opts = MatchOpts {
        zoom: 14,
        proximity: Some([110, 85]), // SE proximity point
        ..MatchOpts::default()
    };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    let result_ids: Vec<u32> =
        result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    let result_distances: Vec<f64> =
        result.iter().map(|context| round(context.entries[0].distance, 0)).collect();
    assert_eq!(result_ids, [2, 3, 1, 4], "Results are in the order se, sw, ne, nw");
    assert_eq!(result_distances, [124.0, 139.0, 146.0, 159.0], "Result distances are correct");
    println!("Coalesce single - SW proximity");
    let match_opts = MatchOpts {
        zoom: 14,
        proximity: Some([90, 85]), // SW proximity point
        ..MatchOpts::default()
    };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    let result_ids: Vec<u32> =
        result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    let result_distances: Vec<f64> =
        result.iter().map(|context| round(context.entries[0].distance, 0)).collect();
    assert_eq!(result_ids, [3, 2, 4, 1], "Results are in the order sw, se, nw, ne");
    assert_eq!(result_distances, [124.0, 139.0, 146.0, 159.0], "Result distances are correct");
    println!("Coalesce single - NW proximity");
    let match_opts = MatchOpts {
        zoom: 14,
        proximity: Some([90, 115]), // NW proximity point
        ..MatchOpts::default()
    };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    let result_ids: Vec<u32> =
        result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    let result_distances: Vec<f64> =
        result.iter().map(|context| round(context.entries[0].distance, 0)).collect();
    assert_eq!(result_ids, [4, 1, 3, 2], "Results are in the order nw, ne, sw, se");
    assert_eq!(result_distances, [124.0, 139.0, 146.0, 159.0], "Result distances are correct");
}
/// Four equal-relev/score grids near the origin: with a proximity point at
/// (2, 2), coalesce must order results by distance, and the tree-based
/// coalesce must agree with the plain implementation.
#[test]
fn coalesce_single_test_proximity_basic() {
    let directory: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut builder = GridStoreBuilder::new(directory.path()).unwrap();
    let key = GridKey { phrase_id: 1, lang_set: 1 };
    let entries = vec![
        GridEntry { id: 1, x: 2, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 2, x: 2, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 3, x: 0, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 4, x: 0, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
    ];
    builder.insert(&key, entries).expect("Unable to insert record");
    builder.finish().unwrap();
    let store =
        GridStore::new_with_options(directory.path(), 14, 1, 200., global_bbox_for_zoom(14), 1.0)
            .unwrap();
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    // Proximity point sits exactly on grid id 1.
    let match_opts = MatchOpts { zoom: 14, proximity: Some([2, 2]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    let result_ids: Vec<u32> =
        result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    assert_eq!(
        result_ids,
        [1, 2, 4, 3],
        "Results with the same relev and score should be ordered by distance"
    );
    let result_distances: Vec<f64> =
        result.iter().map(|context| round(context.entries[0].distance, 2)).collect();
    assert_eq!(
        result_distances,
        [0.00, 2.00, 2.00, 2.83],
        "Results with the same relev and score should be ordered by distance"
    );
}
/// A subquery whose lang_set (2) does not match the grids' lang_set (1):
/// grids inside the proximity radius keep full relevance, grids outside get
/// the 0.96 cross-language penalty; without proximity everything is penalised.
#[test]
fn coalesce_single_test_language_penalty() {
    let directory: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut builder = GridStoreBuilder::new(directory.path()).unwrap();
    let key = GridKey { phrase_id: 1, lang_set: 1 };
    let entries = vec![
        GridEntry { id: 1, x: 2, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 2, x: 2, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 3, x: 0, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 4, x: 0, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
    ];
    builder.insert(&key, entries).expect("Unable to insert record");
    builder.finish().unwrap();
    // Small proximity radius (1.) so only the grid at the proximity point is "inside".
    let store =
        GridStore::new_with_options(directory.path(), 14, 1, 1., global_bbox_for_zoom(14), 1.0)
            .unwrap();
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 2 },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery.clone()];
    let match_opts = MatchOpts { zoom: 14, proximity: Some([2, 2]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].relev, 1., "Contexts inside the proximity radius don't get a cross langauge penalty");
        assert_eq!(result[0].entries[0].grid_entry.relev, 1., "Grids inside the proximity radius don't get a cross language penalty");
        assert_eq!(result[0].entries[0].matches_language, false, "Matches language property is correctly set on CoalesceEntry");
        assert_eq!(result[1].relev, 0.96, "Contexts outside the proximity radius get a cross langauge penalty");
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.96, "Grids outside the proximity radius get a cross language penalty");
        assert_eq!(result[1].entries[0].matches_language, false, "Matches language property is correctly set on CoalesceEntry");
    }
    // Same stack, no proximity point: every context/grid gets the penalty.
    let match_opts = MatchOpts { zoom: 14, ..MatchOpts::default() };
    let stack = vec![subquery.clone()];
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].relev, 0.96, "With no proximity, cross language contexts get a penalty");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.96, "With no proximity, cross language grids get a penalty");
        assert_eq!(result[0].entries[0].matches_language, false, "Matches language property is correctly set on CoalesceEntry");
    }
}
/// Two stacked indexes whose subqueries use lang_set 2 against grids tagged
/// with lang_set 1: contexts inside the proximity radius escape the
/// cross-language penalty, contexts outside (and all contexts when no
/// proximity is given) are penalised to 0.96 of their relevance.
#[test]
fn coalesce_multi_test_language_penalty() {
    // Add more specific layer into a store
    let store1 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: 1 },
            entries: vec![
                GridEntry { id: 1, x: 2, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
                GridEntry { id: 2, x: 12800, y: 12800, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        1,
        14,
        1,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // Add less specific layer into a store
    let store2 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 2, lang_set: 1 },
            entries: vec![
                GridEntry { id: 3, x: 0, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
                GridEntry { id: 4, x: 50, y: 50, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        2,
        6,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // Subqueries with a different language set
    println!("Coalesce multi - Subqueries with different language set from grids, with proximity");
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 2,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 2,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
    ];
    let match_opts = MatchOpts { zoom: 14, proximity: Some([2, 2]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].relev, 1., "Contexts inside the proximity radius don't get a cross langauge penalty");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.5, "Grids inside the proximity radius don't get a cross language penalty");
        assert_eq!(result[0].entries[0].matches_language, false, "matches_language property is correctly set on CoalesceEntry");
        assert_eq!(result[1].relev, 0.96, "Contexts outside the proximity radius get a cross langauge penalty");
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.48, "1st grid outside the proximity radius get a cross language penalty");
        assert_eq!(result[1].entries[1].grid_entry.relev, 0.48, "2nd grid outside the proximity radius gets a cross language penalty");
        assert_eq!(result[1].entries[0].matches_language, false, "matches_language property is correctly set on 1st CoalesceEntry in context");
        assert_eq!(result[1].entries[1].matches_language, false, "matches_language property is correctly set on 2nd CoalesceEntry in context");
    }
    println!("Coalesce multi - Subqueires with different lang set from grids, no proximity");
    let match_opts = MatchOpts { zoom: 14, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].relev, 0.96, "Cross language contexts get a penalty");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.48, "Cross language grids get a penalty");
        assert_eq!(result[0].entries[0].matches_language, false, "matches_language property is correctly set on CoalesceEntry");
    }
}
/// Exercises single-subquery coalesce end-to-end: default options (ordering
/// by relev then score), proximity-based ordering and scoredist values, bbox
/// filtering, and bbox combined with proximity. The tree-based coalesce is
/// checked against the plain implementation in every scenario.
#[test]
fn coalesce_single_test() {
    let store = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: 1 },
            entries: vec![
                GridEntry { id: 1, x: 1, y: 1, relev: 1., score: 3, source_phrase_hash: 0 },
                GridEntry { id: 2, x: 2, y: 2, relev: 0.8, score: 3, source_phrase_hash: 0 },
                GridEntry { id: 3, x: 3, y: 3, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        1,
        6,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        40.,
    );
    let subquery = PhrasematchSubquery {
        store: &store.store,
        idx: store.idx,
        non_overlapping_indexes: store.non_overlapping_indexes.clone(),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    // Test default opts - no proximity or bbox
    println!("Coalsece single - no proximity, no bbox");
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].relev, 1., "1st result has relevance 1");
        assert_eq!(result[0].entries.len(), 1, "1st result has one coalesce entry");
        assert_eq!(result[0].entries[0].matches_language, true, "1st result is a language match");
        assert_eq!(result[0].entries[0].distance, 0., "1st result has distance 0");
        assert_eq!(result[0].entries[0].idx, 1, "1st result has idx of subquery");
        assert_eq!(result[0].entries[0].mask, 1 << 0, "1st result has original mask");
        assert_eq!(result[0].entries[0].scoredist, 3., "1st result scoredist is the grid score");
        assert_eq!(result[0].entries[0].grid_entry, GridEntry {
            id: 1,
            x: 1,
            y: 1,
            relev: 1.,
            score: 3,
            source_phrase_hash: 0,
        }, "1st result grid entry is the highest relevance and score");
        assert_eq!(result[1].relev, 1., "2nd result has relevance 1");
        assert_eq!(result[1].entries.len(), 1, "2nd result has one coalesce entry");
        assert_eq!(result[1].entries[0].matches_language, true, "2nd result is a language match");
        assert_eq!(result[1].entries[0].distance, 0., "2nd result has distance 0");
        assert_eq!(result[1].entries[0].idx, 1, "2nd result has idx of subquery");
        assert_eq!(result[1].entries[0].mask, 1 << 0, "2nd result has original mask");
        assert_eq!(result[1].entries[0].scoredist, 1., "2nd result scoredist is the grid score");
        assert_eq!(result[1].entries[0].grid_entry, GridEntry {
            id: 3,
            x: 3,
            y: 3,
            relev: 1.,
            score: 1,
            source_phrase_hash: 0,
        }, "2nd result grid entry is the highest relevance, lower score");
        assert_eq!(result[2].relev, 0.8, "3rd result has relevance 0.8");
        assert_eq!(result[2].entries.len(), 1, "3rd result has one coalesce entry");
        assert_eq!(result[2].entries[0].matches_language, true, "3rd result is a language match");
        assert_eq!(result[2].entries[0].distance, 0., "3rd result has distance 0");
        assert_eq!(result[2].entries[0].idx, 1, "3rd result has idx of subquery");
        assert_eq!(result[2].entries[0].mask, 1 << 0, "3rd result has original mask");
        assert_eq!(result[2].entries[0].scoredist, 3., "3rd result scoredist is the grid score");
        assert_eq!(result[2].entries[0].grid_entry, GridEntry {
            id: 2,
            x: 2,
            y: 2,
            relev: 0.8,
            score: 3,
            source_phrase_hash: 0,
        }, "3rd result grid entry is the lowest relevance, even though score is higher than 2nd");
    }
    // Test opts with proximity
    println!("Coalsece single - with proximity");
    let match_opts = MatchOpts { zoom: 6, proximity: Some([3, 3]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st result is the closest, even if its a slightly lower score");
        assert_eq!(result[1].entries[0].grid_entry.id, 1, "2nd result is farther away than 3rd but has a higher relevance");
        assert_eq!(result[2].entries[0].grid_entry.id, 2, "3rd is closer but has a lower relevance");
    }
    assert_eq!(result[0].relev, 1., "1st result has relevance 1");
    assert_eq!(result[0].mask, 1, "1st result context has correct mask");
    assert_eq!(result[0].entries.len(), 1, "1st result has 1 coalesce entries");
    assert_eq!(
        result[0].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777219,
            mask: 1 << 0,
            distance: 0.,
            scoredist: 1.5839497841387566,
            grid_entry: GridEntry { id: 3, x: 3, y: 3, relev: 1., score: 1, source_phrase_hash: 0 }
        },
        "1st result entry has expected properties"
    );
    assert_eq!(result[1].relev, 1., "2nd result has relevance 1");
    assert_eq!(result[1].mask, 1, "2nd result context has correct mask");
    assert_eq!(result[1].entries.len(), 1, "2nd result has 1 coalesce entries");
    assert_eq!(
        result[1].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777217,
            mask: 1 << 0,
            distance: 2.8284271247461903,
            scoredist: 1.109893833332405,
            grid_entry: GridEntry { id: 1, x: 1, y: 1, relev: 1., score: 3, source_phrase_hash: 0 }
        },
        "2nd result entry has expected properties"
    );
    assert_eq!(result[2].relev, 0.8, "3rd result has relevance 0.8");
    assert_eq!(result[2].mask, 1, "2nd result context has correct mask");
    assert_eq!(result[2].entries.len(), 1, "3rd result has 1 coalesce entries");
    assert_eq!(
        result[2].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777218,
            mask: 1 << 0,
            distance: 1.4142135623730951,
            // Has the same scoredist as 2nd result because they're both beyond proximity radius
            scoredist: 1.109893833332405,
            grid_entry: GridEntry {
                id: 2,
                x: 2,
                y: 2,
                relev: 0.8,
                score: 3,
                source_phrase_hash: 0,
            }
        },
        "3rd result entry has expected properties"
    );
    // Test with bbox
    println!("Coalsece single - with bbox");
    let match_opts = MatchOpts { zoom: 6, bbox: Some([1, 1, 1, 1]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].entries.len(), 1, "Only one result is within the bbox");
    assert_eq!(result[0].entries[0].grid_entry.id, 1, "Result is the one that's within the bbox");
    assert_eq!(
        result[0],
        CoalesceContext {
            mask: 1 << 0,
            relev: 1.,
            entries: vec![CoalesceEntry {
                phrasematch_id: 0,
                matches_language: true,
                idx: 1,
                tmp_id: 0,
                mask: 1 << 0,
                distance: 0.,
                scoredist: 3.,
                grid_entry: GridEntry {
                    id: 1,
                    x: 1,
                    y: 1,
                    relev: 1.,
                    score: 3,
                    source_phrase_hash: 0,
                }
            }],
        },
        "Result has expected properties"
    );
    // Test with bbox and proximity
    println!("Coalesce single - with bbox and proximity");
    let match_opts = MatchOpts { zoom: 6, bbox: Some([1, 1, 1, 1]), proximity: Some([1, 1]) };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].entries.len(), 1, "Only one result is within the bbox");
    assert_eq!(
        result[0],
        CoalesceContext {
            mask: 1 << 0,
            relev: 1.,
            entries: vec![CoalesceEntry {
                phrasematch_id: 0,
                matches_language: true,
                idx: 1,
                tmp_id: 0,
                mask: 1 << 0,
                distance: 0.,
                scoredist: 1.7322531402718835,
                grid_entry: GridEntry {
                    id: 1,
                    x: 1,
                    y: 1,
                    relev: 1.,
                    score: 3,
                    source_phrase_hash: 0,
                }
            }],
        },
        "Result has expected properties, including scoredist"
    );
    // TODO: test with more than one result within bbox, to make sure results are still ordered by proximity?
}
#[test]
// Exercises language matching for a single-index coalesce. Four grids are
// stored under four different language sets, then queried with ALL_LANGUAGES,
// a language matching two grids, and a language matching none. Grids that do
// not match the query language get a cross-language relevance penalty
// (1.0 -> 0.96) and sort after full matches; ties break on higher grid id.
// Every query also asserts `coalesce` and `tree_coalesce` agree.
fn coalesce_single_languages_test() {
    let directory: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut builder = GridStoreBuilder::new(directory.path()).unwrap();
    let lang_sets: [Vec<u32>; 4] = [vec![0], vec![1], vec![0, 1], vec![2]];
    // Load each grid_entry with a grid key for each language
    for (i, langs) in lang_sets.iter().enumerate() {
        let lang_set = langarray_to_langfield(&langs[..]);
        let key = GridKey { phrase_id: 1, lang_set };
        let grid_entry =
            GridEntry { id: i as u32, x: 1, y: 1, relev: 1., score: 0, source_phrase_hash: 0 };
        builder.insert(&key, vec![grid_entry]).expect("Unable to insert record");
    }
    builder.finish().unwrap();
    let store =
        GridStore::new_with_options(directory.path(), 6, 1, 200., global_bbox_for_zoom(6), 1.0)
            .unwrap();
    // Test query with all languages
    println!("Coalesce single - all languages");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey {
                match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                lang_set: ALL_LANGUAGES,
            },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 4, "Returns 4 results");
        assert_eq!(result[0].relev, 1., "1st result has relevance of 1");
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st result has highest grid id, which is the tiebreaker for sorting");
        assert_eq!(result[0].entries[0].grid_entry.relev, 1., "1st result grid has original relevance");
        assert_eq!(result[0].entries[0].matches_language, true, "1st result matches language");
        assert_eq!(result[1].relev, 1., "2nd result has original relevance");
        assert_eq!(result[1].entries[0].grid_entry.id, 2, "2nd result is the 2nd highest grid id");
        assert_eq!(result[1].entries[0].grid_entry.relev, 1., "2nd result grid has original relevance");
        assert_eq!(result[1].entries[0].matches_language, true, "2nd result matches language");
        assert_eq!(result[2].relev, 1., "3rd result has original relevance");
        assert_eq!(result[2].entries[0].grid_entry.id, 1, "3rd result is the 3rd highest grid id");
        assert_eq!(result[2].entries[0].grid_entry.relev, 1., "3rd result grid has original relevance");
        assert_eq!(result[2].entries[0].matches_language, true, "3rd result matches language");
        assert_eq!(result[3].relev, 1., "4th result has original relevance");
        assert_eq!(result[3].entries[0].grid_entry.id, 0, "4th result is the 4th highest grid id");
        assert_eq!(result[3].entries[0].grid_entry.relev, 1., "4th result grid has original relevance");
        assert_eq!(result[3].entries[0].matches_language, true, "4th result matches language");
    }
    // Test language 0 (present in two of the four stored lang sets)
    println!("Coalesce single - language 0, language matching 2 grids");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey {
                match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                lang_set: langarray_to_langfield(&[0]),
            },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 4, "Returns 4 results");
        assert_eq!(result[0].relev, 1., "1st result has relevance of 1");
        assert_eq!(result[0].entries[0].grid_entry.id, 2, "1st result is a grid with 0 in the lang set, and highest grid id");
        assert_eq!(result[0].entries[0].grid_entry.relev, 1., "1st result grid has original relevance");
        assert_eq!(result[0].entries[0].matches_language, true, "1st result matches language");
        assert_eq!(result[1].relev, 1., "2nd result has original relevance");
        assert_eq!(result[1].entries[0].grid_entry.id, 0, "2nd result is a grid with 0 in the lang set");
        assert_eq!(result[1].entries[0].grid_entry.relev, 1., "2nd result grid has original relevance");
        assert_eq!(result[1].entries[0].matches_language, true, "2nd result matches language");
        assert_eq!(result[2].relev, 0.96, "3rd result has reduced relevance");
        assert_eq!(result[2].entries[0].grid_entry.id, 3, "3rd result is a grid that doesnt include lang 0");
        assert_eq!(result[2].entries[0].grid_entry.relev, 0.96, "3rd result grid has reduced relevance");
        assert_eq!(result[2].entries[0].matches_language, false, "3rd result does not match language");
        assert_eq!(result[3].relev, 0.96, "4th result has reduced relevance");
        assert_eq!(result[3].entries[0].grid_entry.id, 1, "4th result is the 4th highest grid id");
        assert_eq!(result[3].entries[0].grid_entry.relev, 0.96, "4th result grid has reduced relevance");
        assert_eq!(result[3].entries[0].matches_language, false, "4th result does not match language");
    }
    // Language 3 appears in none of the stored lang sets, so every result is penalized.
    println!("Coalesce single - language 3, language matching no grids");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            key: MatchKey {
                match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                lang_set: langarray_to_langfield(&[3]),
            },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 4, "Returns 4 results");
        assert_eq!(result[0].relev, 0.96, "1st result has reduced relevance");
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st result has highest grid id, which is the tiebreaker for sorting");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.96, "1st result grid has reduced relevance");
        assert_eq!(result[0].entries[0].matches_language, false, "1st result does not match language");
        assert_eq!(result[1].relev, 0.96, "2nd result has reduced relevance");
        assert_eq!(result[1].entries[0].grid_entry.id, 2, "2nd result is the 2nd highest grid id");
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.96, "2nd result grid has reduced relevance");
        assert_eq!(result[1].entries[0].matches_language, false, "2nd result does not match language");
        assert_eq!(result[2].relev, 0.96, "3rd result has reduced relevance");
        assert_eq!(result[2].entries[0].grid_entry.id, 1, "3rd result is the 3rd highest grid id");
        assert_eq!(result[2].entries[0].grid_entry.relev, 0.96, "3rd result grid has reduced relevance");
        assert_eq!(result[2].entries[0].matches_language, false, "3rd result does not match language");
        assert_eq!(result[3].relev, 0.96, "4th result has reduced relevance");
        assert_eq!(result[3].entries[0].grid_entry.id, 0, "4th result is the 4th highest grid id");
        assert_eq!(result[3].entries[0].grid_entry.relev, 0.96, "4th result grid has reduced relevance");
        assert_eq!(result[3].entries[0].matches_language, false, "4th result does not match language");
    }
}
#[test]
// With `nearby_only` set on the match key, only grids within the proximity
// radius survive, and equal-relev/equal-score grids are ordered by distance
// to the proximity point [100, 100].
fn coalesce_single_nearby_only() {
    let tmp_dir: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut grid_builder = GridStoreBuilder::new(tmp_dir.path()).unwrap();
    // Four grids with identical relev/score at varying distances from [100, 100].
    let grids: Vec<GridEntry> = [(1, 100, 100), (2, 50, 50), (3, 90, 90), (4, 200, 200)]
        .iter()
        .map(|&(id, x, y)| GridEntry { id, x, y, relev: 1., score: 1, source_phrase_hash: 0 })
        .collect();
    grid_builder
        .insert(&GridKey { phrase_id: 1, lang_set: 1 }, grids)
        .expect("Unable to insert record");
    grid_builder.finish().unwrap();
    let store =
        GridStore::new_with_options(tmp_dir.path(), 14, 1, 200., global_bbox_for_zoom(14), 1.0)
            .unwrap();
    let stack = vec![PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            id: 0,
            nearby_only: true,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    }];
    let match_opts = MatchOpts { zoom: 14, proximity: Some([100, 100]), ..MatchOpts::default() };
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    let returned_ids: Vec<u32> =
        tree_result.iter().map(|ctx| ctx.entries[0].grid_entry.id).collect();
    assert_eq!(
        returned_ids,
        [1, 3],
        "Results with the same relev and score should be ordered by distance"
    );
}
#[test]
// Exercises per-match-key `bounds` filtering for a single-index coalesce:
// alone, intersected with the nearby_only proximity buffer, and intersected
// with both the buffer and the query-level bbox. Only results inside every
// active restriction survive.
fn coalesce_single_test_bounds() {
    let directory: tempfile::TempDir = tempfile::tempdir().unwrap();
    let mut builder = GridStoreBuilder::new(directory.path()).unwrap();
    let key = GridKey { phrase_id: 1, lang_set: 1 };
    // Five grids spread out so each scenario below selects a different subset.
    let entries = vec![
        GridEntry { id: 1, x: 100, y: 100, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 2, x: 50, y: 50, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 3, x: 90, y: 90, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 4, x: 105, y: 105, relev: 1., score: 1, source_phrase_hash: 0 },
        GridEntry { id: 5, x: 200, y: 200, relev: 1., score: 1, source_phrase_hash: 0 },
    ];
    builder.insert(&key, entries).expect("Unable to insert record");
    builder.finish().unwrap();
    let store =
        GridStore::new_with_options(directory.path(), 14, 1, 200., global_bbox_for_zoom(14), 1.0)
            .unwrap();
    // Scenario 1: bounds only — grids 1, 3, 4 fall inside [80, 80, 110, 110].
    println!("Coalesce single - bounds");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            nearby_only: false,
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            bounds: Some([80, 80, 110, 110]),
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts { zoom: 14, proximity: None, ..MatchOpts::default() };
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    let result_ids: Vec<u32> =
        tree_result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    assert_eq!(result_ids, [4, 1, 3], "Results are restricted to bounds");
    // Scenario 2: bounds AND the nearby_only proximity buffer must both pass.
    println!("Coalesce single - bounds with nearby_only buffer");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            nearby_only: true,
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            bounds: Some([40, 40, 95, 95]),
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts { zoom: 14, proximity: Some([90, 90]), ..MatchOpts::default() };
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    let result_ids: Vec<u32> =
        tree_result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    assert_eq!(
        result_ids,
        [3],
        "Results are restricted to the intersect of bounds and the nearby_only buffer"
    );
    // Scenario 3: bounds, nearby_only buffer, AND the query bbox all intersect.
    println!("Coalesce single - bounds with nearby_only buffer and input bbox");
    let subquery = PhrasematchSubquery {
        store: &store,
        idx: 1,
        non_overlapping_indexes: FixedBitSet::with_capacity(MAX_INDEXES),
        weight: 1.,
        match_keys: vec![MatchKeyWithId {
            nearby_only: true,
            id: 0,
            key: MatchKey { match_phrase: MatchPhrase::Range { start: 1, end: 3 }, lang_set: 1 },
            bounds: Some([85, 85, 210, 210]),
            ..MatchKeyWithId::default()
        }],
        mask: 1 << 0,
    };
    let stack = vec![subquery];
    let match_opts = MatchOpts {
        zoom: 14,
        proximity: Some([100, 100]),
        bbox: Some([40, 40, 95, 95]),
        ..MatchOpts::default()
    };
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    let result_ids: Vec<u32> =
        tree_result.iter().map(|context| context.entries[0].grid_entry.id).collect();
    assert_eq!(
        result_ids,
        [3],
        "Results are restricted to the intersect of bounds, the nearby_only buffer, and the input bbox"
    );
}
#[test]
// Multi-index coalesce across two stores at different zooms (store1 at zoom 1,
// store2 at zoom 2). Grids from the higher-zoom index that fall inside a
// lower-zoom grid's tile form combined contexts; checks context masks, entry
// ordering, and scoredist both without and with proximity, and that
// `coalesce` agrees with `tree_coalesce`.
fn coalesce_multi_test() {
    // Add more specific layer into a store
    let store1 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: 1 },
            entries: vec![
                GridEntry { id: 1, x: 1, y: 1, relev: 1., score: 1, source_phrase_hash: 0 },
                // TODO: this isn't a real tile at zoom 1. Maybe pick more realistic test case?
                GridEntry { id: 2, x: 2, y: 2, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        0,
        1,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        40.,
    );
    let store2 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 2, lang_set: 1 },
            entries: vec![
                GridEntry { id: 1, x: 1, y: 1, relev: 1., score: 3, source_phrase_hash: 0 },
                GridEntry { id: 2, x: 2, y: 2, relev: 1., score: 3, source_phrase_hash: 0 },
                GridEntry { id: 3, x: 3, y: 3, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        1,
        2,
        1,
        FixedBitSet::with_capacity(MAX_INDEXES),
        40.,
    );
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 1,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 1,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    // Test coalesce multi with no proximity or bbox
    println!("Coalsece multi - no proximity no bbox");
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].relev, 1., "1st result has relevance 1");
    assert_eq!(result[0].mask, 3, "1st result context has correct mask");
    assert_eq!(result[0].entries.len(), 2, "1st result has 2 coalesce entries");
    assert_eq!(
        result[0].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777218,
            mask: 1 << 0,
            distance: 0.,
            scoredist: 3.,
            grid_entry: GridEntry {
                id: 2,
                x: 2,
                y: 2,
                relev: 0.5,
                score: 3,
                source_phrase_hash: 0,
            }
        },
        "1st result 1st entry is the highest score from the higher zoom index"
    );
    assert_eq!(
        result[0].entries[1],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 0,
            tmp_id: 1,
            mask: 1 << 1,
            distance: 0.,
            scoredist: 1.,
            grid_entry: GridEntry {
                id: 1,
                x: 1,
                y: 1,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "1st result 2nd entry is the overelpping grid from the lower zoom index"
    );
    assert_eq!(result[1].relev, 1., "2nd result has relevance 1");
    assert_eq!(result[1].mask, 3, "2nd result context has correct mask");
    assert_eq!(result[1].entries.len(), 2, "2nd result has 2 coalesce entries");
    assert_eq!(
        result[1].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777219,
            mask: 1 << 0,
            distance: 0.,
            scoredist: 1.,
            grid_entry: GridEntry {
                id: 3,
                x: 3,
                y: 3,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "2nd result 1st entry is the lower score grid that overlaps with a grid "
    );
    // Fixed: this assertion previously checked `result[0].entries[1]`, which
    // duplicated the check above while the message describes the 2nd result's
    // 2nd entry. It now checks `result[1].entries[1]` as the message intends.
    assert_eq!(
        result[1].entries[1],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 0,
            tmp_id: 1,
            mask: 1 << 1,
            distance: 0.,
            scoredist: 1.,
            grid_entry: GridEntry {
                id: 1,
                x: 1,
                y: 1,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "2nd result 2nd entry is the overlapping grid from the lower zoom index"
    );
    // Test coalesce multi with proximity
    println!("Coalesce multi - with proximity");
    let match_opts = MatchOpts { zoom: 2, proximity: Some([3, 3]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].relev, 1., "1st result context has relevance 1");
    assert_eq!(result[0].mask, 3, "1st result context has correct mask");
    assert_eq!(result[0].entries.len(), 2, "1st result has 2 coalesce entries");
    assert_eq!(
        result[0].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777219,
            mask: 1 << 0,
            distance: 0.,
            scoredist: 1.5839497841387566,
            grid_entry: GridEntry {
                id: 3,
                x: 3,
                y: 3,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "1st result 1st entry is closest entry in the higher zoom index"
    );
    assert_eq!(
        result[0].entries[1],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 0,
            tmp_id: 1,
            mask: 1 << 1,
            distance: 0.,
            scoredist: 1.5839497841387566,
            grid_entry: GridEntry {
                id: 1,
                x: 1,
                y: 1,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "1st result 2nd entry is the overlapping entry, the distance for the outer entry is 0"
    );
    assert_eq!(result[1].entries.len(), 2, "2nd result has 2 coalesce entries");
    assert_eq!(
        result[1].entries[0],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 1,
            tmp_id: 16777218,
            mask: 1 << 0,
            distance: 1.4142135623730951,
            scoredist: 1.109893833332405,
            grid_entry: GridEntry {
                id: 2,
                x: 2,
                y: 2,
                relev: 0.5,
                score: 3,
                source_phrase_hash: 0,
            }
        },
        "2nd result 1st entry is the farther away entry from the higher zoom index"
    );
    assert_eq!(
        result[1].entries[1],
        CoalesceEntry {
            phrasematch_id: 0,
            matches_language: true,
            idx: 0,
            tmp_id: 1,
            mask: 1 << 1,
            distance: 0.,
            scoredist: 1.5839497841387566,
            grid_entry: GridEntry {
                id: 1,
                x: 1,
                y: 1,
                relev: 0.5,
                score: 1,
                source_phrase_hash: 0,
            }
        },
        "2nd result 2nd entry is the overlapping entry, the distance for the outer entry is 0"
    );
}
#[test]
// Multi-index coalesce with language matching. store1's grid covers all
// languages; store2 has two co-located grids under different single-language
// sets. Queries with ALL_LANGUAGES, a matching language (0), and a language
// matching nothing (3) check that only non-matching grids are penalized
// (0.5 -> 0.48 per-entry, 1.0 -> 0.98 per-context) and that `coalesce`
// agrees with `tree_coalesce`.
fn coalesce_multi_languages_test() {
    // Store 1 with grids in all languages
    let store1 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: ALL_LANGUAGES },
            entries: vec![GridEntry {
                id: 1,
                x: 1,
                y: 1,
                relev: 1.,
                score: 1,
                source_phrase_hash: 0,
            }],
        }],
        0,
        1,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // Store 2 with grids in multiple language sets
    let store2 = create_store(
        vec![
            // Insert grid with lang_set 1
            StoreEntryBuildingBlock {
                grid_key: GridKey { phrase_id: 2, lang_set: langarray_to_langfield(&[1]) },
                entries: vec![GridEntry {
                    id: 2,
                    x: 1,
                    y: 1,
                    relev: 1.,
                    score: 1,
                    source_phrase_hash: 0,
                }],
            },
            // Insert grid with lang_set 0
            StoreEntryBuildingBlock {
                grid_key: GridKey { phrase_id: 2, lang_set: langarray_to_langfield(&[0]) },
                entries: vec![GridEntry {
                    id: 3,
                    x: 1,
                    y: 1,
                    relev: 1.,
                    score: 1,
                    source_phrase_hash: 0,
                }],
            },
        ],
        1,
        1,
        1,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // Test ALL LANGUAGES
    println!("Coalesce multi - all languages");
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 2, "Two results are returned");
        assert_eq!(result[0].entries.len(), 2, "1st context has two entries");
        assert_eq!(result[0].relev, 1., "1st context has relevance of 1");
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st entry in 1st result has highest grid id, which is the tiebreaker for sorting");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.5, "1st entry in 1st result has original relevance" );
        assert_eq!(result[0].entries[0].matches_language, true, "1st entry in 1st result matches language" );
        assert_eq!(result[0].entries[1].grid_entry.id, 1, "2nd entry in 1st result is the overapping grid" );
        assert_eq!(result[0].entries[1].grid_entry.relev, 0.5, "2nd entry in 1st result has original relevance" );
        assert_eq!(result[0].entries[1].matches_language, true, "2nd entry in 1st result matches language" );
        assert_eq!(result[1].entries.len(), 2, "2nd context has two entries");
        assert_eq!(result[1].relev, 1., "2nd context has relevance of 1");
        assert_eq!(result[1].entries[0].grid_entry.id, 2, "1st entry in 2nd result is the lower grid id" );
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.5, "1st entry in 2nd result has original relevance" );
        assert_eq!(result[1].entries[0].matches_language, true, "1st entry in 2nd result matches language" );
        assert_eq!(result[1].entries[1].grid_entry.id, 1, "2nd entry in 2nd result is the overlapping grid" );
        assert_eq!(result[1].entries[1].grid_entry.relev, 0.5, "2nd entry in 2nd result has original relevance" );
        assert_eq!(result[1].entries[1].matches_language, true, "2nd entry in 2nd result matches language" );
    }
    // Test language 0: matches store2's id-3 grid; id-2 grid (lang 1) is penalized.
    println!("Coalesce multi - language 0");
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: langarray_to_langfield(&[0]),
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 2, "Two results are returned");
        assert_eq!(result[0].entries.len(), 2, "1st context has two entries");
        assert_eq!(result[0].relev, 1., "1st context has relevance of 1");
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st entry in 1st result is the id of the better language match");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.5, "1st entry in 1st result has original relevance");
        assert_eq!(result[0].entries[0].matches_language, true, "1st entry in 1st result matches language");
        assert_eq!(result[0].entries[1].grid_entry.id, 1, "2nd entry in 1st result is the overapping grid");
        assert_eq!(result[0].entries[1].grid_entry.relev, 0.5, "2nd entry in 1st result has original relevance because the grid has all languages");
        assert_eq!(result[0].entries[1].matches_language, true, "2nd entry in 1st result matches language");
        assert_eq!(result[1].entries.len(), 2, "2nd context has two entries");
        assert_eq!(result[1].relev, 0.98, "2nd context has lower overall relevance due to language penalty");
        assert_eq!(result[1].entries[0].grid_entry.id, 2, "1st entry in 2nd result has the id of the worse language match");
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.48, "1st entry in 2nd result has lower relevance due to language penalty");
        assert_eq!(result[1].entries[0].matches_language, false, "1st entry in 2nd result does not match language");
        assert_eq!(result[1].entries[1].grid_entry.id, 1, "2nd entry in 2nd result is the overlapping grid");
        assert_eq!(result[1].entries[1].grid_entry.relev, 0.5, "2nd entry in 2nd result has original relevance because the grid has all languages");
        assert_eq!(result[1].entries[1].matches_language, true, "2nd entry in 2nd result matches language");
    }
    // Test language 3: matches neither of store2's grids, so both are penalized.
    println!("Coalsece multi - language 3");
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: langarray_to_langfield(&[3]),
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    let match_opts = MatchOpts { zoom: 6, ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    #[cfg_attr(rustfmt, rustfmt::skip)]
    {
        assert_eq!(result.len(), 2, "Two results are returned");
        assert_eq!(result[0].entries.len(), 2, "1st context has two entries");
        assert_eq!(result[0].relev, 0.98, "1st context has lower overall relevance due to language penalty");
        assert_eq!(result[0].entries[0].grid_entry.id, 3, "1st entry in 1st result has highest grid id, which is the tiebreaker for sorting");
        assert_eq!(result[0].entries[0].grid_entry.relev, 0.48, "1st entry in 1st result has lower relevance due to language penalty");
        assert_eq!(result[0].entries[0].matches_language, false, "1st entry in 1st result does not match language");
        assert_eq!(result[0].entries[1].grid_entry.id, 1, "2nd entry in 1st result is the overapping grid");
        assert_eq!(result[0].entries[1].grid_entry.relev, 0.5, "2nd entry in 1st result has original relevance because the grid has all languages");
        assert_eq!(result[0].entries[1].matches_language, true, "2nd entry in 1st result matches language");
        assert_eq!(result[1].entries.len(), 2, "2nd context has two entries");
        assert_eq!(result[1].relev, 0.98, "2nd context has lower overall relevance due to language penalty");
        assert_eq!(result[1].entries[0].grid_entry.id, 2, "1st entry in 2nd result has the id of the other grid");
        assert_eq!(result[1].entries[0].grid_entry.relev, 0.48, "1st entry in 2nd result has lower relevance due to language penalty");
        assert_eq!(result[1].entries[0].matches_language, false, "1st entry in 2nd result does not match language");
        assert_eq!(result[1].entries[1].grid_entry.id, 1, "2nd entry in 2nd result is the overlapping grid");
        assert_eq!(result[1].entries[1].grid_entry.relev, 0.5, "2nd entry in 2nd result has original relevance because the grid has all languages");
        assert_eq!(result[1].entries[1].matches_language, true, "2nd entry in 2nd result matches language");
    }
}
#[test]
// Multi-index coalesce with proximity-blended scoring. store2 holds a
// low-score grid at x=4600 (id 3) and a high-score grid at x=4800 (id 2);
// both proximity points below are nearer to id 3, so it sorts first in both
// scenarios. The second scenario's assertion messages previously described
// the opposite of the asserted conditions (they called id 3 the farther,
// higher-score feature); the messages are corrected here — the asserted
// values and pass/fail behavior are unchanged.
fn coalesce_multi_scoredist() {
    // Add more specific layer into a store
    let store1 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: 0 },
            entries: vec![GridEntry {
                id: 1,
                x: 0,
                y: 0,
                relev: 1.,
                score: 1,
                source_phrase_hash: 0,
            }],
        }],
        0,
        0,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // Add less specific layer into a store
    let store2 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 2, lang_set: 0 },
            entries: vec![
                GridEntry { id: 2, x: 4800, y: 6200, relev: 1., score: 7, source_phrase_hash: 0 },
                GridEntry { id: 3, x: 4600, y: 6200, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        1,
        14,
        1,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 0,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: 0,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    // Closer proximity to one grid
    println!("Coalesce multi - proximity very close to one grid");
    let match_opts = MatchOpts { zoom: 14, proximity: Some([4601, 6200]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].entries[0].grid_entry.id, 3, "Closer feature is 1st");
    assert_eq!(result[1].entries[0].grid_entry.id, 2, "Farther feature is 2nd");
    assert_eq!(
        result[0].entries[0].distance < result[1].entries[0].distance,
        true,
        "1st grid in 1st context is closer than 1st grid in 2nd context"
    );
    // Proximity is still close to same grid, but less close
    println!("Coalesce multi - proximity less close to one grid");
    let match_opts = MatchOpts { zoom: 14, proximity: Some([4610, 6200]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    assert_eq!(result, tree_result);
    assert_eq!(result[0].entries[0].grid_entry.id, 3, "Closer feature with lower score is 1st");
    assert_eq!(result[1].entries[0].grid_entry.id, 2, "Farther feature with higher score is 2nd");
    assert_eq!(
        result[0].entries[0].distance > result[1].entries[0].distance,
        false,
        "1st grid in 1st context is not farther than 1st grid in 2nd context"
    );
}
// TODO: language tests
#[test]
fn coalesce_multi_test_bbox() {
    // Three stores at zoom levels 1, 2, and 5 so the bbox filter can be
    // exercised below, at, and above a subquery's zoom.
    let store1 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 1, lang_set: ALL_LANGUAGES },
            entries: vec![
                GridEntry { id: 1, x: 0, y: 0, relev: 0.8, score: 1, source_phrase_hash: 0 },
                GridEntry { id: 2, x: 1, y: 1, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        0,
        1,
        0,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    let store2 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 2, lang_set: ALL_LANGUAGES },
            entries: vec![
                GridEntry { id: 3, x: 3, y: 0, relev: 1., score: 1, source_phrase_hash: 0 },
                GridEntry { id: 4, x: 0, y: 3, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        1,
        2,
        1,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    let store3 = create_store(
        vec![StoreEntryBuildingBlock {
            grid_key: GridKey { phrase_id: 3, lang_set: ALL_LANGUAGES },
            entries: vec![
                GridEntry { id: 5, x: 21, y: 7, relev: 1., score: 1, source_phrase_hash: 0 },
                GridEntry { id: 6, x: 21, y: 18, relev: 1., score: 1, source_phrase_hash: 0 },
            ],
        }],
        2,
        5,
        2,
        FixedBitSet::with_capacity(MAX_INDEXES),
        200.,
    );
    // First stack: store1 + store2 (zooms 1 and 2).
    let stack = vec![
        PhrasematchSubquery {
            store: &store1.store,
            idx: store1.idx,
            non_overlapping_indexes: store1.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 3 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    // Test bbox at zoom 1 that should contain 2 grids
    println!("Coalesce multi - bbox at lower zoom of subquery");
    let match_opts = MatchOpts { zoom: 1, bbox: Some([0, 0, 1, 0]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    // NOTE(review): tree result is computed but the comparison against the
    // flat coalesce result is commented out below — presumably a known
    // divergence; confirm before re-enabling.
    let _tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    // assert_eq!(result, tree_result);
    assert_eq!(result.len(), 2, "Bbox [1,0,0,1,0] - 2 results are within the bbox");
    assert_eq!(
        (result[0].entries[0].grid_entry.x, result[0].entries[0].grid_entry.y),
        (3, 0),
        "Bbox [1,0,0,1,0] - 1st result is zxy 2/3/0, and the higher relevance grid within the bbox"
    );
    assert_eq!(
        (result[1].entries[0].grid_entry.x, result[1].entries[0].grid_entry.y),
        (0, 0),
        "Bbox [1,0,0,1,0] - 2nd result is zxy 1/0/0"
    );
    // Test bbox at zoom 2 that should contain 2 grids
    println!("Coalesce multi - bbox at higher zoom of subquery");
    let match_opts = MatchOpts { zoom: 2, bbox: Some([0, 0, 1, 3]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let _tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    // assert_eq!(result, tree_result);
    assert_eq!(result.len(), 2, "Bbox [2,0,0,1,3] - 2 results are within the bbox");
    assert_eq!(
        (result[0].entries[0].grid_entry.x, result[0].entries[0].grid_entry.y),
        (0, 3),
        "Bbox [2,0,0,1,3] - 1st result is zxy 2/0/3"
    );
    assert_eq!(
        (result[1].entries[0].grid_entry.x, result[1].entries[0].grid_entry.y),
        (0, 0),
        "Bbox [2,0,0,1,3] - 2nd result is zxy 1/0/0"
    );
    // Test bbox at zoom 6 that should contain 2 grids
    println!("Coalesce multi - bbox at zoom 6");
    let match_opts = MatchOpts { zoom: 6, bbox: Some([14, 30, 15, 64]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let _tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    // assert_eq!(result, tree_result);
    assert_eq!(result.len(), 2, "Bbox [6,14,30,15,64] - 2 results are within the bbox");
    assert_eq!(
        (result[0].entries[0].grid_entry.x, result[0].entries[0].grid_entry.y),
        (0, 3),
        "Bbox [6,14,30,15,64] - 1st result is zxy 2/0/3"
    );
    assert_eq!(
        (result[1].entries[0].grid_entry.x, result[1].entries[0].grid_entry.y),
        (0, 0),
        "Bbox [6,14,30,15,64] - 2nd result is zxy 1/0/0"
    );
    // Test bbox at lower zoom than either of the expected results
    println!("Coalesce multi - bbox at lower zoom than either of the expected results");
    // Second stack: store2 + store3 (zooms 2 and 5) so the bbox zoom (1) is
    // below both subquery zooms.
    let stack = vec![
        PhrasematchSubquery {
            store: &store2.store,
            idx: store2.idx,
            non_overlapping_indexes: store2.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 0,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 4 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 1,
        },
        PhrasematchSubquery {
            store: &store3.store,
            idx: store3.idx,
            non_overlapping_indexes: store3.non_overlapping_indexes.clone(),
            weight: 0.5,
            match_keys: vec![MatchKeyWithId {
                id: 1,
                key: MatchKey {
                    match_phrase: MatchPhrase::Range { start: 1, end: 4 },
                    lang_set: ALL_LANGUAGES,
                },
                ..MatchKeyWithId::default()
            }],
            mask: 1 << 0,
        },
    ];
    let match_opts = MatchOpts { zoom: 1, bbox: Some([0, 0, 1, 0]), ..MatchOpts::default() };
    let result = coalesce(stack.iter().map(|s| s.clone().into()).collect(), &match_opts).unwrap();
    let tree = stackable(&stack);
    let _tree_result = truncate_coalesce_results(tree_coalesce(&tree, &match_opts).unwrap());
    // assert_eq!(result, tree_result);
    assert_eq!(result.len(), 2, "Bbox [1,0,0,1,0] - 2 results are within the bbox");
    assert_eq!(
        (result[0].entries[0].grid_entry.x, result[0].entries[0].grid_entry.y),
        (3, 0),
        "Bbox [1,0,0,1,0] - 1st result is xzy 2/3/0"
    );
    assert_eq!(
        (result[1].entries[0].grid_entry.x, result[1].entries[0].grid_entry.y),
        (21, 7),
        "Bbox [1,0,0,1,0] - 2nd result is xzy 5/20/7"
    );
}
/// Keeps only the contexts whose relevance is within 0.25 of the best
/// (first) result; used to align tree-coalesce output with the flat
/// coalesce's truncation before comparing them.
#[cfg(test)]
fn truncate_coalesce_results(results: Vec<CoalesceContext>) -> Vec<CoalesceContext> {
    // An empty input has no best result; 1.0 makes the filter a no-op.
    // (`first().map_or` replaces the `len() == 0` check, and consuming the
    // vector avoids the redundant clone of each retained context.)
    let max_relevance = results.first().map_or(1.0, |r| r.relev);
    results.into_iter().filter(|r| max_relevance - r.relev < 0.25).collect()
}
// TODO: add proximity test with max score
// TODO: add sort tests?
|
use std::cell::RefCell;
use std::io::{stdout, Write};
use std::rc::Rc;
use super::window::TermionWindow;
use crate::event_controller::window::{Layout, Window, WindowPosition, WindowSize};
use termion::clear;
use termion::color::DetectColors;
use termion::raw::IntoRawMode;
use termion::screen::AlternateScreen;
/// Number of terminal rows reserved at the bottom for the status bar.
const STATUS_HEIGHT: u32 = 1;

/// Terminal-backed `Layout` built on termion's raw-mode alternate screen.
pub struct TermionLayout {
    // Terminal dimensions in cells, captured once at construction.
    height: u32,
    width: u32,
    // Shared handle to the terminal writer; cloned into each created window.
    writer: Rc<RefCell<Box<dyn Write>>>,
}
impl TermionLayout {
pub fn new() -> Self {
let mut stdout = AlternateScreen::from(stdout().into_raw_mode().unwrap());
debug!("{} colors available", stdout.available_colors().unwrap());
write!(stdout, "{}", clear::All).unwrap();
let (width, height) = termion::terminal_size().unwrap();
Self {
writer: Rc::new(RefCell::new(Box::new(stdout))),
height: u32::from(height),
width: u32::from(width),
}
}
}
impl Layout for TermionLayout {
    /// Creates the main view window, filling everything above the status bar.
    fn create_view_window(&self) -> Box<dyn Window> {
        let origin = WindowPosition { y: 0, x: 0 };
        let size = WindowSize {
            height: self.height - STATUS_HEIGHT,
            width: self.width,
        };
        Box::new(TermionWindow::new(self.writer.clone(), origin, size))
    }

    /// Creates a one-row status-bar window pinned to the bottom of the screen.
    fn create_new_status_bar_window(&self) -> Box<dyn Window> {
        let origin = WindowPosition {
            y: self.height - STATUS_HEIGHT,
            x: 0,
        };
        let size = WindowSize {
            height: STATUS_HEIGHT,
            width: self.width,
        };
        Box::new(TermionWindow::new(self.writer.clone(), origin, size))
    }
}
|
/// Refers to a part of a span
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Segment {
    /// ID of the span this segment refers to
    pub span_id: usize,
    /// Beginning of this segment within the span (included)
    pub start: usize,
    /// End of this segment within the span (excluded)
    pub end: usize,
    /// Width of this segment
    // NOTE(review): stored separately from `end - start`, so presumably
    // display width can differ from byte/char length — confirm with callers.
    pub width: usize,
}
impl Segment {
    /// Test helper: bundles this segment with the text it refers to.
    #[cfg(test)]
    pub fn with_text(self, text: &str) -> SegmentWithText<'_> {
        SegmentWithText { seg: self, text }
    }
}
/// A `Segment` paired with the slice of text it refers to.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SegmentWithText<'a> {
    /// The segment's position/width metadata.
    pub seg: Segment,
    /// The text the segment points at.
    pub text: &'a str,
}
|
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate futures;
#[macro_use]
extern crate serde_json;
extern crate clap;
extern crate deno;
mod ansi;
pub mod compiler;
pub mod deno_dir;
pub mod errors;
pub mod flags;
mod fs;
mod global_timer;
mod http_body;
mod http_util;
pub mod js_errors;
pub mod msg;
pub mod msg_util;
pub mod ops;
pub mod permissions;
mod repl;
pub mod resolve_addr;
pub mod resources;
mod startup_data;
pub mod state;
mod tokio_util;
mod tokio_write;
pub mod version;
pub mod worker;
use crate::errors::RustOrJsError;
use crate::state::ThreadSafeState;
use crate::worker::root_specifier_to_url;
use crate::worker::Worker;
use futures::lazy;
use futures::Future;
use log::{LevelFilter, Metadata, Record};
use std::env;
// Global logger instance handed to `log::set_logger` in `main`.
static LOGGER: Logger = Logger;

/// Minimal `log::Log` implementation that writes records to stdout.
struct Logger;

impl log::Log for Logger {
  fn enabled(&self, metadata: &Metadata) -> bool {
    // Honor the level configured via `log::set_max_level` in `main`.
    metadata.level() <= log::max_level()
  }

  fn log(&self, record: &Record) {
    if self.enabled(record.metadata()) {
      println!("{} RS - {}", record.level(), record.args());
    }
  }

  // Stdout is line-buffered by `println!`; nothing to flush explicitly.
  fn flush(&self) {}
}
/// Prints the error to stderr and terminates the process with exit code 1.
fn print_err_and_exit(err: RustOrJsError) {
  // `err` already implements `Display`; the intermediate `to_string()`
  // allocation was redundant.
  eprintln!("{}", err);
  std::process::exit(1);
}
/// Exits the process (printing the error) if `r` is an `Err`; otherwise a no-op.
fn js_check<E>(r: Result<(), E>)
where
  E: Into<RustOrJsError>,
{
  match r {
    Ok(()) => {}
    Err(err) => print_err_and_exit(err.into()),
  }
}
// TODO(ry) Move this to main.rs
/// Prints metadata (local path, media type, compiled/map artifacts, deps)
/// about the module at `url`, fetching and possibly compiling it first.
/// Errors are printed and swallowed — this is an informational command.
pub fn print_file_info(worker: &Worker, url: &str) {
  let maybe_out =
    worker::fetch_module_meta_data_and_maybe_compile(&worker.state, url, ".");
  // Replaces the `is_err()`-then-`unwrap()` pattern with a single match.
  let out = match maybe_out {
    Ok(out) => out,
    Err(err) => {
      println!("{}", err);
      return;
    }
  };
  println!("{} {}", ansi::bold("local:".to_string()), &(out.filename));
  println!(
    "{} {}",
    ansi::bold("type:".to_string()),
    msg::enum_name_media_type(out.media_type)
  );
  // `if let Some` replaces the `is_some()` + `unwrap()` anti-pattern below.
  if let Some(output_code_filename) = out.maybe_output_code_filename.as_ref() {
    println!(
      "{} {}",
      ansi::bold("compiled:".to_string()),
      output_code_filename,
    );
  }
  if let Some(source_map_filename) = out.maybe_source_map_filename.as_ref() {
    println!(
      "{} {}",
      ansi::bold("map:".to_string()),
      source_map_filename
    );
  }
  if let Some(deps) = worker.modules.deps(&out.module_name) {
    println!("{}{}", ansi::bold("deps:\n".to_string()), deps.name);
    if let Some(ref depsdeps) = deps.deps {
      for d in depsdeps {
        println!("{}", d);
      }
    }
  } else {
    println!(
      "{} cannot retrieve full dependency graph",
      ansi::bold("deps:".to_string()),
    );
  }
}
/// Entry point: parses flags, configures logging, then runs one of three
/// modes — `eval` of an inline script, execution of a main module, or the
/// REPL — each as a lazy future on the tokio runtime.
fn main() {
  #[cfg(windows)]
  ansi_term::enable_ansi_support().ok(); // For Windows 10
  log::set_logger(&LOGGER).unwrap();
  let args = env::args().collect();
  let (mut flags, mut rest_argv) =
    flags::set_flags(args).unwrap_or_else(|err| {
      eprintln!("{}", err);
      std::process::exit(1)
    });
  log::set_max_level(if flags.log_debug {
    LevelFilter::Debug
  } else {
    LevelFilter::Warn
  });
  // `deno fmt` is implemented by running the remote prettier script against
  // the user's files, which requires read/write permission.
  if flags.fmt {
    rest_argv.insert(1, "https://deno.land/std/prettier/main.ts".to_string());
    flags.allow_read = true;
    flags.allow_write = true;
  }
  // `--info` implies prefetch: the module must be fetched to inspect it.
  let should_prefetch = flags.prefetch || flags.info;
  let should_display_info = flags.info;
  let state = ThreadSafeState::new(flags, rest_argv, ops::op_selector_std);
  let mut worker = Worker::new(
    "main".to_string(),
    startup_data::deno_isolate_init(),
    state.clone(),
  );
  // TODO(ry) somehow combine the two branches below. They're very similar but
  // it's difficult to get the types to workout.
  if state.flags.eval {
    let main_future = lazy(move || {
      js_check(worker.execute("denoMain()"));
      // Wrap provided script in async function so asynchronous methods
      // work. This is required until top-level await is not supported.
      let js_source = format!(
        "async function _topLevelWrapper(){{
        {}
      }}
      _topLevelWrapper();
      ",
        &state.argv[1]
      );
      // ATM imports in `deno eval` are not allowed
      // TODO Support ES modules once Worker supports evaluating anonymous modules.
      js_check(worker.execute(&js_source));
      // Drive the worker (event loop) to completion.
      worker.then(|result| {
        js_check(result);
        Ok(())
      })
    });
    tokio_util::run(main_future);
  } else if let Some(main_module) = state.main_module() {
    // Normal situation of executing a module.
    let main_future = lazy(move || {
      // Setup runtime.
      js_check(worker.execute("denoMain()"));
      debug!("main_module {}", main_module);
      let main_url = root_specifier_to_url(&main_module).unwrap();
      worker
        .execute_mod_async(&main_url, should_prefetch)
        .and_then(move |worker| {
          if should_display_info {
            // Display file info and exit. Do not run file
            print_file_info(&worker, &main_module);
            std::process::exit(0);
          }
          worker.then(|result| {
            js_check(result);
            Ok(())
          })
        }).map_err(|(err, _worker)| print_err_and_exit(err))
    });
    tokio_util::run(main_future);
  } else {
    // REPL situation.
    let main_future = lazy(move || {
      // Setup runtime.
      js_check(worker.execute("denoMain()"));
      worker
        .then(|result| {
          js_check(result);
          Ok(())
        }).map_err(|(err, _worker): (RustOrJsError, Worker)| {
          print_err_and_exit(err)
        })
    });
    tokio_util::run(main_future);
  }
}
|
use plugin_interface;
use plugin_interface::{PluginResult, PuszRow, PuszRowIdentifier, PuszRowBuilder};
/// Calculator plugin: evaluates arithmetic expressions via `meval`.
#[derive(Debug)]
struct CalcPlugin {
}
impl plugin_interface::Plugin for CalcPlugin {
fn query(&mut self, query: &str) -> PluginResult {
match meval::eval_str(&query) {
Ok(result) => {
let result = result.to_string();
PluginResult::Ok(vec![PuszRowBuilder::new(result, PuszRowIdentifier::new(self.name(), String::new())).build().unwrap()])
}
Err(err) => {
PluginResult::Error(format!("{:?}", err))
}
}
}
fn name(&self) -> &'static str {
"calc"
}
}
/// FFI entry point: returns a boxed plugin if the host's interface version
/// matches the one this plugin was built against.
#[no_mangle]
pub extern "C" fn load(plugin_interface_version: &str) -> Result<Box<dyn plugin_interface::Plugin>, String> {
    // Refuse to load into a host built for a different interface version.
    if plugin_interface_version != plugin_interface::COMMON_INTERFACE_VERSION {
        return Err(format!(
            "compatible with: {} but your version is: {}",
            plugin_interface::COMMON_INTERFACE_VERSION,
            plugin_interface_version
        ));
    }
    Ok(Box::new(CalcPlugin {}))
}
/// FFI entry point: constructs a boxed instance of this plugin for the host.
#[no_mangle]
pub extern "C" fn introduce() -> Box<dyn plugin_interface::Plugin> {
    Box::new(CalcPlugin {})
}
#[cfg(test)]
mod tests {
    use super::*;
    use plugin_interface::*;

    /// Asserts that evaluating `expression` succeeds and its single result
    /// row's label equals `expected_result`.
    ///
    /// Previously a failed evaluation fell through and returned silently
    /// (the `panic!` was commented out), so an `Error` result made the test
    /// pass vacuously. Now any non-`Ok` result fails loudly.
    fn assert_ok_result(expression: &str, expected_result: f64) {
        match (CalcPlugin {}).query(expression) {
            PluginResult::Ok(result) => {
                assert_eq!(expected_result.to_string(), result[0].main_entry.label);
            }
            // Message avoids requiring `Debug` on `PluginResult`.
            _ => panic!("expected ok result for expression: {:?}", expression),
        }
    }

    #[test]
    fn it_works() {
        assert_ok_result("2+2", 4.0);
        assert_ok_result("8/2*(2+2)", 16.0);
    }
}
|
#![cfg_attr(not(feature = "std"), no_std)]
use frame_support::{Parameter, decl_module, decl_event, decl_storage, decl_error, ensure, dispatch};
use sp_runtime::traits::{Member, AtLeast32Bit, Zero, StaticLookup, MaybeSerializeDeserialize};
use codec::{Codec};
use frame_system::{self as system, ensure_signed};
/// The module configuration trait.
/// The module configuration trait.
pub trait Trait: frame_system::Trait {
    /// The overarching event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;

    /// The units in which we record balances.
    type Balance: Parameter + Member + AtLeast32Bit + Codec + Default + Copy + MaybeSerializeDeserialize;
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        type Error = Error<T>;

        fn deposit_event() = default;

        /// Faucet-style dispatch: mints `amount` tokens to the caller,
        /// allowed only while the caller's balance is still zero.
        #[weight = 10_000]
        pub fn give_me(
            origin,
            #[compact] amount: T::Balance
        ) -> dispatch::DispatchResult {
            let origin = ensure_signed(origin)?;
            let origin_account = origin.clone();
            let origin_balance = <Balances<T>>::get(&origin_account);
            // A zero balance is treated as "never requested"; this also means
            // an account that transferred everything away could request again.
            ensure!(origin_balance.is_zero() , "AccountId can request tokens only once");
            <Balances<T>>::insert(&origin_account, amount);
            <TotalSupply<T>>::mutate(|total_supply| *total_supply += amount);
            Ok(())
        }

        /// Moves `amount` tokens from the caller to `target`.
        #[weight = 10_000]
        fn transfer(origin,
            target: <T::Lookup as StaticLookup>::Source,
            #[compact] amount: T::Balance
        ) {
            let origin = ensure_signed(origin)?;
            let origin_account = origin.clone();
            let origin_balance = <Balances<T>>::get(&origin_account);
            let target = T::Lookup::lookup(target)?;
            ensure!(!amount.is_zero(), Error::<T>::AmountZero);
            ensure!(origin_balance >= amount, Error::<T>::BalanceLow);
            // NOTE(review): the event is emitted before the storage writes;
            // within a dispatch this is atomic, but conventionally events are
            // deposited after state changes — confirm intent.
            Self::deposit_event(RawEvent::Transferred(origin, target.clone(), amount));
            <Balances<T>>::insert(origin_account, origin_balance - amount);
            <Balances<T>>::mutate(target, |balance| *balance += amount);
        }
    }
}
decl_event! {
    pub enum Event<T> where
        <T as frame_system::Trait>::AccountId,
        <T as Trait>::Balance,
    {
        /// Some assets were transferred. [from, to, amount]
        Transferred(AccountId, AccountId, Balance),
    }
}
decl_error! {
    pub enum Error for Module<T: Trait> {
        /// Transfer amount should be non-zero
        AmountZero,
        /// Account balance must be greater than or equal to the transfer amount
        BalanceLow,
        /// Balance should be non-zero
        // NOTE(review): this variant is never raised by any dispatch above —
        // either dead or reserved for a future check; confirm.
        BalanceZero,
    }
}
decl_storage! {
    trait Store for Module<T: Trait> as Tokens {
        /// The number of units of assets held by any given account.
        Balances: map hasher(blake2_128_concat) T::AccountId => T::Balance;

        /// The total unit supply of an asset.
        TotalSupply: T::Balance;
    }
}
// The main implementation block for the module.
impl<T: Trait> Module<T> {
// Public immutables
/// Get the asset `id` balance of `who`.
pub fn balance(who: T::AccountId) -> T::Balance {
<Balances<T>>::get(who)
}
/// Get the total supply of an asset `id`.
pub fn total_supply() -> T::Balance {
<TotalSupply<T>>::get()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use frame_support::{impl_outer_origin, assert_ok, assert_noop, parameter_types, weights::Weight};
    use sp_core::H256;
    // The testing primitives are very useful for avoiding having to work with signatures
    // or public keys. `u64` is used as the `AccountId` and no `Signature`s are required.
    use sp_runtime::{Perbill, traits::{BlakeTwo256, IdentityLookup}, testing::Header};

    impl_outer_origin! {
        pub enum Origin for Test where system = frame_system {}
    }

    // For testing the pallet, we construct most of a mock runtime. This means
    // first constructing a configuration type (`Test`) which `impl`s each of the
    // configuration traits of pallets we want to use.
    #[derive(Clone, Eq, PartialEq)]
    pub struct Test;
    parameter_types! {
        pub const BlockHashCount: u64 = 250;
        pub const MaximumBlockWeight: Weight = 1024;
        pub const MaximumBlockLength: u32 = 2 * 1024;
        pub const AvailableBlockRatio: Perbill = Perbill::one();
    }
    // Mostly unit/default associated types — the mock runtime only needs
    // enough of `frame_system` to dispatch calls.
    impl frame_system::Trait for Test {
        type Origin = Origin;
        type Index = u64;
        type Call = ();
        type BlockNumber = u64;
        type Hash = H256;
        type Hashing = BlakeTwo256;
        type AccountId = u64;
        type Lookup = IdentityLookup<Self::AccountId>;
        type Header = Header;
        type Event = ();
        type BlockHashCount = BlockHashCount;
        type MaximumBlockWeight = MaximumBlockWeight;
        type DbWeight = ();
        type BlockExecutionWeight = ();
        type ExtrinsicBaseWeight = ();
        type AvailableBlockRatio = AvailableBlockRatio;
        type MaximumBlockLength = MaximumBlockLength;
        type MaximumExtrinsicWeight = MaximumBlockWeight;
        type Version = ();
        type ModuleToIndex = ();
        type AccountData = ();
        type OnNewAccount = ();
        type OnKilledAccount = ();
    }
    impl Trait for Test {
        type Event = ();
        type Balance = u128;
    }
    type Assets = Module<Test>;

    // This function basically just builds a genesis storage key/value store according to
    // our desired mockup.
    fn new_test_ext() -> sp_io::TestExternalities {
        frame_system::GenesisConfig::default().build_storage::<Test>().unwrap().into()
    }

    /// Total supply should reflect only minted tokens, unchanged by transfers.
    #[test]
    fn querying_total_supply_should_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::give_me(Origin::signed(1), 100));
            assert_eq!(Assets::balance(1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 2, 50));
            assert_eq!(Assets::balance(1), 50);
            assert_eq!(Assets::balance(2), 50);
            assert_ok!(Assets::transfer(Origin::signed(2), 3, 31));
            assert_eq!(Assets::balance(1), 50);
            assert_eq!(Assets::balance(2), 19);
            assert_eq!(Assets::balance(3), 31);
            assert_eq!(Assets::total_supply(), 100);
        });
    }

    // NOTE(review): despite the name, this transfers 50 out of 100 — i.e. an
    // amount *within* the available balance; likely a leftover name from the
    // substrate template. Confirm and consider renaming.
    #[test]
    fn transferring_amount_above_available_balance_should_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::give_me(Origin::signed(1), 100));
            assert_eq!(Assets::balance(1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 2, 50));
            assert_eq!(Assets::balance(1), 50);
            assert_eq!(Assets::balance(2), 50);
        });
    }

    /// Overdraw must fail with `BalanceLow` and leave storage untouched.
    #[test]
    fn transferring_amount_more_than_available_balance_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::give_me(Origin::signed(1), 100));
            assert_eq!(Assets::balance(1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 2, 70));
            assert_eq!(Assets::balance(1), 30);
            assert_eq!(Assets::balance(2), 70);
            assert_noop!(Assets::transfer(Origin::signed(1), 1, 50), Error::<Test>::BalanceLow);
        });
    }

    /// Zero-amount transfers are rejected with `AmountZero`.
    #[test]
    fn transferring_less_than_one_unit_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::give_me(Origin::signed(1), 100));
            assert_eq!(Assets::balance(1), 100);
            assert_noop!(Assets::transfer(Origin::signed(1), 2, 0), Error::<Test>::AmountZero);
        });
    }

    /// Transfers exceeding the account's holdings fail with `BalanceLow`.
    #[test]
    fn transferring_more_units_than_total_supply_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::give_me(Origin::signed(1), 100));
            assert_eq!(Assets::balance(1), 100);
            assert_noop!(Assets::transfer(Origin::signed(1), 2, 101), Error::<Test>::BalanceLow);
        });
    }
}
|
use serde::Serialize;
use crate::domain::catalogue::{Author, Catalogue, Category, Publication, Statistics};
/// Serializable view of a publication's engagement statistics.
#[derive(Serialize)]
pub struct StatisticsDto {
    pub views: u32,
    pub unique_views: u32,
    pub readings: u32,
    pub likes: u32,
    pub reviews: u32,
    // Average rating; fractional values allowed.
    pub stars: f32,
}
impl From<&Statistics> for StatisticsDto {
fn from(statistics: &Statistics) -> Self {
StatisticsDto {
views: statistics.views(),
unique_views: statistics.unique_views(),
readings: statistics.readings(),
likes: statistics.likes(),
reviews: statistics.reviews(),
stars: statistics.stars(),
}
}
}
/// Serializable view of an author's public identity.
#[derive(Serialize)]
pub struct AuthorDto {
    pub id: String,
    pub username: String,
    pub name: String,
    pub lastname: String,
}
impl From<&Author> for AuthorDto {
fn from(author: &Author) -> Self {
AuthorDto {
id: author.id().to_string(),
username: author.username().to_string(),
name: author.name().to_string(),
lastname: author.lastname().to_string(),
}
}
}
/// Serializable view of a publication category.
#[derive(Serialize)]
pub struct CategoryDto {
    pub id: String,
    pub name: String,
}
impl From<&Category> for CategoryDto {
fn from(category: &Category) -> Self {
CategoryDto {
id: category.id().to_string(),
name: category.name().to_string(),
}
}
}
/// Serializable view of a publication, with nested author/category/stats DTOs.
#[derive(Serialize)]
pub struct PublicationDto {
    pub id: String,
    pub author: AuthorDto,
    pub name: String,
    pub synopsis: String,
    pub category: CategoryDto,
    pub tags: Vec<String>,
    // URL or path of the cover image — TODO confirm which.
    pub cover: String,
    pub statistics: StatisticsDto,
    pub premium: bool,
    pub pages: usize,
}
impl From<&Publication> for PublicationDto {
    /// Flattens a domain `Publication` (author, category, tags, statistics)
    /// into its serializable representation.
    fn from(publication: &Publication) -> Self {
        let tags: Vec<String> =
            publication.tags().iter().map(|tag| tag.to_string()).collect();
        Self {
            id: publication.id().to_string(),
            author: AuthorDto::from(publication.author()),
            name: publication.name().to_string(),
            synopsis: publication.synopsis().to_string(),
            category: CategoryDto::from(publication.category()),
            tags,
            cover: publication.cover().to_string(),
            statistics: StatisticsDto::from(publication.statistics()),
            premium: publication.is_premium(),
            pages: publication.pages(),
        }
    }
}
/// Serializable catalogue: its id plus the authors and publications it lists.
#[derive(Serialize)]
pub struct CatalogueDto {
    id: String,
    authors: Vec<AuthorDto>,
    publications: Vec<PublicationDto>,
}
impl From<&Catalogue> for CatalogueDto {
    /// Maps the catalogue's id, authors, and publications into DTO form.
    fn from(catalogue: &Catalogue) -> Self {
        CatalogueDto {
            id: catalogue.base().id().to_string(),
            // Function references replace the redundant closures
            // `|author| AuthorDto::from(author)` (clippy: redundant_closure).
            authors: catalogue.authors().iter().map(AuthorDto::from).collect(),
            publications: catalogue
                .publications()
                .iter()
                .map(PublicationDto::from)
                .collect(),
        }
    }
}
|
use entry::Entry;
use rand::Rng;
use rand::XorShiftRng;
use std::ops::{Add, Index, IndexMut, Sub};
use treap::node::Node;
use treap::tree;
/// An ordered map implemented using a treap.
///
/// A treap is a tree that satisfies both the binary search tree property and a heap property. Each
/// node has a key, a value, and a priority. The key of any node is greater than all keys in its
/// left subtree and less than all keys occuring in its right subtree. The priority of a node is
/// greater than the priority of all nodes in its subtrees. By randomly generating priorities, the
/// expected height of the tree is proportional to the logarithm of the number of keys.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(0, 1);
/// map.insert(3, 4);
///
/// assert_eq!(map[&0], 1);
/// assert_eq!(map.get(&1), None);
/// assert_eq!(map.len(), 2);
///
/// assert_eq!(map.min(), Some(&0));
/// assert_eq!(map.ceil(&2), Some(&3));
///
/// map[&0] = 2;
/// assert_eq!(map.remove(&0), Some((0, 2)));
/// assert_eq!(map.remove(&1), None);
/// ```
pub struct TreapMap<T, U> {
    // Root of the underlying treap; `None` when the map is empty
    // (see `new`/`len`).
    tree: tree::Tree<T, U>,
    // Pseudo-random generator supplying node priorities on insert.
    rng: XorShiftRng,
}
impl<T, U> TreapMap<T, U>
where
T: Ord,
{
/// Constructs a new, empty `TreapMap<T, U>`.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let map: TreapMap<u32, u32> = TreapMap::new();
/// ```
pub fn new() -> Self {
TreapMap {
tree: None,
rng: XorShiftRng::new_unseeded(),
}
}
/// Inserts a key-value pair into the map. If the key already exists in the map, it will return
/// and replace the old key-value pair.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// assert_eq!(map.insert(1, 1), None);
/// assert_eq!(map.get(&1), Some(&1));
/// assert_eq!(map.insert(1, 2), Some((1, 1)));
/// assert_eq!(map.get(&1), Some(&2));
/// ```
pub fn insert(&mut self, key: T, value: U) -> Option<(T, U)> {
let TreapMap { ref mut tree, ref mut rng } = self;
let new_node = Node::new(key, value, rng.next_u32());
tree::insert(tree, new_node).and_then(|entry| {
let Entry { key, value } = entry;
Some((key, value))
})
}
/// Removes a key-value pair from the map. If the key exists in the map, it will return the
/// associated key-value pair. Otherwise it will return `None`.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert_eq!(map.remove(&1), Some((1, 1)));
/// assert_eq!(map.remove(&1), None);
/// ```
pub fn remove(&mut self, key: &T) -> Option<(T, U)> {
let TreapMap { ref mut tree, .. } = self;
tree::remove(tree, key).and_then(|entry| {
let Entry { key, value } = entry;
Some((key, value))
})
}
/// Checks if a key exists in the map.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert!(!map.contains_key(&0));
/// assert!(map.contains_key(&1));
/// ```
pub fn contains_key(&self, key: &T) -> bool {
self.get(key).is_some()
}
/// Returns an immutable reference to the value associated with a particular key. It will
/// return `None` if the key does not exist in the map.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert_eq!(map.get(&0), None);
/// assert_eq!(map.get(&1), Some(&1));
/// ```
pub fn get(&self, key: &T) -> Option<&U> {
tree::get(&self.tree, key).map(|entry| &entry.value)
}
/// Returns a mutable reference to the value associated with a particular key. Returns `None`
/// if such a key does not exist.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// *map.get_mut(&1).unwrap() = 2;
/// assert_eq!(map.get(&1), Some(&2));
/// ```
pub fn get_mut(&mut self, key: &T) -> Option<&mut U> {
tree::get_mut(&mut self.tree, key).map(|entry| &mut entry.value)
}
/// Returns the number of elements in the map.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert_eq!(map.len(), 1);
/// ```
pub fn len(&self) -> usize {
match self.tree {
None => 0,
Some(ref node) => node.len,
}
}
/// Returns `true` if the map is empty.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let map: TreapMap<u32, u32> = TreapMap::new();
/// assert!(map.is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Clears the map, removing all values.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(2, 2);
/// map.clear();
/// assert_eq!(map.is_empty(), true);
/// ```
pub fn clear(&mut self) {
self.tree = None;
}
/// Returns a key in the map that is less than or equal to a particular key. Returns `None` if
/// such a key does not exist.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert_eq!(map.floor(&0), None);
/// assert_eq!(map.floor(&2), Some(&1));
/// ```
pub fn floor(&self, key: &T) -> Option<&T> {
tree::floor(&self.tree, key).map(|entry| &entry.key)
}
/// Returns a key in the map that is greater than or equal to a particular key. Returns `None`
/// if such a key does not exist.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// assert_eq!(map.ceil(&0), Some(&1));
/// assert_eq!(map.ceil(&2), None);
/// ```
pub fn ceil(&self, key: &T) -> Option<&T> {
tree::ceil(&self.tree, key).map(|entry| &entry.key)
}
/// Returns the minimum key of the map. Returns `None` if the map is empty.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(3, 3);
/// assert_eq!(map.min(), Some(&1));
/// ```
pub fn min(&self) -> Option<&T> {
tree::min(&self.tree).map(|entry| &entry.key)
}
/// Returns the maximum key of the map. Returns `None` if the map is empty.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(3, 3);
/// assert_eq!(map.max(), Some(&3));
/// ```
pub fn max(&self) -> Option<&T> {
tree::max(&self.tree).map(|entry| &entry.key)
}
/// Splits the map and returns the right part of the map. If `inclusive` is true, then the map
/// will retain the given key if it exists. Otherwise, the right part of the map will contain
/// the key if it exists.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(2, 2);
/// map.insert(3, 3);
///
/// let split = map.split_off(&2, true);
/// assert_eq!(map[&1], 1);
/// assert_eq!(map[&2], 2);
/// assert_eq!(split[&3], 3);
/// ```
pub fn split_off(&mut self, key: &T, inclusive: bool) -> Self {
let TreapMap { ref mut tree, .. } = self;
let (mut split_node, ret) = tree::split(tree, key);
if inclusive {
tree::merge(tree, split_node);
TreapMap { tree: ret, rng: XorShiftRng::new_unseeded() }
} else {
tree::merge(&mut split_node, ret);
TreapMap { tree: split_node, rng: XorShiftRng::new_unseeded() }
}
}
/// Returns the union of two maps. If there is a key that is found in both `left` and `right`,
/// the union will contain the value associated with the key in `left`. The `+`
/// operator is implemented to take the union of two maps.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut n = TreapMap::new();
/// n.insert(1, 1);
/// n.insert(2, 2);
///
/// let mut m = TreapMap::new();
/// m.insert(2, 3);
/// m.insert(3, 3);
///
/// let union = TreapMap::union(n, m);
/// assert_eq!(
/// union.iter().collect::<Vec<(&u32, &u32)>>(),
/// vec![(&1, &1), (&2, &2), (&3, &3)],
/// );
/// ```
pub fn union(left: Self, right: Self) -> Self {
let TreapMap { tree: left_tree, rng } = left;
let TreapMap { tree: right_tree, .. } = right;
TreapMap { tree: tree::union(left_tree, right_tree, false), rng }
}
/// Returns the intersection of two maps. If there is a key that is found in both `left` and
/// `right`, the intersection will contain the value associated with the key in `left`.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut n = TreapMap::new();
/// n.insert(1, 1);
/// n.insert(2, 2);
///
/// let mut m = TreapMap::new();
/// m.insert(2, 3);
/// m.insert(3, 3);
///
/// let intersection = TreapMap::intersection(n, m);
/// assert_eq!(
/// intersection.iter().collect::<Vec<(&u32, &u32)>>(),
/// vec![(&2, &2)],
/// );
/// ```
pub fn intersection(left: Self, right: Self) -> Self {
let TreapMap { tree: left_tree, rng } = left;
TreapMap { tree: tree::intersection(left_tree, right.tree, false), rng }
}
/// Returns the difference of `left` and `right`. The returned map will contain all entries
/// that do not have a key in `right`. The `-` operator is implemented to take the difference
/// of two maps.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut n = TreapMap::new();
/// n.insert(1, 1);
/// n.insert(2, 2);
///
/// let mut m = TreapMap::new();
/// m.insert(2, 3);
/// m.insert(3, 3);
///
/// let difference = TreapMap::difference(n, m);
/// assert_eq!(
/// difference.iter().collect::<Vec<(&u32, &u32)>>(),
/// vec![(&1, &1)],
/// );
/// ```
pub fn difference(left: Self, right: Self) -> Self {
let TreapMap { tree: left_tree, rng } = left;
TreapMap { tree: tree::difference(left_tree, right.tree, false, false), rng }
}
/// Returns the symmetric difference of `left` and `right`. The returned map will contain all
/// entries that exist in one map, but not both maps.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut n = TreapMap::new();
/// n.insert(1, 1);
/// n.insert(2, 2);
///
/// let mut m = TreapMap::new();
/// m.insert(2, 3);
/// m.insert(3, 3);
///
/// let symmetric_difference = TreapMap::symmetric_difference(n, m);
/// assert_eq!(
/// symmetric_difference.iter().collect::<Vec<(&u32, &u32)>>(),
/// vec![(&1, &1), (&3, &3)],
/// );
/// ```
pub fn symmetric_difference(left: Self, right:Self) -> Self {
let TreapMap { tree: left_tree, rng } = left;
let TreapMap { tree: right_tree, .. } = right;
TreapMap { tree: tree::difference(left_tree, right_tree, false, true), rng }
}
/// Returns an iterator over the map. The iterator will yield key-value pairs using in-order
/// traversal.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(2, 2);
///
/// let mut iterator = map.iter();
/// assert_eq!(iterator.next(), Some((&1, &1)));
/// assert_eq!(iterator.next(), Some((&2, &2)));
/// assert_eq!(iterator.next(), None);
/// ```
pub fn iter(&self) -> TreapMapIter<T, U> {
TreapMapIter {
current: &self.tree,
stack: Vec::new(),
}
}
/// Returns a mutable iterator over the map. The iterator will yield key-value pairs using
/// in-order traversal.
///
/// # Examples
/// ```
/// use extended_collections::treap::TreapMap;
///
/// let mut map = TreapMap::new();
/// map.insert(1, 1);
/// map.insert(2, 2);
///
/// for (key, value) in &mut map {
/// *value += 1;
/// }
///
/// let mut iterator = map.iter_mut();
/// assert_eq!(iterator.next(), Some((&1, &mut 2)));
/// assert_eq!(iterator.next(), Some((&2, &mut 3)));
/// assert_eq!(iterator.next(), None);
/// ```
pub fn iter_mut(&mut self) -> TreapMapIterMut<T, U> {
TreapMapIterMut {
current: self.tree.as_mut().map(|node| &mut **node),
stack: Vec::new(),
}
}
}
impl<T, U> IntoIterator for TreapMap<T, U>
where
T: Ord,
{
type Item = (T, U);
type IntoIter = TreapMapIntoIter<T, U>;
fn into_iter(self) -> Self::IntoIter {
Self::IntoIter {
current: self.tree,
stack: Vec::new(),
}
}
}
impl<'a, T, U> IntoIterator for &'a TreapMap<T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a U);
    type IntoIter = TreapMapIter<'a, T, U>;
    // Enables `for (k, v) in &map`; delegates to `TreapMap::iter`.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T, U> IntoIterator for &'a mut TreapMap<T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a mut U);
    type IntoIter = TreapMapIterMut<'a, T, U>;
    // Enables `for (k, v) in &mut map`; delegates to `TreapMap::iter_mut`.
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
/// An owning iterator for `TreapMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields owned entries.
pub struct TreapMapIntoIter<T, U> {
    // Subtree still to be visited; descended on each `next` call.
    current: tree::Tree<T, U>,
    // Ancestors whose entry has not been yielded yet (in-order stack).
    stack: Vec<Node<T, U>>,
}
impl<T, U> Iterator for TreapMapIntoIter<T, U>
where
T: Ord,
{
type Item = (T, U);
fn next(&mut self) -> Option<Self::Item> {
while let Some(mut node) = self.current.take() {
self.current = node.left.take();
self.stack.push(*node);
}
self.stack.pop().map(|node| {
let Node {
entry: Entry { key, value },
right,
..
} = node;
self.current = right;
(key, value)
})
}
}
/// An iterator for `TreapMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields immutable references.
pub struct TreapMapIter<'a, T, U>
where
    T: 'a,
    U: 'a,
{
    // Subtree still to be visited; descended on each `next` call.
    current: &'a tree::Tree<T, U>,
    // Ancestors whose entry has not been yielded yet (in-order stack).
    stack: Vec<&'a Node<T, U>>,
}
impl<'a, T, U> Iterator for TreapMapIter<'a, T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a U);
    // In-order traversal: first descend the left spine, stacking each node,
    // then pop the next node and continue into its right subtree.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(ref node) = self.current {
            self.current = &node.left;
            self.stack.push(node);
        }
        self.stack.pop().map(|node| {
            let Node {
                entry: Entry { ref key, ref value },
                ref right,
                ..
            } = node;
            self.current = right;
            (key, value)
        })
    }
}
// A stack frame for the mutable iterator: the entry still to be yielded plus
// the (already unboxed) right subtree to visit afterwards.
type BorrowedIterEntryMut<'a, T, U> = Option<(&'a mut Entry<T, U>, BorrowedTreeMut<'a, T, U>)>;
// A mutably borrowed, unboxed subtree root.
type BorrowedTreeMut<'a, T, U> = Option<&'a mut Node<T, U>>;
/// A mutable iterator for `TreapMap<T, U>`.
///
/// This iterator traverses the elements of the map in-order and yields mutable references.
pub struct TreapMapIterMut<'a, T, U>
where
    T: 'a,
    U: 'a,
{
    // Subtree still to be descended.
    current: Option<&'a mut Node<T, U>>,
    // In-order stack of pending entries and their right subtrees.
    stack: Vec<BorrowedIterEntryMut<'a, T, U>>,
}
impl<'a, T, U> Iterator for TreapMapIterMut<'a, T, U>
where
    T: 'a + Ord,
    U: 'a,
{
    type Item = (&'a T, &'a mut U);
    // In-order traversal with mutable access. The left spine is split into
    // disjoint (&mut entry, right-subtree) pairs so each yielded borrow is
    // independent of the iterator's remaining state.
    fn next(&mut self) -> Option<Self::Item> {
        let TreapMapIterMut { ref mut current, ref mut stack } = self;
        while current.is_some() {
            stack.push(current.take().map(|node| {
                *current = node.left.as_mut().map(|node| &mut **node);
                (&mut node.entry, node.right.as_mut().map(|node| &mut **node))
            }));
        }
        // Stack frames are always `Some` here (pushed under the `is_some`
        // guard); `and_then` handles `None` defensively.
        stack.pop().and_then(|pair_opt| {
            match pair_opt {
                Some(pair) => {
                    let (entry, right) = pair;
                    let Entry { ref key, ref mut value } = entry;
                    *current = right;
                    Some((key, value))
                },
                None => None,
            }
        })
    }
}
impl<T, U> Default for TreapMap<T, U>
where
T: Ord,
{
fn default() -> Self {
Self::new()
}
}
impl<T, U> Add for TreapMap<T, U>
where
T: Ord,
{
type Output = TreapMap<T, U>;
fn add(self, other: TreapMap<T, U>) -> TreapMap<T, U> {
Self::union(self, other)
}
}
impl<T, U> Sub for TreapMap<T, U>
where
T: Ord,
{
type Output = TreapMap<T, U>;
fn sub(self, other: TreapMap<T, U>) -> TreapMap<T, U> {
Self::difference(self, other)
}
}
impl<'a, T, U> Index<&'a T> for TreapMap<T, U>
where
    T: Ord,
{
    type Output = U;
    /// Returns a reference to the value associated with `key`.
    ///
    /// # Panics
    /// Panics if `key` is not present in the map.
    fn index(&self, key: &T) -> &Self::Output {
        self.get(key).expect("Key does not exist.")
    }
}
impl<'a, T, U> IndexMut<&'a T> for TreapMap<T, U>
where
    T: Ord,
{
    /// Returns a mutable reference to the value associated with `key`.
    ///
    /// # Panics
    /// Panics if `key` is not present in the map.
    fn index_mut(&mut self, key: &T) -> &mut Self::Output {
        self.get_mut(key).expect("Key does not exist.")
    }
}
#[cfg(test)]
mod tests {
    // Unit tests for `TreapMap`: basic operations, ordered queries
    // (min/max/floor/ceil), split, the four set operations, and the three
    // iterator flavours.
    use super::TreapMap;
    #[test]
    fn test_len_empty() {
        let map: TreapMap<u32, u32> = TreapMap::new();
        assert_eq!(map.len(), 0);
    }
    #[test]
    fn test_is_empty() {
        let map: TreapMap<u32, u32> = TreapMap::new();
        assert!(map.is_empty());
    }
    #[test]
    fn test_min_max_empty() {
        let map: TreapMap<u32, u32> = TreapMap::new();
        assert_eq!(map.min(), None);
        assert_eq!(map.max(), None);
    }
    #[test]
    fn test_insert() {
        let mut map = TreapMap::new();
        assert_eq!(map.insert(1, 1), None);
        assert!(map.contains_key(&1));
        assert_eq!(map.get(&1), Some(&1));
    }
    #[test]
    fn test_insert_replace() {
        // Re-inserting a key returns the previously stored entry.
        let mut map = TreapMap::new();
        assert_eq!(map.insert(1, 1), None);
        assert_eq!(map.insert(1, 3), Some((1, 1)));
        assert_eq!(map.get(&1), Some(&3));
    }
    #[test]
    fn test_remove() {
        let mut map = TreapMap::new();
        map.insert(1, 1);
        assert_eq!(map.remove(&1), Some((1, 1)));
        assert!(!map.contains_key(&1));
    }
    #[test]
    fn test_min_max() {
        let mut map = TreapMap::new();
        map.insert(1, 1);
        map.insert(3, 3);
        map.insert(5, 5);
        assert_eq!(map.min(), Some(&1));
        assert_eq!(map.max(), Some(&5));
    }
    #[test]
    fn test_get_mut() {
        let mut map = TreapMap::new();
        map.insert(1, 1);
        {
            let value = map.get_mut(&1);
            *value.unwrap() = 3;
        }
        assert_eq!(map.get(&1), Some(&3));
    }
    #[test]
    fn test_floor_ceil() {
        // floor = greatest key <= query; ceil = least key >= query.
        let mut map = TreapMap::new();
        map.insert(1, 1);
        map.insert(3, 3);
        map.insert(5, 5);
        assert_eq!(map.floor(&0), None);
        assert_eq!(map.floor(&2), Some(&1));
        assert_eq!(map.floor(&4), Some(&3));
        assert_eq!(map.floor(&6), Some(&5));
        assert_eq!(map.ceil(&0), Some(&1));
        assert_eq!(map.ceil(&2), Some(&3));
        assert_eq!(map.ceil(&4), Some(&5));
        assert_eq!(map.ceil(&6), None);
    }
    #[test]
    fn test_split_off_inclusive() {
        // `inclusive = true` keeps the split key in the original map.
        let mut map = TreapMap::new();
        map.insert(1, 1);
        map.insert(2, 2);
        map.insert(3, 3);
        let split = map.split_off(&2, true);
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &1), (&2, &2)],
        );
        assert_eq!(
            split.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&3, &3)],
        );
    }
    #[test]
    fn test_split_off_not_inclusive() {
        // `inclusive = false` moves the split key into the returned map.
        let mut map = TreapMap::new();
        map.insert(1, 1);
        map.insert(2, 2);
        map.insert(3, 3);
        let split = map.split_off(&2, false);
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &1)],
        );
        assert_eq!(
            split.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&2, &2), (&3, &3)],
        );
    }
    #[test]
    fn test_union() {
        // On the shared key 3 the left map's value (3, not 5) wins.
        let mut n = TreapMap::new();
        n.insert(1, 1);
        n.insert(2, 2);
        n.insert(3, 3);
        let mut m = TreapMap::new();
        m.insert(3, 5);
        m.insert(4, 4);
        m.insert(5, 5);
        let union = n + m;
        assert_eq!(
            union.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &1), (&2, &2), (&3, &3), (&4, &4), (&5, &5)],
        );
        assert_eq!(union.len(), 5);
    }
    #[test]
    fn test_intersection() {
        let mut n = TreapMap::new();
        n.insert(1, 1);
        n.insert(2, 2);
        n.insert(3, 3);
        let mut m = TreapMap::new();
        m.insert(3, 5);
        m.insert(4, 4);
        m.insert(5, 5);
        let intersection = TreapMap::intersection(n, m);
        assert_eq!(
            intersection.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&3, &3)],
        );
        assert_eq!(intersection.len(), 1);
    }
    #[test]
    fn test_difference() {
        let mut n = TreapMap::new();
        n.insert(1, 1);
        n.insert(2, 2);
        n.insert(3, 3);
        let mut m = TreapMap::new();
        m.insert(3, 5);
        m.insert(4, 4);
        m.insert(5, 5);
        let difference = n - m;
        assert_eq!(
            difference.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &1), (&2, &2)],
        );
        assert_eq!(difference.len(), 2);
    }
    #[test]
    fn test_symmetric_difference() {
        let mut n = TreapMap::new();
        n.insert(1, 1);
        n.insert(2, 2);
        n.insert(3, 3);
        let mut m = TreapMap::new();
        m.insert(3, 5);
        m.insert(4, 4);
        m.insert(5, 5);
        let symmetric_difference = TreapMap::symmetric_difference(n, m);
        assert_eq!(
            symmetric_difference.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &1), (&2, &2), (&4, &4), (&5, &5)],
        );
        assert_eq!(symmetric_difference.len(), 4);
    }
    #[test]
    fn test_into_iter() {
        // Iteration order is by key, regardless of insertion order.
        let mut map = TreapMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        assert_eq!(
            map.into_iter().collect::<Vec<(u32, u32)>>(),
            vec![(1, 2), (3, 4), (5, 6)],
        );
    }
    #[test]
    fn test_iter() {
        let mut map = TreapMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &2), (&3, &4), (&5, &6)],
        );
    }
    #[test]
    fn test_iter_mut() {
        let mut map = TreapMap::new();
        map.insert(1, 2);
        map.insert(5, 6);
        map.insert(3, 4);
        for (_, value) in &mut map {
            *value += 1;
        }
        assert_eq!(
            map.iter().collect::<Vec<(&u32, &u32)>>(),
            vec![(&1, &3), (&3, &5), (&5, &7)],
        );
    }
}
|
//! JSON generation
use {items, utils};
use serde_json;
use std::{self, io};
/// The result type for JSON errors.
pub type JsonResult<T> = std::result::Result<T, JsonError>;
/// Errors that may occur during JSON operations.
#[derive(Debug)]
pub enum JsonError {
    /// Creating the directory that holds the JSON abi file failed.
    FailedToCreateDirectory(io::Error),
    /// Creating the JSON abi file itself failed.
    FailedToCreateJsonFile(io::Error),
    /// Serializing the abi into the JSON file failed.
    FailedToWriteJsonAbiFile(serde_json::Error),
}
// Constructor shorthands used with `map_err` at the call sites below.
impl JsonError {
    /// Returns a JSON error indicating that the creation of the
    /// directory that will contain the JSON file failed.
    pub fn failed_to_create_dir(err: io::Error) -> Self {
        JsonError::FailedToCreateDirectory(err)
    }
    /// Returns a JSON error indicating that the creation of the JSON
    /// abi file failed.
    pub fn failed_to_create_json_file(err: io::Error) -> Self {
        JsonError::FailedToCreateJsonFile(err)
    }
    /// Returns a JSON error indicating that the writing of the JSON
    /// abi file failed.
    pub fn failed_to_write_json_abi_file(err: serde_json::Error) -> Self {
        JsonError::FailedToWriteJsonAbiFile(err)
    }
}
impl std::fmt::Display for JsonError {
    /// Human-readable description including the underlying cause.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        match *self {
            JsonError::FailedToCreateDirectory(ref err) => {
                write!(f, "failed to create directory for JSON abi file: {:?}", err)
            }
            JsonError::FailedToCreateJsonFile(ref err) => {
                write!(f, "failed to create JSON abi file: {:?}", err)
            }
            JsonError::FailedToWriteJsonAbiFile(ref err) => {
                write!(f, "failed to write JSON abi file: {:?}", err)
            }
        }
    }
}
impl std::error::Error for JsonError {
    // NOTE(review): `description` and `cause` are deprecated in modern Rust
    // in favour of `Display` and `source`; kept as-is for compatibility with
    // the (2015-edition) toolchain this crate appears to target.
    fn description(&self) -> &str {
        match self {
            JsonError::FailedToCreateDirectory(_) => {
                "failed to create directory for the JSON abi file"
            }
            JsonError::FailedToCreateJsonFile(_) => "failed to create JSON abi file",
            JsonError::FailedToWriteJsonAbiFile(_) => "failed to write JSON abi file",
        }
    }
    fn cause(&self) -> Option<&std::error::Error> {
        match self {
            JsonError::FailedToCreateDirectory(err) => Some(err),
            JsonError::FailedToCreateJsonFile(err) => Some(err),
            JsonError::FailedToWriteJsonAbiFile(err) => Some(err),
        }
    }
}
/// Writes the generated abi JSON file for `intf` into
/// `<CARGO_TARGET_DIR or .>/target/json/<interface>.json`.
///
/// # Errors
///
/// Returns a [`JsonError`] if the directory or file cannot be created, or if
/// serialization fails.
///
/// # Note
///
/// The generated JSON information may be used by offline tools around WebJS for example.
pub fn write_json_abi(intf: &items::Interface) -> JsonResult<()> {
    use std::{env, fs, path};
    let target = {
        // `unwrap_or_else` avoids allocating the fallback when the env var
        // is set; the closure form is the lazy variant of `unwrap_or`.
        let mut target =
            path::PathBuf::from(env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| ".".to_owned()));
        target.push("target");
        target.push("json");
        fs::create_dir_all(&target).map_err(JsonError::failed_to_create_dir)?;
        target.push(format!("{}.json", intf.name()));
        target
    };
    let mut f = fs::File::create(target).map_err(JsonError::failed_to_create_json_file)?;
    let abi: Abi = intf.into();
    serde_json::to_writer_pretty(&mut f, &abi).map_err(JsonError::failed_to_write_json_abi_file)?;
    Ok(())
}
// Serializable mirror of the Solidity/Ethereum-style JSON ABI format.
#[derive(Serialize, Debug)]
pub struct FunctionEntry {
    pub name: String,
    #[serde(rename = "inputs")]
    pub arguments: Vec<Argument>,
    pub outputs: Vec<Argument>,
    pub constant: bool,
    pub payable: bool,
}
// A named, typed function input or output.
#[derive(Serialize, Debug)]
pub struct Argument {
    pub name: String,
    #[serde(rename = "type")]
    pub type_: String,
}
// A constructor has inputs only — no name or outputs.
#[derive(Serialize, Debug)]
pub struct ConstructorEntry {
    #[serde(rename = "inputs")]
    pub arguments: Vec<Argument>,
}
// Tagged union serialized with a `"type"` discriminator field.
#[derive(Serialize, Debug)]
#[serde(tag = "type")]
pub enum AbiEntry {
    #[serde(rename = "event")]
    Event(EventEntry),
    #[serde(rename = "function")]
    Function(FunctionEntry),
    #[serde(rename = "constructor")]
    Constructor(ConstructorEntry),
}
// An event parameter; `indexed` marks topic (vs. data) parameters.
#[derive(Serialize, Debug)]
pub struct EventInput {
    pub name: String,
    #[serde(rename = "type")]
    pub type_: String,
    pub indexed: bool,
}
#[derive(Serialize, Debug)]
pub struct EventEntry {
    pub name: String,
    pub inputs: Vec<EventInput>,
}
// The full abi: a flat JSON array of entries.
#[derive(Serialize, Debug)]
pub struct Abi(pub Vec<AbiEntry>);
impl<'a> From<&'a items::Interface> for Abi {
    /// Collects the interface's events, functions, and (optional)
    /// constructor into a flat abi entry list.
    fn from(intf: &items::Interface) -> Self {
        let mut entries = Vec::new();
        for item in intf.items() {
            match *item {
                items::Item::Event(ref event) => entries.push(AbiEntry::Event(event.into())),
                items::Item::Signature(ref signature) => {
                    entries.push(AbiEntry::Function(signature.into()))
                }
                _ => {}
            }
        }
        if let Some(ctor) = intf.constructor() {
            entries.push(AbiEntry::Constructor(FunctionEntry::from(ctor).into()));
        }
        Abi(entries)
    }
}
impl<'a> From<&'a items::Event> for EventEntry {
    /// Flattens an event into its abi entry: indexed parameters first, then
    /// data parameters, each preserving declaration order.
    fn from(item: &items::Event) -> Self {
        let mut inputs = Vec::new();
        for &(ref pat, ref ty) in item.indexed.iter() {
            inputs.push(EventInput {
                name: quote! { #pat }.to_string(),
                type_: utils::canonicalize_type(ty),
                indexed: true,
            });
        }
        for &(ref pat, ref ty) in item.data.iter() {
            inputs.push(EventInput {
                name: quote! { #pat }.to_string(),
                type_: utils::canonicalize_type(ty),
                indexed: false,
            });
        }
        EventEntry {
            name: item.name.to_string(),
            inputs,
        }
    }
}
impl<'a> From<&'a items::Signature> for FunctionEntry {
    /// Converts a function signature into its abi entry; unnamed return
    /// values are labelled `returnValue0`, `returnValue1`, ...
    fn from(item: &items::Signature) -> Self {
        let arguments = item
            .arguments
            .iter()
            .map(|&(ref pat, ref ty)| Argument {
                name: quote! { #pat }.to_string(),
                type_: utils::canonicalize_type(ty),
            })
            .collect();
        let outputs = item
            .return_types
            .iter()
            .enumerate()
            .map(|(idx, ty)| Argument {
                name: format!("returnValue{}", idx),
                type_: utils::canonicalize_type(ty),
            })
            .collect();
        FunctionEntry {
            name: item.name.to_string(),
            arguments,
            outputs,
            constant: item.is_constant,
            payable: item.is_payable,
        }
    }
}
impl From<FunctionEntry> for ConstructorEntry {
fn from(func: FunctionEntry) -> Self {
ConstructorEntry { arguments: func.arguments }
}
}
|
// NOTE(review): this looks like svd2rust-generated register-access code —
// prefer regenerating from the SVD over hand-editing. TODO confirm.
#[doc = "Register `BRR` writer"]
pub type W = crate::W<BRR_SPEC>;
#[doc = "Port x Reset bit y (y= 0 .. 15)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BR0W_AW {
    #[doc = "0: No action on the corresponding ODx bit"]
    NoAction = 0,
    #[doc = "1: Reset the ODx bit"]
    Reset = 1,
}
// `bool` conversion used by the generic bit-writer plumbing.
impl From<BR0W_AW> for bool {
    #[inline(always)]
    fn from(variant: BR0W_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `BR0` writer - Port x Reset bit y (y= 0 .. 15)"]
pub type BR0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, BR0W_AW>;
// Named-variant helpers for the BR0..BR15 writer proxy.
impl<'a, REG, const O: u8> BR0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No action on the corresponding ODx bit"]
    #[inline(always)]
    pub fn no_action(self) -> &'a mut crate::W<REG> {
        self.variant(BR0W_AW::NoAction)
    }
    #[doc = "Reset the ODx bit"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(BR0W_AW::Reset)
    }
}
// BR1..BR15 share BR0's writer type; only the const bit offset `O` differs.
#[doc = "Field `BR1` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR1_W;
#[doc = "Field `BR2` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR2_W;
#[doc = "Field `BR3` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR3_W;
#[doc = "Field `BR4` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR4_W;
#[doc = "Field `BR5` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR5_W;
#[doc = "Field `BR6` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR6_W;
#[doc = "Field `BR7` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR7_W;
#[doc = "Field `BR8` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR8_W;
#[doc = "Field `BR9` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR9_W;
#[doc = "Field `BR10` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR10_W;
#[doc = "Field `BR11` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR11_W;
#[doc = "Field `BR12` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR12_W;
#[doc = "Field `BR13` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR13_W;
#[doc = "Field `BR14` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR14_W;
#[doc = "Field `BR15` writer - Port x Reset bit y (y= 0 .. 15)"]
pub use BR0_W as BR15_W;
impl W {
    // One writer proxy per BRR bit; per the field docs above, writing `1`
    // resets the corresponding ODx bit and `0` is a no-op.
    #[doc = "Bit 0 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br0(&mut self) -> BR0_W<BRR_SPEC, 0> {
        BR0_W::new(self)
    }
    #[doc = "Bit 1 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br1(&mut self) -> BR1_W<BRR_SPEC, 1> {
        BR1_W::new(self)
    }
    #[doc = "Bit 2 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br2(&mut self) -> BR2_W<BRR_SPEC, 2> {
        BR2_W::new(self)
    }
    #[doc = "Bit 3 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br3(&mut self) -> BR3_W<BRR_SPEC, 3> {
        BR3_W::new(self)
    }
    #[doc = "Bit 4 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br4(&mut self) -> BR4_W<BRR_SPEC, 4> {
        BR4_W::new(self)
    }
    #[doc = "Bit 5 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br5(&mut self) -> BR5_W<BRR_SPEC, 5> {
        BR5_W::new(self)
    }
    #[doc = "Bit 6 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br6(&mut self) -> BR6_W<BRR_SPEC, 6> {
        BR6_W::new(self)
    }
    #[doc = "Bit 7 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br7(&mut self) -> BR7_W<BRR_SPEC, 7> {
        BR7_W::new(self)
    }
    #[doc = "Bit 8 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br8(&mut self) -> BR8_W<BRR_SPEC, 8> {
        BR8_W::new(self)
    }
    #[doc = "Bit 9 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br9(&mut self) -> BR9_W<BRR_SPEC, 9> {
        BR9_W::new(self)
    }
    #[doc = "Bit 10 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br10(&mut self) -> BR10_W<BRR_SPEC, 10> {
        BR10_W::new(self)
    }
    #[doc = "Bit 11 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br11(&mut self) -> BR11_W<BRR_SPEC, 11> {
        BR11_W::new(self)
    }
    #[doc = "Bit 12 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br12(&mut self) -> BR12_W<BRR_SPEC, 12> {
        BR12_W::new(self)
    }
    #[doc = "Bit 13 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br13(&mut self) -> BR13_W<BRR_SPEC, 13> {
        BR13_W::new(self)
    }
    #[doc = "Bit 14 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br14(&mut self) -> BR14_W<BRR_SPEC, 14> {
        BR14_W::new(self)
    }
    #[doc = "Bit 15 - Port x Reset bit y (y= 0 .. 15)"]
    #[inline(always)]
    #[must_use]
    pub fn br15(&mut self) -> BR15_W<BRR_SPEC, 15> {
        BR15_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "GPIO port bit reset register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`brr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct BRR_SPEC;
// 32-bit register cell.
impl crate::RegisterSpec for BRR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`brr::W`](W) writer structure"]
impl crate::Writable for BRR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets BRR to value 0"]
impl crate::Resettable for BRR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Gameboard controller.
use piston::input::GenericEvent;
use taquin::state::State;
use taquin::reducer::Reducer;
use std::collections::VecDeque;
use time::PreciseTime;
/// Handles events for Fifteen puzzle game.
pub struct GameboardController {
    /// Stores the gameboard state.
    pub gameboard: State,
    /// Cell clicked by the player, as `[cell_x, cell_y]`, if any.
    pub selected_cell: Option<[usize; 2]>,
    /// Last known mouse cursor position, in window coordinates.
    cursor_pos: [f64; 2],
    /// True while the controller is auto-playing the computed solution.
    solving: bool,
    /// Idle time accumulated since the last move (seconds).
    time_since_last_move: f64,
    /// Queue of moves (presumably `(row delta, column delta)` — `Down`
    /// maps to `(1, 0)`) needed to finish the game.
    calculated_moves: VecDeque<(i32, i32)>,
}
impl GameboardController {
    /// Creates a new gameboard controller.
    pub fn new(state: State) -> GameboardController {
        GameboardController {
            gameboard: state,
            selected_cell: None,
            cursor_pos: [0.0; 2],
            solving: false,
            time_since_last_move: 0.0,
            calculated_moves: VecDeque::new(),
        }
    }
    /// Handles a window event.
    ///
    /// `pos` is the board's upper-left corner in window coordinates and
    /// `size` its side length; both are used to map clicks to cells.
    pub fn event<E: GenericEvent>(&mut self, pos: [f64; 2], size: f64, e: &E) {
        use piston::input::{Button, Key, MouseButton};
        if let Some(idle) = e.idle_args() {
            self.time_since_last_move += idle.dt;
        }
        if let Some(pos) = e.mouse_cursor_args() {
            self.cursor_pos = pos;
        }
        if let Some(Button::Mouse(MouseButton::Left)) = e.press_args() {
            // Find coordinates relative to upper left corner.
            let x = self.cursor_pos[0] - pos[0];
            let y = self.cursor_pos[1] - pos[1];
            // Check that coordinates are inside board boundaries.
            if x >= 0.0 && x <= size && y >= 0.0 && y <= size {
                // Compute the cell position.
                let cell_x = (x / size * ::SIZE.1 as f64) as usize;
                let cell_y = (y / size * ::SIZE.0 as f64) as usize;
                self.selected_cell = Some([cell_x, cell_y]);
            }
        }
        // Translate key presses into a move; Space toggles auto-solving and
        // S shuffles the board (both yield the no-op move (0, 0)).
        let mut mv = match e.press_args() {
            Some(Button::Keyboard(key)) => {
                match key {
                    Key::Down => (1, 0),
                    Key::Up => (-1, 0),
                    Key::Left => (0, -1),
                    Key::Right => (0, 1),
                    Key::Space => {
                        self.solve();
                        self.solving = !self.solving;
                        (0, 0)
                    }
                    Key::S => {
                        self.gameboard.shuffle(5000);
                        (0, 0)
                    }
                    _ => (0, 0),
                }
            }
            _ => (0, 0),
        };
        if self.solving {
            mv = self.get_next_solved();
            if mv == (0, 0) {
                // BUG FIX: this was `self.solving != self.solving;` — a
                // comparison whose result was discarded, so the flag never
                // cleared. Stop auto-solving once no moves remain.
                self.solving = false;
            }
        }
        if self.gameboard.validate(mv) {
            self.gameboard.modify(mv);
        }
    }
    /// Returns the next move of the computed solution, or `(0, 0)` when
    /// nothing should be played.
    fn get_next_solved(&mut self) -> (i32, i32) {
        // NOTE(review): with a 0.0 threshold this gate never blocks and the
        // subtraction is a no-op, so moves play on every event; presumably a
        // positive per-move delay was intended — TODO confirm and tune.
        if self.time_since_last_move < 0.0 {
            (0, 0)
        } else {
            self.time_since_last_move -= 0.0;
            self.calculated_moves.pop_front().unwrap_or((0, 0))
        }
    }
    /// Runs the solver on the current board, stores the resulting move
    /// queue, and logs how long the search took.
    fn solve(&mut self) {
        let start = PreciseTime::now();
        // `unwrap_or_default` replaces the eager `unwrap_or(VecDeque::new())`.
        self.calculated_moves = Reducer::new(self.gameboard.clone())
            .reduce()
            .unwrap_or_default();
        let end = PreciseTime::now();
        let diff = start.to(end).num_microseconds();
        println!("Found solution of {:?} moves in {:?} us", self.calculated_moves.len(),diff.unwrap());
    }
}
|
use hacspec_hmac::*;
use hacspec_lib::prelude::*;
// One RFC 4231 known-answer test vector; all fields are hex-encoded.
struct HMACTestVectors<'a> {
    // HMAC key (hex).
    key: &'a str,
    // Message text (hex).
    txt: &'a str,
    // Expected 32-byte MAC output (hex).
    expected: &'a str,
}
// Known-answer tests from https://tools.ietf.org/html/rfc4231 (test cases
// 1-4 and 6; the expected values match the SHA-256 column).
const HMAC_KAT: [HMACTestVectors; 5] = [
    HMACTestVectors {
        key: "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
        txt: "4869205468657265",
        expected: "b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7",
    },
    HMACTestVectors {
        key: "4a656665",
        txt: "7768617420646f2079612077616e7420666f72206e6f7468696e673f",
        expected: "5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843",
    },
    HMACTestVectors {
        key: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        txt: "dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd",
        expected: "773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe",
    },
    HMACTestVectors {
        key: "0102030405060708090a0b0c0d0e0f10111213141516171819",
        txt: "cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd",
        expected: "82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b",
    },
    HMACTestVectors {
        key: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        txt: "54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374",
        expected: "60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54",
    }
];
#[test]
fn test_hmac_kat() {
    // Run every RFC 4231 vector through `hmac` and compare hex output.
    for vector in HMAC_KAT.iter() {
        let key = ByteSeq::from_hex(vector.key);
        let msg = ByteSeq::from_hex(vector.txt);
        assert_eq!(vector.expected, hmac(&key, &msg).to_hex());
    }
}
|
use std::env;
use std::path::PathBuf;
fn main() {
    // Rebuild when the vendored laszip sources change.
    println!("cargo:rerun-if-changed=laszip/src/laszip_dll.cpp");
    println!("cargo:rerun-if-changed=laszip/dll/laszip_api.c");
    println!("cargo:rerun-if-changed=laszip/include/laszip/laszip_api.h");
    // Build the laszip library via CMake. LASZIP_BUILD_STATIC is "off",
    // so this produces a shared (not static) library.
    let dst = cmake::Config::new("laszip")
        .define("LASZIP_BUILD_STATIC", "off")
        .build();
    // tell cargo to tell rustc where to find the compiled laszip
    println!("cargo:rustc-link-search=native={}/lib", dst.display());
    // Link against laszip_api — dynamically, matching the shared build above
    // (the previous comment incorrectly said "statically").
    println!("cargo:rustc-link-lib=laszip_api");
    // Tell cargo to invalidate the built crate whenever the wrapper changes
    println!("cargo:rerun-if-changed=wrapper.h");
    // Generate Rust FFI bindings from the wrapper header plus the version
    // header produced by the CMake build.
    let bindings = bindgen::Builder::default()
        .header("wrapper.h")
        .header(format!(
            "{}/include/laszip/laszip_api_version.h",
            dst.display()
        ))
        .generate()
        .expect("Unable to generate bindings");
    // Write the bindings to the $OUT_DIR/bindings.rs file.
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}
|
#![allow(dead_code, unused_imports, unused_variables)]
use std::collections::{BTreeSet,HashMap,HashSet};
// Puzzle input: a whitespace-separated list of numbers (license-tree encoding).
const DATA: &'static str = include_str!("../../../data/08");
// The parsed puzzle input.
type Input = Vec<usize>;
/// One node of the license tree: a header of `child_count, metadata_count`
/// followed by the encoded children and then the metadata entries
/// (Advent of Code 2018, day 8 encoding).
#[derive(Debug)]
struct Node {
    children: Vec<Node>,
    metadata: Vec<usize>,
}
impl Node {
    /// Parses one node from the flat number stream.
    ///
    /// Returns `None` when the stream is exhausted before a header starts.
    ///
    /// # Panics
    /// Panics if a header's child count is present but its metadata count
    /// is missing.
    fn parse(input: &mut impl Iterator<Item = usize>) -> Option<Node> {
        // `?` replaces the original match-and-return-None; also dropped the
        // redundant `as usize` cast (the items are already `usize`).
        let child_count = input.next()?;
        let metadata_count = input.next().expect("No metadata");
        let mut node = Node {
            children: Vec::with_capacity(child_count),
            metadata: Vec::with_capacity(metadata_count),
        };
        for _ in 0..child_count {
            // A truncated child yields nothing; malformed input produces a
            // partial tree, matching the original behavior.
            node.children.extend(Node::parse(input));
        }
        node.metadata.extend(input.take(metadata_count));
        Some(node)
    }
    /// Part 1 checksum: sum of every metadata entry in the whole subtree.
    fn metadata_sum_check(&self) -> usize {
        let own: usize = self.metadata.iter().sum();
        own + self
            .children
            .iter()
            .map(Node::metadata_sum_check)
            .sum::<usize>()
    }
    /// Part 2 checksum: for leaves, the metadata sum; otherwise each metadata
    /// entry is a 1-based child index (0 or out-of-range counts as 0).
    fn weird_indexing_sum_check(&self) -> usize {
        if self.children.is_empty() {
            self.metadata.iter().sum()
        } else {
            self.metadata
                .iter()
                .filter(|&&idx| idx > 0)
                .map(|&idx| {
                    self.children
                        .get(idx - 1)
                        .map_or(0, Node::weird_indexing_sum_check)
                })
                .sum()
        }
    }
}
fn main() {
    // Parse the flat number list into the license tree, then run both parts.
    let numbers = read_input();
    let mut stream = numbers.iter().cloned();
    let tree = Node::parse(&mut stream).expect("Unable to parse");
    println!("Part 01: {}", part1(&tree));
    println!("Part 02: {}", part2(&tree));
}
/// Splits the embedded puzzle data into numbers.
fn read_input() -> Input {
    DATA.trim()
        .split(' ')
        .map(|token| token.parse().expect("Unable to parse number"))
        .collect()
}
fn part1(root: &Node) -> usize {
Node::metadata_sum_check(root)
}
fn part2(root: &Node) -> usize {
Node::weird_indexing_sum_check(root)
}
|
use std;
use std::error::Error;
use std::collections::HashMap;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
// Appears to model one entry of a Docker container listing — TODO confirm
// against the targeted Engine API version. (Labels, HostConfig were added
// later per the original note.)
pub struct Container {
    pub id: String,
    pub image: String,
    pub status: String,
    pub command: String,
    pub created: u64,
    pub names: Vec<String>,
    pub ports: Vec<Port>,
    #[serde(rename = "SizeRW")]
    pub size_rw: Option<u64>, // I guess it is optional on Mac.
    pub size_root_fs: Option<u64>,
    pub labels: Option<HashMap<String, String>>,
    pub host_config: HostConfig
}
// A published container port mapping.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct Port {
    #[serde(rename = "IP")]
    pub ip: Option<String>,
    pub private_port: u64,
    pub public_port: Option<u64>,
    #[serde(rename = "type")]
    pub ty: String
}
// Subset of the container's host configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct HostConfig {
    pub network_mode: String
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
// Appears to model a container-inspect response; several fields of the raw
// payload are intentionally omitted (see comments below).
pub struct ContainerInfo {
    pub app_armor_profile: String,
    pub args: Vec<String>,
    pub config: Config,
    pub created: String,
    pub driver: String,
    // ExecIDs
    // GraphDriver
    // HostConfig
    pub hostname_path: String,
    pub hosts_path: String,
    pub id: String,
    pub image: String,
    pub log_path: String,
    pub mount_label: String,
    pub mounts: Vec<Mount>,
    pub name: String,
    pub network_settings: NetworkSettings,
    pub path: String,
    pub process_label: String,
    pub resolv_conf_path: String,
    pub restart_count: u64,
    pub state: State,
}
/// This type represents a `struct{}` in the Go code.
pub type UnspecifiedObject = HashMap<String, String>;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
// The container's configuration section of an inspect response.
pub struct Config {
    pub attach_stderr: bool,
    pub attach_stdin: bool,
    pub attach_stdout: bool,
    // TODO: Verify that this is never just a `String`.
    // pub Cmd: Vec<String>,
    #[serde(rename = "Domainname")]
    pub domain_name: String,
    // TODO: The source says `Option<String>` but I've seen
    // `Option<Vec<String>>` on the wire. Ignore until we figure it out.
    // pub Entrypoint: Option<Vec<String>>,
    pub env: Option<Vec<String>>,
    pub exposed_ports: Option<HashMap<String, UnspecifiedObject>>,
    pub hostname: String,
    pub image: String,
    pub labels: HashMap<String, String>,
    // TODO: We don't know exacly what this vec contains.
    // pub OnBuild: Option<Vec<???>>,
    pub open_stdin: bool,
    pub stdin_once: bool,
    pub tty: bool,
    // NOTE(review): `lsnr` serializes as `Lsnr`, which does not look like a
    // known Docker config field — verify against the API schema.
    pub lsnr: String,
    pub volumes: Option<HashMap<String, UnspecifiedObject>>,
    pub working_dir: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
/// One mount point of an inspected container.
pub struct Mount {
    // Name (optional)
    // Driver (optional)
    pub source: String,
    pub destination: String,
    pub mode: String,
    #[serde(rename = "RW")]
    pub rw: bool,
    /// Mount propagation mode. BUG FIX: the Docker API spells this field
    /// `Propagation`, but `rename_all` produced `Propogration` from the
    /// misspelled field name, so it never matched the wire format. The Rust
    /// field keeps its historical name for source compatibility.
    #[serde(rename = "Propagation")]
    pub propogration: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
/// Network state of an inspected container, including per-network details.
pub struct NetworkSettings {
    pub bridge: String,
    #[serde(rename = "EndpointID")]
    pub endpoint_id: String,
    pub gateway: String,
    #[serde(rename = "GlobalIPv6Address")]
    pub global_ipv6_address: String,
    #[serde(rename = "GlobalIPv6PrefixLen")]
    pub global_ipv6_prefix_len: u32,
    pub hairpin_mode: bool,
    #[serde(rename = "IPAddress")]
    pub ip_address: String,
    #[serde(rename = "IPPrefixLen")]
    pub ip_prefix_len: u32,
    #[serde(rename = "IPv6Gateway")]
    pub ipv6_gateway: String,
    #[serde(rename = "LinkLocalIPv6Address")]
    pub link_local_ipv6_address: String,
    #[serde(rename = "LinkLocalIPv6PrefixLen")]
    pub link_local_ipv6_prefix_len: u32,
    pub mac_address: String,
    // Presumably keyed by network name — confirm against the API output.
    pub networks: HashMap<String, Network>,
    // A port may be present with no bindings, hence the inner Option.
    pub ports: Option<HashMap<String, Option<Vec<PortMapping>>>>,
    #[serde(rename = "SandboxID")]
    pub sandbox_id: String,
    pub sandbox_key: String,
    // These two are null in the current output.
    // pub SecondaryIPAddresses: ,
    // pub SecondaryIPv6Addresses: ,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
/// Per-network settings within [`NetworkSettings::networks`].
pub struct Network {
    pub aliases: Option<Vec<String>>,
    #[serde(rename = "EndpointID")]
    pub endpoint_id: String,
    pub gateway: String,
    #[serde(rename = "GlobalIPv6Address")]
    pub global_ipv6_address: String,
    #[serde(rename = "GlobalIPv6PrefixLen")]
    pub global_ipv6_prefix_len: u32,
    // Not yet modelled:
    // pub IPAMConfig: ,
    #[serde(rename = "IPAddress")]
    pub ip_address: String,
    #[serde(rename = "IPPrefixLen")]
    pub ip_prefix_len: u32,
    #[serde(rename = "IPv6Gateway")]
    pub ipv6_gateway: String,
    // Not yet modelled:
    // pub Links:
    pub mac_address: String,
    #[serde(rename = "NetworkID")]
    pub network_id: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
/// One host-side binding of a container port.
pub struct PortMapping {
    // Both fields arrive as strings from the API (even the port number).
    pub host_ip: String,
    pub host_port: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
/// Runtime state of an inspected container.
pub struct State {
    pub status: String,
    pub running: bool,
    pub paused: bool,
    pub restarting: bool,
    #[serde(rename = "OOMKilled")]
    pub oom_killed: bool,
    pub dead: bool,
    // I don't know whether PIDs can be negative here. They're normally
    // positive, but sometimes negative PIDs are used in certain APIs.
    pub pid: i64,
    pub exit_code: i64,
    pub error: String,
    // Timestamps are kept in the API's string representation.
    pub started_at: String,
    pub finished_at: String
}
impl std::fmt::Display for Container {
    /// A container displays as its id.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        // Delegate to the id's own Display impl (honours width/fill flags,
        // exactly like `write!(f, "{}", self.id)`).
        std::fmt::Display::fmt(&self.id, f)
    }
}
impl std::fmt::Display for ContainerInfo {
    /// Container info displays as the container's id.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        // Equivalent to `write!(f, "{}", self.id)`.
        std::fmt::Display::fmt(&self.id, f)
    }
}
|
use std::sync::Arc;
use apllodb_storage_engine_interface::{
test_support::test_models::{Body, People, Pet},
Row,
};
use crate::{
aliaser::Aliaser,
records::{record::Record, record_schema::RecordSchema},
};
/// Schema fixtures for tests, one per test model.
impl RecordSchema {
    /// Schema fixture built from the `People` test model.
    pub fn fx_people() -> Self {
        let base = People::schema();
        Self::from_row_schema(&base, Aliaser::default())
    }

    /// Schema fixture built from the `Body` test model.
    pub fn fx_body() -> Self {
        let base = Body::schema();
        Self::from_row_schema(&base, Aliaser::default())
    }

    /// Schema fixture built from the `Pet` test model.
    pub fn fx_pet() -> Self {
        let base = Pet::schema();
        Self::from_row_schema(&base, Aliaser::default())
    }
}
impl Record {
pub fn fx_people1() -> Self {
Self::new(Arc::new(RecordSchema::fx_people()), Row::fx_people1())
}
pub fn fx_people2() -> Self {
Self::new(Arc::new(RecordSchema::fx_people()), Row::fx_people2())
}
pub fn fx_people3() -> Self {
Self::new(Arc::new(RecordSchema::fx_people()), Row::fx_people3())
}
pub fn fx_body1() -> Self {
Self::new(Arc::new(RecordSchema::fx_body()), Row::fx_body1())
}
pub fn fx_body3() -> Self {
Self::new(Arc::new(RecordSchema::fx_body()), Row::fx_body3())
}
pub fn fx_pet1() -> Self {
Self::new(Arc::new(RecordSchema::fx_pet()), Row::fx_pet1())
}
pub fn fx_pet3_1() -> Self {
Self::new(Arc::new(RecordSchema::fx_pet()), Row::fx_pet3_1())
}
pub fn fx_pet3_2() -> Self {
Self::new(Arc::new(RecordSchema::fx_pet()), Row::fx_pet3_2())
}
}
|
/**********************************************************\
| |
| hprose |
| |
| Official WebSite: http://www.hprose.com/ |
| http://www.hprose.org/ |
| |
\**********************************************************/
/**********************************************************\
* *
* io/formatter.rs *
* *
* io Formatter for Rust. *
* *
* LastModified: Oct 8, 2016 *
* Author: Chen Fei <cf@hprose.com> *
* *
\**********************************************************/
use super::*;
/// Serialize `v` into hprose bytes; `simple` toggles the writer's simple mode.
pub fn serialize<T: Encodable>(v: &T, simple: bool) -> Bytes {
    let mut writer = Writer::new(simple);
    writer.serialize(v);
    writer.into_bytes()
}

/// Marshal data: serialize using simple mode.
#[inline]
pub fn marshal<T: Encodable>(v: &T) -> Bytes {
    serialize(v, true)
}

/// Deserialize a value of type `T` from hprose bytes.
pub fn unserialize<T: Decodable>(buf: &Bytes, simple: bool) -> DecodeResult<T> {
    let mut reader = Reader::new(buf, simple);
    reader.read_value()
}

/// Unmarshal data: unserialize using simple mode.
#[inline]
pub fn unmarshal<T: Decodable>(buf: &Bytes) -> DecodeResult<T> {
    unserialize(buf, true)
}
|
//! Support for collecting keys and counting the records that contributed them.
use std::fs::File;
use std::path::Path;
#[cfg(test)]
use std::mem::drop;
use arrow2::datatypes::{DataType, Field, Schema};
use arrow2::io::parquet::write::{FileWriter, WriteOptions};
use log::*;
use parquet2::write::Version;
use polars::prelude::*;
use super::index::IdIndex;
use crate::io::ObjectWriter;
use anyhow::Result;
/// A key collector accumulates keys and associates them with numeric identifiers.
///
/// This structure is designed to count the number of records referencing each key,
/// divided by label (such as different sources). It is optimized for use cases where
/// records are read label-by-label.
pub struct KeyCollector {
    // Maps each key string to a stable numeric identifier (ids from `intern`
    // are 1-based; see `KeyCountAccum::add_key`).
    index: IdIndex<String>,
    // One label per count vector in `counts`, in registration order.
    count_labels: Vec<String>,
    // counts[i][k] is the number of records carrying label `count_labels[i]`
    // that referenced the key at (0-based) position k.
    counts: Vec<Vec<i32>>,
}
/// Mutable view into a [`KeyCollector`] that counts keys for one label.
pub struct KeyCountAccum<'a> {
    // Shared key index of the parent collector.
    index: &'a mut IdIndex<String>,
    // Count vector for the label this accumulator was created for.
    counts: &'a mut Vec<i32>,
}
impl KeyCollector {
    /// Create a new, empty key collector.
    pub fn new() -> KeyCollector {
        KeyCollector {
            index: IdIndex::new(),
            count_labels: Vec::new(),
            counts: Vec::new(),
        }
    }

    /// Number of distinct keys collected so far.
    pub fn len(&self) -> usize {
        self.index.len()
    }

    /// True when no keys have been collected yet.
    pub fn is_empty(&self) -> bool {
        self.index.len() == 0
    }

    /// Create an accumulator to count keys with a particular source label.
    ///
    /// Reuses the existing count vector when `label` has been seen before;
    /// otherwise registers a new, zero-filled vector for it.
    pub fn accum<'a>(&'a mut self, label: &str) -> KeyCountAccum<'a> {
        let i = match self.count_labels.iter().position(|l| l == label) {
            Some(i) => i,
            None => {
                self.count_labels.push(label.to_string());
                // One zero per already-interned key keeps positions aligned.
                self.counts.push(vec![0; self.index.len()]);
                self.counts.len() - 1
            }
        };
        KeyCountAccum {
            index: &mut self.index,
            counts: &mut self.counts[i],
        }
    }

    /// Save to a Parquet file, returning the number of rows written.
    ///
    /// Note the argument order: `save(key_col, id_col, path)`, while the
    /// helpers below take `(id_col, key_col)`.
    pub fn save<P: AsRef<Path>>(&mut self, key_col: &str, id_col: &str, path: P) -> Result<usize> {
        info!("saving accumulated keys to {:?}", path.as_ref());
        debug!("creating data frame");
        let mut df = self.to_data_frame(id_col, key_col)?;
        debug!("rechunking");
        df.rechunk();
        debug!("opening file");
        let file = File::create(path)?;
        let options = WriteOptions {
            write_statistics: true,
            version: Version::V2,
            compression: parquet2::compression::CompressionOptions::Zstd(None),
            data_pagesize_limit: None,
        };
        let mut writer = FileWriter::try_new(file, self.schema(id_col, key_col), options)?;
        for chunk in df.iter_chunks() {
            writer.write_object(chunk)?;
        }
        writer.end(None)?;
        Ok(df.height())
    }

    /// Get an Arrow [Schema] for this key collector: the id and key columns,
    /// followed by one non-nullable Int32 count column per label.
    pub fn schema(&self, id_col: &str, key_col: &str) -> Schema {
        let mut fields = vec![
            Field::new(id_col, DataType::Int32, false),
            Field::new(key_col, DataType::Utf8, false),
        ];
        for l in &self.count_labels {
            fields.push(Field::new(l, DataType::Int32, false))
        }
        Schema {
            fields,
            metadata: Default::default(),
        }
    }

    /// Create a Polars [DataFrame] for this accumulator's data.
    pub fn to_data_frame(&mut self, id_col: &str, key_col: &str) -> Result<DataFrame> {
        let len = self.len();
        let mut df = self.index.data_frame(id_col, key_col)?;
        // Labels and count vectors are pushed together in `accum`, so the two
        // vectors always have equal length and can be zipped.
        for (name, counts) in self.count_labels.iter().zip(self.counts.iter_mut()) {
            // Pad with zeros for keys interned after this label's last count.
            counts.resize(len, 0);
            let col = Int32Chunked::new(name.as_str(), counts);
            df.with_column(col)?;
        }
        Ok(df)
    }
}
impl<'a> KeyCountAccum<'a> {
    /// Add a key to this accumulator, interning it if necessary.
    pub fn add_key(&mut self, key: &str) {
        // `intern` returns 1-based ids; the counts vector is 0-based.
        let pos = self.index.intern(key).expect("intern failure") as usize - 1;
        if pos >= self.counts.len() {
            // A newly interned key extends the count vector with zeros.
            self.counts.resize(pos + 1, 0);
        }
        self.counts[pos] += 1;
    }

    /// Add every key yielded by an iterable to this accumulator.
    ///
    /// Generalized from `I: Iterator` to `I: IntoIterator` (backward
    /// compatible — every iterator is itself an `IntoIterator`), so plain
    /// collections can be passed directly.
    pub fn add_keys<I: IntoIterator>(&mut self, iter: I)
    where
        I::Item: AsRef<str>,
    {
        for s in iter {
            self.add_key(s.as_ref());
        }
    }
}
#[test]
fn test_empty() {
    // A fresh collector has no keys, labels, or count vectors.
    let kc = KeyCollector::new();
    assert_eq!(kc.index.len(), 0);
    assert_eq!(kc.count_labels.len(), 0);
    assert_eq!(kc.counts.len(), 0);
}
#[test]
fn test_acc_1() {
    let mut kc = KeyCollector::new();
    let mut acc = kc.accum("bob");
    acc.add_key("wumpus");
    // One key, one label, and a count of 1 for ("bob", "wumpus").
    assert_eq!(kc.index.len(), 1);
    assert_eq!(kc.count_labels.len(), 1);
    assert_eq!(kc.counts.len(), 1);
    assert!(kc.index.lookup("wumpus").is_some());
    assert_eq!(kc.count_labels[0], "bob");
    assert_eq!(kc.counts[0][0], 1);
}
#[test]
fn test_acc_again() {
    // Two accumulators with different labels produce two independent count
    // vectors over a shared key index.
    let mut kc = KeyCollector::new();
    let mut acc = kc.accum("bob");
    acc.add_key("wumpus");
    drop(acc);
    let mut ac2 = kc.accum("albert");
    ac2.add_key("zzzz");
    drop(ac2);
    assert_eq!(kc.index.len(), 2);
    assert_eq!(kc.count_labels.len(), 2);
    assert_eq!(kc.counts.len(), 2);
    assert!(kc.index.lookup("wumpus").is_some());
    assert!(kc.index.lookup("zzzz").is_some());
    assert_eq!(kc.count_labels[0], "bob");
    assert_eq!(kc.counts[0][0], 1);
    // "zzzz" was interned second, so it sits at position 1 of albert's counts.
    assert_eq!(kc.count_labels[1], "albert");
    assert_eq!(kc.counts[1][1], 1);
}
|
pub use text_io::*;
|
// 1. start
// 2. poll until update finished
// 3. stop
use std::{thread, time};
use std::process::{Command, Stdio};
struct Dropbox();
#[derive(Debug, PartialEq)]
/// Parsed output of `dropbox status` (see `Dropbox::status`).
enum DropboxStatus {
    NotRunning,
    UpToDate,
    Starting,
    Connecting,
    /// Any status line not matched above, kept verbatim.
    Else(String)
}
impl Dropbox {
    /// Run `dropbox <cmd>` and return its trimmed stdout.
    ///
    /// Panics if the process cannot be spawned, exits unsuccessfully, or
    /// prints invalid UTF-8. (The old expect message claimed this always
    /// fetched the status, which was misleading for "start"/"stop".)
    fn run(&self, cmd: &str) -> String {
        let child = Command::new("dropbox")
            .arg(cmd)
            .stdout(Stdio::piped())
            .spawn()
            .expect("Failed to run dropbox");
        let output = child.wait_with_output()
            .expect("Failed to wait on child");
        assert!(output.status.success());
        String::from_utf8(output.stdout)
            .expect("Failed to decode output").trim().to_string()
    }
    /// Interpret the output of `dropbox status`.
    fn status(&self) -> DropboxStatus {
        // Match the literal status strings directly; the previous
        // `s if s == "..."` guard arms were redundant.
        match self.run("status").as_str() {
            "Dropbox isn't running!" => DropboxStatus::NotRunning,
            "Connecting..." => DropboxStatus::Connecting,
            "Starting..." => DropboxStatus::Starting,
            "Up to date" => DropboxStatus::UpToDate,
            s => DropboxStatus::Else(s.to_string())
        }
    }
    /// The daemon counts as running unless it reports "Dropbox isn't running!".
    fn is_running(&self) -> bool {
        self.status() != DropboxStatus::NotRunning
    }
    /// Start the daemon and assert it came up.
    fn start(&self) {
        self.run("start");
        assert!(self.is_running())
    }
    /// Stop the daemon and assert it went down.
    fn stop(&self) {
        self.run("stop");
        assert!(!self.is_running())
    }
}
/// Start the Dropbox daemon, poll its status once a second until the sync
/// finishes, then stop it again.
fn main() {
    let db = Dropbox();
    println!("Starting...");
    db.start();
    loop {
        let status = db.status();
        if status == DropboxStatus::UpToDate {
            break;
        }
        println!("Syncing... [{:?}]", status);
        thread::sleep(time::Duration::from_millis(1000));
    }
    println!("Stopping...");
    db.stop();
    println!("Done.");
}
|
use embedded_hal::digital::v2::{OutputPin, InputPin};
use stm32f1xx_hal::{pac, prelude::*, timer::Timer};
/// Continuously mirror the inverted state of input pin PA0 onto output PC13.
///
/// NOTE(review): the set_low-on-high logic suggests PC13 drives an
/// active-low LED (common on "blue pill" boards) — confirm board wiring.
pub fn read_gpio() -> ! {
    let dp = pac::Peripherals::take().unwrap();
    // About RCC: https://www.cnblogs.com/zc110747/p/4692379.html
    let mut rcc = dp.RCC.constrain();
    // GPIO control needs PCLK2, the peripheral-2 domain clock
    // (via the APB2 prescaler, up to 72 MHz).
    let mut gpioa = dp.GPIOA.split(&mut rcc.apb2);
    let mut gpioc = dp.GPIOC.split(&mut rcc.apb2);
    // crl configures the low 8 pins (0-7) of a port;
    // crh configures the high 8 pins (8-15).
    let pin = gpioa.pa0.into_floating_input(&mut gpioa.crl);
    let mut led = gpioc.pc13.into_push_pull_output(&mut gpioc.crh);
    loop {
        if pin.is_high().unwrap() {
            led.set_low().unwrap();
        } else {
            led.set_high().unwrap();
        }
    }
}
mod container;
mod domain;
pub use self::domain::*;
pub use container::*;
|
use std::io::prelude::*;
use std::fs::File;
/// Read the whole file into a `String`, consuming (and thereby closing) the
/// file handle.
///
/// Panics on an I/O error or if the contents are not valid UTF-8.
/// BUGFIX: the `Result` of `read_to_string` was silently discarded before
/// (an `unused_must_use` warning), so read failures returned an empty string.
fn consume(mut file: File) -> String {
    let mut s = String::new();
    file.read_to_string(&mut s)
        .expect("failed to read file to string");
    // Do something crazy with string and return it.
    s
    // file goes now out of scope and gets also closed!
}
fn main() {
    let file = File::open("consume-file.rs").unwrap();
    // `consume` takes ownership of the handle; after this call the file has
    // been read and closed, and is no longer accessible here.
    let contents = consume(file);
    println!("{}", contents);
}
|
use day_21::{full_fight_won_by_player, get_weapons_armors_rings, Character, Item, ItemEnum};
use std::collections::HashMap;
use std::io::{self};
fn main() -> io::Result<()> {
let files_results = vec![("input.txt", 121, 201)];
for (f, result_1, result_2) in files_results.into_iter() {
println!("File: {}", f);
let file_content: Vec<String> = std::fs::read_to_string(f)?
.lines()
.map(|x| x.to_string())
.collect();
let mut values_map: HashMap<&str, i64> = HashMap::new();
for line in file_content.iter() {
let splitted: Vec<&str> = line.split(": ").collect();
values_map.insert(splitted[0], splitted[1].parse::<i64>().unwrap());
}
let boss = Character::from_map(&values_map);
let player = Character::new(100, 0, 0);
let weapons_content: Vec<String> = std::fs::read_to_string("items.txt")?
.lines()
.map(|x| x.to_string())
.collect();
let (weapons, armors, rings) = get_weapons_armors_rings(&weapons_content);
let mut current_min_cost = i64::MAX;
let mut current_max_cost = i64::MIN;
// Must wield a weapon
for weapon in weapons.iter() {
let mut player_w = player.clone();
player_w.add_weapon(weapon);
let cost = player_w.get_total_cost();
if full_fight_won_by_player(player_w.clone(), boss.clone()) {
if cost < current_min_cost {
current_min_cost = cost;
}
} else {
if cost > current_max_cost {
current_max_cost = cost;
}
}
for armor in armors.iter() {
let mut player_wa = player_w.clone();
player_wa.add_armor(armor);
let cost = player_wa.get_total_cost();
if full_fight_won_by_player(player_wa.clone(), boss.clone()) {
if cost < current_min_cost {
current_min_cost = cost;
}
} else {
if cost > current_max_cost {
current_max_cost = cost;
}
}
for ring1 in rings.iter() {
let mut player_war = player_wa.clone();
player_war.add_rings(&ItemEnum::Ring(*ring1, Item::zero_item()));
let cost = player_war.get_total_cost();
if full_fight_won_by_player(player_war.clone(), boss.clone()) {
if cost < current_min_cost {
current_min_cost = cost;
}
} else {
if cost > current_max_cost {
current_max_cost = cost;
}
}
for ring2 in rings.iter() {
if ring1 == ring2 {
continue;
}
let mut player_warr = player_war.clone();
player_warr.add_rings(&ItemEnum::Ring(*ring1, *ring2));
let cost = player_warr.get_total_cost();
if full_fight_won_by_player(player_warr.clone(), boss.clone()) {
if cost < current_min_cost {
current_min_cost = cost;
}
} else {
if cost > current_max_cost {
current_max_cost = cost;
}
}
}
}
}
}
println!("Min score: {}", current_min_cost);
assert_eq!(current_min_cost, result_1);
println!("Max score: {}", current_max_cost);
assert_eq!(current_max_cost, result_2);
}
Ok(())
}
|
/// Spreads the low 21 bits of `val` so that bit `i` lands at position
/// `3 * i`, i.e. two zero-bits are inserted between consecutive bits
/// (the classic Morton-code "part by 2" step).
pub fn expand_bits_by_3(val: u64) -> u64 {
    // Only 21 bits fit once each gets 3 positions (21 * 3 = 63 <= 64).
    let mut x = val & 0x1F_FFFF;
    // Each step doubles the spread of the bit groups, masking away duplicates.
    x = (x | (x << 32)) & 0x00FF_0000_0000_FFFF;
    x = (x | (x << 16)) & 0x00FF_0000_FF00_00FF;
    x = (x | (x << 8)) & 0xF00F_00F0_0F00_F00F;
    x = (x | (x << 4)) & 0x30C3_0C30_C30C_30C3;
    x = (x | (x << 2)) & 0x1249_2492_4924_9249;
    x
}
// Precomputed table: entry i is the 8-bit reversal of the byte i.
const REVERSE_BITS_LOOKUP: [u8; 256] = [
    0x00, 0x80, 0x40, 0xC0, 0x20, 0xA0, 0x60, 0xE0, 0x10, 0x90, 0x50, 0xD0, 0x30, 0xB0, 0x70, 0xF0,
    0x08, 0x88, 0x48, 0xC8, 0x28, 0xA8, 0x68, 0xE8, 0x18, 0x98, 0x58, 0xD8, 0x38, 0xB8, 0x78, 0xF8,
    0x04, 0x84, 0x44, 0xC4, 0x24, 0xA4, 0x64, 0xE4, 0x14, 0x94, 0x54, 0xD4, 0x34, 0xB4, 0x74, 0xF4,
    0x0C, 0x8C, 0x4C, 0xCC, 0x2C, 0xAC, 0x6C, 0xEC, 0x1C, 0x9C, 0x5C, 0xDC, 0x3C, 0xBC, 0x7C, 0xFC,
    0x02, 0x82, 0x42, 0xC2, 0x22, 0xA2, 0x62, 0xE2, 0x12, 0x92, 0x52, 0xD2, 0x32, 0xB2, 0x72, 0xF2,
    0x0A, 0x8A, 0x4A, 0xCA, 0x2A, 0xAA, 0x6A, 0xEA, 0x1A, 0x9A, 0x5A, 0xDA, 0x3A, 0xBA, 0x7A, 0xFA,
    0x06, 0x86, 0x46, 0xC6, 0x26, 0xA6, 0x66, 0xE6, 0x16, 0x96, 0x56, 0xD6, 0x36, 0xB6, 0x76, 0xF6,
    0x0E, 0x8E, 0x4E, 0xCE, 0x2E, 0xAE, 0x6E, 0xEE, 0x1E, 0x9E, 0x5E, 0xDE, 0x3E, 0xBE, 0x7E, 0xFE,
    0x01, 0x81, 0x41, 0xC1, 0x21, 0xA1, 0x61, 0xE1, 0x11, 0x91, 0x51, 0xD1, 0x31, 0xB1, 0x71, 0xF1,
    0x09, 0x89, 0x49, 0xC9, 0x29, 0xA9, 0x69, 0xE9, 0x19, 0x99, 0x59, 0xD9, 0x39, 0xB9, 0x79, 0xF9,
    0x05, 0x85, 0x45, 0xC5, 0x25, 0xA5, 0x65, 0xE5, 0x15, 0x95, 0x55, 0xD5, 0x35, 0xB5, 0x75, 0xF5,
    0x0D, 0x8D, 0x4D, 0xCD, 0x2D, 0xAD, 0x6D, 0xED, 0x1D, 0x9D, 0x5D, 0xDD, 0x3D, 0xBD, 0x7D, 0xFD,
    0x03, 0x83, 0x43, 0xC3, 0x23, 0xA3, 0x63, 0xE3, 0x13, 0x93, 0x53, 0xD3, 0x33, 0xB3, 0x73, 0xF3,
    0x0B, 0x8B, 0x4B, 0xCB, 0x2B, 0xAB, 0x6B, 0xEB, 0x1B, 0x9B, 0x5B, 0xDB, 0x3B, 0xBB, 0x7B, 0xFB,
    0x07, 0x87, 0x47, 0xC7, 0x27, 0xA7, 0x67, 0xE7, 0x17, 0x97, 0x57, 0xD7, 0x37, 0xB7, 0x77, 0xF7,
    0x0F, 0x8F, 0x4F, 0xCF, 0x2F, 0xAF, 0x6F, 0xEF, 0x1F, 0x9F, 0x5F, 0xDF, 0x3F, 0xBF, 0x7F, 0xFF,
];
/// Reverse the bits in the given 64-bit value, turning the MSB into the LSB
/// and vice versa.
///
/// Delegates to the intrinsic-backed `u64::reverse_bits` (stable since Rust
/// 1.37) instead of eight table lookups — it compiles to a handful of
/// shift/mask instructions, or a single instruction on targets that have a
/// bit-reverse opcode.
pub fn reverse_bits(val: u64) -> u64 {
    val.reverse_bits()
}
|
use bimap::BiMap;
use std::iter::FromIterator;
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
/// One Befunge instruction cell; see `CHAR_TOKEN_MAP` for the source
/// character of each fixed variant.
pub enum Token {
    Add,
    Subtract,
    Multiply,
    Divide,
    Modulo,
    Not,
    Greater,
    Right,
    Left,
    Up,
    Down,
    Random,
    HorizontalIf,
    VerticalIf,
    StringMode,
    Duplicate,
    Swap,
    Discard,
    PrintInt,
    PrintChar,
    Bridge,
    Get,
    Put,
    ReadInt,
    ReadChar,
    Quit,
    /// A digit literal 0-9 (value, not codepoint).
    Int(u8),
    Noop,
    /// Any character without a dedicated instruction mapping.
    Char(char),
}
lazy_static! {
    /// Bidirectional source-character <-> token table for every fixed
    /// (non-`Int`, non-`Char`) instruction.
    static ref CHAR_TOKEN_MAP: bimap::hash::BiHashMap<char, Token> = BiMap::from_iter(vec![
        ('+', Token::Add),
        ('-', Token::Subtract),
        ('*', Token::Multiply),
        ('/', Token::Divide),
        ('%', Token::Modulo),
        ('!', Token::Not),
        ('`', Token::Greater),
        ('>', Token::Right),
        ('<', Token::Left),
        ('^', Token::Up),
        ('v', Token::Down),
        ('?', Token::Random),
        ('_', Token::HorizontalIf),
        ('|', Token::VerticalIf),
        ('"', Token::StringMode),
        (':', Token::Duplicate),
        ('\\',Token::Swap),
        ('$', Token::Discard),
        ('.', Token::PrintInt),
        (',', Token::PrintChar),
        ('#', Token::Bridge),
        ('g', Token::Get),
        ('p', Token::Put),
        ('&', Token::ReadInt),
        ('~', Token::ReadChar),
        ('@', Token::Quit),
        (' ', Token::Noop),
    ]);
}
/// Convert a token back to its source character.
///
/// Panics if a fixed instruction token is somehow missing from
/// `CHAR_TOKEN_MAP` (the map covers every non-`Int`/`Char` variant).
pub fn token_to_char(token: &Token) -> char {
    match token {
        // Digit tokens carry the numeric value 0-9; offset from ASCII '0'
        // (byte literal b'0' replaces the noisier `('0' as u8)`).
        Token::Int(value) => (value + b'0') as char,
        Token::Char(value) => *value,
        value => *CHAR_TOKEN_MAP.get_by_right(value).unwrap(),
    }
}
/// Map a source character to its token: digits become `Int`, characters in
/// the instruction table become their instruction, everything else `Char`.
pub fn char_to_token(character: char) -> Token {
    if let Some(digit) = character.to_digit(10) {
        return Token::Int(digit as u8);
    }
    match CHAR_TOKEN_MAP.get_by_left(&character) {
        Some(token) => *token,
        None => Token::Char(character),
    }
}
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This is a basic event loop implementation not meant for any "real purposes"
//! other than testing the scheduler and proving that it's possible to have a
//! pluggable event loop.
//!
//! This implementation is also used as the fallback implementation of an event
//! loop if no other one is provided (and M:N scheduling is desired).
use std::mem;
use std::rt::exclusive::Exclusive;
use std::sync::Arc;
use std::sync::atomic;
use {EventLoop, RemoteCallback, PausableIdleCallback, Callback};
/// This is the only exported function from this module.
///
/// NOTE(review): this module is pre-1.0 Rust (`box`, `proc`, `uint`) and will
/// not build on a modern toolchain; it is documented as-is.
pub fn event_loop() -> Box<EventLoop + Send> {
    box BasicLoop::new() as Box<EventLoop + Send>
}
/// Minimal event loop: a work queue, registered remote callbacks, and at
/// most one pausable idle callback.
struct BasicLoop {
    work: Vec<proc(): Send>, // pending work
    remotes: Vec<(uint, Box<Callback + Send>)>, // remote callbacks, keyed by id
    next_remote: uint, // id assigned to the next registered remote
    messages: Arc<Exclusive<Vec<Message>>>, // queue shared with BasicRemote handles
    idle: Option<Box<Callback + Send>>, // the single idle callback, if any
    idle_active: Option<Arc<atomic::AtomicBool>>, // pause flag shared with BasicPausable
}
enum Message { RunRemote(uint), RemoveRemote(uint) }
impl BasicLoop {
    fn new() -> BasicLoop {
        BasicLoop {
            work: vec![],
            idle: None,
            idle_active: None,
            next_remote: 0,
            remotes: vec![],
            messages: Arc::new(Exclusive::new(Vec::new())),
        }
    }
    /// Process everything in the work queue (continually)
    fn work(&mut self) {
        // Running a work item may enqueue more work, so keep swapping the
        // queue out until it stays empty.
        while self.work.len() > 0 {
            for work in mem::replace(&mut self.work, vec![]).into_iter() {
                work();
            }
        }
    }
    /// Drain the shared message queue and dispatch each message.
    fn remote_work(&mut self) {
        let messages = unsafe {
            mem::replace(&mut *self.messages.lock(), Vec::new())
        };
        for message in messages.into_iter() {
            self.message(message);
        }
    }
    /// Dispatch a single control message to the matching remote callback.
    fn message(&mut self, message: Message) {
        match message {
            Message::RunRemote(i) => {
                // Fire the callback registered under id `i`.
                match self.remotes.iter_mut().find(|& &(id, _)| id == i) {
                    Some(&(_, ref mut f)) => f.call(),
                    None => unreachable!()
                }
            }
            Message::RemoveRemote(i) => {
                // Unregister (and drop) the callback registered under id `i`.
                match self.remotes.iter().position(|&(id, _)| id == i) {
                    Some(i) => { self.remotes.remove(i).unwrap(); }
                    None => unreachable!()
                }
            }
        }
    }
    /// Run the idle callback if one is registered
    fn idle(&mut self) {
        match self.idle {
            Some(ref mut idle) => {
                // Only fire while the associated BasicPausable is active.
                if self.idle_active.as_ref().unwrap().load(atomic::SeqCst) {
                    idle.call();
                }
            }
            None => {}
        }
    }
    /// True when an idle callback is registered and currently unpaused.
    fn has_idle(&self) -> bool {
        self.idle.is_some() && self.idle_active.as_ref().unwrap().load(atomic::SeqCst)
    }
}
impl EventLoop for BasicLoop {
    /// Spin until there is no remaining work, no remotes, and no active idle
    /// callback, blocking on the message queue when nothing is runnable.
    fn run(&mut self) {
        // Not exactly efficient, but it gets the job done.
        while self.remotes.len() > 0 || self.work.len() > 0 || self.has_idle() {
            self.work();
            self.remote_work();
            if self.has_idle() {
                self.idle();
                continue
            }
            unsafe {
                let messages = self.messages.lock();
                // We block here if we have no messages to process and we may
                // receive a message at a later date
                if self.remotes.len() > 0 && messages.len() == 0 &&
                   self.work.len() == 0 {
                    messages.wait()
                }
            }
        }
    }
    /// Queue a closure to run on the next turn of the loop.
    fn callback(&mut self, f: proc():Send) {
        self.work.push(f);
    }
    // FIXME: Seems like a really weird requirement to have an event loop provide.
    /// Register the (single) idle callback; returns a pause/resume handle.
    fn pausable_idle_callback(&mut self, cb: Box<Callback + Send>)
                              -> Box<PausableIdleCallback + Send> {
        rtassert!(self.idle.is_none());
        self.idle = Some(cb);
        let a = Arc::new(atomic::AtomicBool::new(true));
        self.idle_active = Some(a.clone());
        box BasicPausable { active: a } as Box<PausableIdleCallback + Send>
    }
    /// Register a callback firable from other threads via the shared queue.
    fn remote_callback(&mut self, f: Box<Callback + Send>)
                       -> Box<RemoteCallback + Send> {
        let id = self.next_remote;
        self.next_remote += 1;
        self.remotes.push((id, f));
        box BasicRemote::new(self.messages.clone(), id) as
            Box<RemoteCallback + Send>
    }
    // This loop implements no I/O, so there is never active I/O.
    fn has_active_io(&self) -> bool { false }
}
/// Thread-safe handle used to fire a callback registered with the loop.
struct BasicRemote {
    // Message queue shared with the owning BasicLoop.
    queue: Arc<Exclusive<Vec<Message>>>,
    // Id under which the callback was registered.
    id: uint,
}
impl BasicRemote {
    fn new(queue: Arc<Exclusive<Vec<Message>>>, id: uint) -> BasicRemote {
        BasicRemote { queue: queue, id: id }
    }
}
impl RemoteCallback for BasicRemote {
    fn fire(&mut self) {
        let mut queue = unsafe { self.queue.lock() };
        queue.push(Message::RunRemote(self.id));
        // Wake the loop in case it is blocked waiting for messages.
        queue.signal();
    }
}
impl Drop for BasicRemote {
    /// Dropping the handle unregisters the callback from the loop.
    fn drop(&mut self) {
        let mut queue = unsafe { self.queue.lock() };
        queue.push(Message::RemoveRemote(self.id));
        queue.signal();
    }
}
/// Pause/resume handle for the loop's idle callback.
struct BasicPausable {
    // Shared flag read by BasicLoop::idle / has_idle.
    active: Arc<atomic::AtomicBool>,
}
impl PausableIdleCallback for BasicPausable {
    fn pause(&mut self) {
        self.active.store(false, atomic::SeqCst);
    }
    fn resume(&mut self) {
        self.active.store(true, atomic::SeqCst);
    }
}
impl Drop for BasicPausable {
    /// Dropping the handle permanently deactivates the idle callback.
    fn drop(&mut self) {
        self.active.store(false, atomic::SeqCst);
    }
}
#[cfg(test)]
mod test {
    use std::rt::task::TaskOpts;
    use basic;
    use PoolConfig;
    use SchedPool;
    /// Single-threaded scheduler pool backed by this basic event loop.
    fn pool() -> SchedPool {
        SchedPool::new(PoolConfig {
            threads: 1,
            event_loop_factory: basic::event_loop,
        })
    }
    /// Spawn `f` on a fresh pool and wait for the pool to shut down.
    fn run(f: proc():Send) {
        let mut pool = pool();
        pool.spawn(TaskOpts::new(), f);
        pool.shutdown();
    }
    #[test]
    fn smoke() {
        run(proc() {});
    }
    #[test]
    fn some_channels() {
        // A round-trip through a channel exercises cross-task wakeups.
        run(proc() {
            let (tx, rx) = channel();
            spawn(proc() {
                tx.send(());
            });
            rx.recv();
        });
    }
    #[test]
    fn multi_thread() {
        // Same as above but with two scheduler threads and twenty tasks.
        let mut pool = SchedPool::new(PoolConfig {
            threads: 2,
            event_loop_factory: basic::event_loop,
        });
        for _ in range(0u, 20) {
            pool.spawn(TaskOpts::new(), proc() {
                let (tx, rx) = channel();
                spawn(proc() {
                    tx.send(());
                });
                rx.recv();
            });
        }
        pool.shutdown();
    }
}
|
//! A collection of various utility helpers.
mod comparison;
pub use self::comparison::*;
mod environment;
pub use self::environment::*;
mod iterators;
pub use self::iterators::CollectGroupBy;
mod mutability;
pub use self::mutability::*;
mod parallel;
pub use self::parallel::*;
mod random;
pub use self::random::*;
mod time_quota;
pub use self::time_quota::TimeQuota;
mod timing;
pub use self::timing::Timer;
mod types;
pub use self::types::Either;
|
use super::MapVisualization;
use super::Table;
impl<'c> Table<'c, MapVisualization> {
    /// Fetch the map visualization for the given dataset id.
    ///
    /// Errors with `sqlx::Error::RowNotFound` when no row matches.
    /// NOTE(review): assumes at most one visualization per dataset — confirm
    /// the schema enforces that.
    pub async fn by_dataset(&self, dataset: i32) -> Result<MapVisualization, sqlx::Error> {
        sqlx::query_as("SELECT * FROM map_visualization WHERE dataset = $1")
            .bind(dataset)
            .fetch_one(&*self.pool)
            .await
    }
    /// Fetch a single map visualization by id.
    pub async fn by_id(&self, id: i32) -> Result<MapVisualization, sqlx::Error> {
        sqlx::query_as("SELECT * FROM map_visualization WHERE id = $1")
            .bind(id)
            .fetch_one(&*self.pool)
            .await
    }
    /// Fetch every map visualization.
    pub async fn all(&self) -> Result<Vec<MapVisualization>, sqlx::Error> {
        sqlx::query_as("SELECT * FROM map_visualization")
            .fetch_all(&*self.pool)
            .await
    }
}
|
//! Task State Segment
//!
//! When transitioning back to ring0, the processor must load a stack for the
//! kernel to use. This stack pointer is defined in the Task State Segment.
//!
//! Upon an interrupt or syscall the Task Register is read, which is used as an
//! offset into the GDT to find the TSS and use the rsp0 field as the kernel
//! stack.
//!
//! The TSS used to hold registers and other fields to facilitate hardware task
//! switching, but that's deprecated in AMD64.
use super::gdt::{GDT, TSS_OFFSET};
use super::stacks::{DEFAULT, NMI};
/// A wrapper around a Task State Segment
#[allow(dead_code)]
#[repr(packed)]
pub struct Tss {
    _reserved0: u32,
    // Kernel stack pointer loaded on a transition to ring 0 (set in `initialize`).
    rsp0: usize,
    rsp1: usize,
    rsp2: usize,
    _reserved1: u32,
    _reserved2: u32,
    // Interrupt Stack Table entries; ist1 holds the NMI stack here (see `initialize`).
    ist1: usize,
    ist2: usize,
    ist3: usize,
    ist4: usize,
    ist5: usize,
    ist6: usize,
    ist7: usize,
    _reserved3: u32,
    _reserved4: u32,
    _reserved5: u16,
    // I/O permission map base field.
    io_map: u16,
}
/// The single statically-allocated TSS, zero-initialized; the stack fields
/// (`rsp0`, `ist1`) are filled in by `initialize`.
pub static mut TSS: Tss = Tss {
    _reserved0: 0,
    rsp0: 0,
    rsp1: 0,
    rsp2: 0,
    _reserved1: 0,
    _reserved2: 0,
    ist1: 0,
    ist2: 0,
    ist3: 0,
    ist4: 0,
    ist5: 0,
    ist6: 0,
    ist7: 0,
    _reserved3: 0,
    _reserved4: 0,
    _reserved5: 0,
    io_map: 0,
};
/// Initializes the TSS and TR
///
/// Necessary to re-enter ring0
pub fn initialize() {
    // GDT[6..8] contains the TSS segment.
    // It's already been initialized with the proper size and flags, but
    // we initialize the multi-part address fields here since we can't
    // manipulate the tss ptr before linking.
    unsafe {
        // Kernel stack for ring transitions, plus a dedicated NMI stack.
        TSS.rsp0 = DEFAULT.top();
        TSS.ist1 = NMI.top();
        let tss_ptr = &TSS as *const _ as usize;
        // Scatter the TSS base address across the descriptor's address fields
        // (the trailing comments name the descriptor bit ranges being filled).
        GDT[6] |= (tss_ptr & 0x00ffffff) << 16; // 39:16
        GDT[6] |= (tss_ptr & 0xff000000) << 32; // 63:56
        GDT[7] = tss_ptr >> 32; // 95:64
        // load TR with byte-offset into GDT for TSS
        // NOTE(review): legacy pre-1.59 `asm!` (LLVM syntax); porting to the
        // stabilized `core::arch::asm!` form is required on new toolchains.
        asm!("ltr ax" :: "{rax}"(TSS_OFFSET) :: "intel");
    }
}
|
use std::collections::BitSet;
use incidence_vector::IncidenceVector;
/// [Incidence Matrix](http://mathworld.wolfram.com/IncidenceMatrix.html)
pub struct IncidenceMatrix {
    /// Matrix dimensions in the form (m,n) or (# of rows, # of columns).
    pub dims: (usize, usize),
    // Row-major linear indices (i * n + j) of the non-zero entries.
    entries: BitSet,
}
impl IncidenceMatrix {
    /// Constructs an `IncidenceMatrix` from the row-major linear indices of
    /// its non-zero entries: entry (row i, column j) has index `i * n + j`.
    pub fn from_vec(dims: (usize, usize), entries: &Vec<usize>) -> IncidenceMatrix {
        let mut my_entries = BitSet::with_capacity(entries.len());
        for entry in entries.iter() {
            my_entries.insert(*entry);
        }
        IncidenceMatrix {
            dims: dims,
            entries: my_entries,
        }
    }
    /// Borrow the raw set of linear entry indices.
    pub fn entries_ref(&self) -> &BitSet {
        &self.entries
    }
    /// Decompose the matrix into its m row vectors, each of length n.
    pub fn rows(&self) -> Vec<IncidenceVector> {
        let (m, n) = self.dims;
        (0..m).map(|i| {
            // BUGFIX: the row of linear index x is x / n, not x / m; the two
            // only coincide for square matrices (which is all the unit tests
            // below exercise, so the bug went unnoticed).
            let row_entries = self.entries.iter().filter(|x| x / n == i).map(|x| x - (i * n)).collect::<Vec<usize>>();
            IncidenceVector::from_vec(n, row_entries)
        }).collect()
    }
    /// Decompose the matrix into its n column vectors, each of length m.
    pub fn columns(&self) -> Vec<IncidenceVector> {
        let (m, n) = self.dims;
        (0..n).map(|j| {
            let column_entries = self.entries.iter().filter(|x| x % n == j).map(|x| x / n).collect::<Vec<usize>>();
            IncidenceVector::from_vec(m, column_entries)
        }).collect()
    }
    /// Matrix-Vector multiplication: component i of the result is set when
    /// row i has a non-zero dot product with `_rhs`.
    pub fn mul_incidence_vector(&self, _rhs: &IncidenceVector) -> IncidenceVector {
        let (m, n) = self.dims;
        let len = _rhs.len;
        assert_eq!(n, len);
        let rows = self.rows();
        // BUGFIX: an (m x n) matrix times an n-vector is an m-vector, so the
        // result has length m (the previous code used n).
        IncidenceVector::from_vec(m, (0..m).filter(|&i| rows[i].dot(_rhs) > 0).collect())
    }
}
// NOTE(review): every test below uses a square (2,2) matrix, so they cannot
// detect row/column indexing mistakes that only appear when m != n.
#[test]
fn test_rows_one() {
    let a = IncidenceMatrix::from_vec((2,2), &vec![0,3]);
    let rows = a.rows();
    assert_eq!(rows[0].index(&0), true);
    assert_eq!(rows[0].index(&1), false);
    assert_eq!(rows[1].index(&0), false);
    assert_eq!(rows[1].index(&1), true);
}
#[test]
fn test_rows_two() {
    let a = IncidenceMatrix::from_vec((2,2), &vec![1,3]);
    let rows = a.rows();
    assert_eq!(rows[0].index(&0), false);
    assert_eq!(rows[0].index(&1), true);
    assert_eq!(rows[1].index(&0), false);
    assert_eq!(rows[1].index(&1), true);
}
#[test]
fn test_columns_one() {
    let a = IncidenceMatrix::from_vec((2,2), &vec![0,3]);
    let columns = a.columns();
    assert_eq!(columns[0].index(&0), true);
    assert_eq!(columns[0].index(&1), false);
    assert_eq!(columns[1].index(&0), false);
    assert_eq!(columns[1].index(&1), true);
}
#[test]
fn test_columns_two() {
    let a = IncidenceMatrix::from_vec((2,2), &vec![1,3]);
    let columns = a.columns();
    assert_eq!(columns[0].index(&0), false);
    assert_eq!(columns[0].index(&1), false);
    assert_eq!(columns[1].index(&0), true);
    assert_eq!(columns[1].index(&1), true);
}
|
use yew::prelude::*;
use crate::components::card::Card;
/// 404 fallback page.
///
/// Emits a "page not found" message (Chinese) into the shared
/// `Callback<String>` context before rendering — presumably a toast or
/// title hook provided by an ancestor component; confirm against the app
/// root. Panics via `unwrap` if the context is not provided.
#[function_component(NotFound)]
pub fn not_found() -> Html {
    use_context::<Callback<String>>()
        .unwrap()
        .emit("找不到页面".into());
    html! {
        <Card title={"Welcome!"}>
            <p>{ "404 NOT FOUND,换个地址试试看?" }</p>
        </Card>
    }
}
use std::fmt;
use crate::Coord;
#[derive(Eq, PartialEq, Hash, Debug, Copy, Clone, Ord, PartialOrd)]
/// An axis-aligned rectangle on the grid.
pub struct Area {
    /// Minimum corner of the rectangle.
    pub position: Coord,
    /// Extent along x and y.
    pub size: Coord,
}
impl Area {
    /// Create an area from its minimum corner and size.
    pub fn new(position: Coord, size: Coord) -> Area {
        Area { position, size }
    }

    /// Center point (integer division truncates toward the minimum corner).
    pub fn center(&self) -> Coord {
        (
            self.position.x + (self.size.x / 2),
            self.position.y + (self.size.y / 2),
        )
            .into()
    }

    /// True if `point` lies in the half-open box `[position, position + size)`.
    pub fn point_within(&self, point: Coord) -> bool {
        self.position.x <= point.x
            && point.x < self.position.x + self.size.x
            && self.position.y <= point.y
            && point.y < self.position.y + self.size.y
    }

    /// True if `area` is entirely contained in `self` (closed bounds).
    pub fn area_within(&self, area: Area) -> bool {
        self.position.x <= area.position.x
            && self.position.y <= area.position.y
            && self.position.x + self.size.x >= area.position.x + area.size.x
            && self.position.y + self.size.y >= area.position.y + area.size.y
    }

    /// True if `self` and `other` overlap.
    ///
    /// Checks `other`'s corners and center against `self`, then two
    /// closed-bound conditions for `self`'s corners lying inside `other`.
    /// BUGFIX: the last two conditions previously mixed up the axes,
    /// comparing x coordinates against `size.y` and y coordinates against
    /// `size.x`; correct for squares only.
    pub fn overlaps(&self, other: Area) -> bool {
        self.point_within(other.position)
            || self.point_within(other.position + (other.size.x, 0).into())
            || self.point_within(other.position + (0, other.size.y).into())
            || self.point_within(other.position + other.size)
            || self.point_within(other.center())
            || self.position.x >= other.position.x
                && self.position.y >= other.position.y
                && self.position.x <= other.position.x + other.size.x
                && self.position.y <= other.position.y + other.size.y
            || self.position.x + self.size.x >= other.position.x
                && self.position.y + self.size.y >= other.position.y
                && self.position.x + self.size.x <= other.position.x + other.size.x
                && self.position.y + self.size.y <= other.position.y + other.size.y
    }

    /// Iterate every coordinate of the area.
    ///
    /// NOTE(review): `AreaIter` treats `size` as an inclusive bound on both
    /// axes, so this yields `(size.x + 1) * (size.y + 1)` points — confirm
    /// that is intended before changing it.
    pub fn iter(&self) -> AreaIter {
        AreaIter {
            pos: (0, 0).into(),
            size: self.size,
            position: self.position,
        }
    }
}
impl fmt::Display for Area {
    /// Formats as `[min-corner -> min-corner + size]`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let end = self.position + self.size;
        write!(f, "[{} -> {}]", self.position, end)
    }
}
/// Iterator state for walking every coordinate of an `Area` row by row.
#[derive(Eq, PartialEq, Hash, Debug, Copy, Clone, Ord, PartialOrd)]
pub struct AreaIter {
    /// Cursor relative to `position`, starting at (0, 0).
    pub pos: Coord,
    /// Extent being walked (copied from `Area::size`).
    pub size: Coord,
    /// Absolute offset added to `pos` when yielding.
    pub position: Coord,
}
impl Iterator for AreaIter {
    type Item = Coord;

    /// Walks the area row by row, yielding `position + pos` for every
    /// cursor value with `pos.x <= size.x` and `pos.y <= size.y`.
    ///
    /// NOTE(review): both bounds are inclusive (`<=`), so each row yields
    /// `size.x + 1` coordinates — confirm this inclusivity is intended.
    fn next(&mut self) -> Option<Self::Item> {
        if self.pos.x <= self.size.x && self.pos.y <= self.size.y {
            // (Removed a dead pre-check here: the outer condition already
            // guarantees `pos.x <= size.x`, so the old wrap-before-yield
            // branch could never fire.)
            let ret = Some(self.pos + self.position);
            // Advance the cursor; wrap to the start of the next row once
            // the current row is exhausted.
            self.pos.x += 1;
            if self.pos.x > self.size.x {
                self.pos.x = 0;
                self.pos.y += 1;
            }
            ret
        } else {
            None
        }
    }
}
|
#![feature(futures_api, async_await, await_macro)]
mod api;
pub mod config;
use {crate::config::Opt, tide::App};
/// Builds the Tide application: `opt` becomes the app's shared state,
/// request logging is installed, and the ping/hello routes are mounted.
pub fn make_app(opt: Opt) -> App<Opt> {
    let mut app = tide::App::new(opt);
    // Log every request/response at the root of the middleware stack.
    app.middleware(tide::middleware::RootLogger::new());
    // Liveness probe. Uses the pre-2018 nightly `async move |_|` closure
    // syntax, matching the feature gates at the crate root.
    app.at("/ping").get(async move |_| "OK");
    app.at("/hello/:name")
        .get(api::hello)
        .post(api::hello_with_body);
    app
}
|
//! This project is used for explaining the linear phase property of digital
//! filters. Here we have a low-pass filter represented by h array. First its
//! FFT is calculated using the arm_cfft_f32 function. Then the magnitude and
//! phase of the FFT are stored in Mag and Phase arrays.
//!
//! Runs entirely locally without hardware. Rounding might differ from the
//! device. Except for printing, be vigilant not to become reliant on any
//! std tools that can't otherwise port over to no_std without alloc.
//!
//! `cargo run --example 4_14_linear_phase_calculations`
use textplots::{Chart, Plot, Shape};
use core::f32::consts::PI;
use itertools::Itertools;
use microfft::{complex::cfft_64, Complex32};
use typenum::Unsigned;
type N = heapless::consts::U64;
fn main() {
    // Complex impulse response of filter: lift the real FIR taps into
    // complex samples (im = 0) so they can be fed to the complex FFT.
    let mut dtfsecoef = H
        .iter()
        .cloned()
        .map(|h| Complex32 { re: h, im: 0.0 })
        .collect::<heapless::Vec<Complex32, N>>();
    // In-place 64-point FFT; the returned view aliases `dtfsecoef`, so it
    // can be discarded.
    let _ = cfft_64(&mut dtfsecoef[..]);
    // Magnitude calculation: |X[k]| = sqrt(re^2 + im^2) per bin.
    let mag = dtfsecoef
        .iter()
        .map(|complex| (complex.re * complex.re + complex.im * complex.im).sqrt())
        .collect::<heapless::Vec<f32, N>>();
    display::<N, _>("mag", mag.iter().cloned());
    // NOTE(review): `re.atan2(im)` computes atan2(re, im); the conventional
    // phase is atan2(im, re) — confirm against the original MATLAB source.
    let phase = dtfsecoef
        .iter()
        .cloned()
        .map(|complex| complex.re.atan2(complex.im));
    // not sure why yet, but this is how they display in the matlab file:
    // the upper half of the spectrum is shifted down by pi before plotting.
    let phase_graph = phase
        .clone()
        .enumerate()
        .map(|(i, phase)| if i < 33 { phase } else { phase - PI });
    display::<N, _>("phase", phase_graph.clone());
}
// Points isn't a great representation as you can lose the line in the graph,
// however while Lines occasionally looks good it also can be terrible.
// Continuous requires to be in a fn pointer closure which cant capture any
// external data so not useful without lots of code duplication.
/// Prints `input` as a formatted list and plots it as a line chart.
/// `N` only fixes the x-axis length of the chart; the iterator is consumed
/// once for printing (via a clone) and once for plotting.
fn display<N, I>(name: &str, input: I)
where
    N: Unsigned,
    I: Iterator<Item = f32> + core::clone::Clone + std::fmt::Debug,
{
    println!("{:?}: {:.4?}", name, input.clone().format(", "));
    // Pair every sample with its index so textplots can place it on the x axis.
    let display = input
        .enumerate()
        .map(|(n, y)| (n as f32, y))
        .collect::<Vec<(f32, f32)>>();
    Chart::new(120, 60, 0.0, N::to_usize() as f32)
        .lineplot(Shape::Lines(&display[..]))
        .display();
}
// linear_phase_FIR_coefficients
// 64-tap symmetric (linear-phase) low-pass FIR impulse response.
// The two literals that previously lacked digit separators (0.018774895)
// are normalized to the grouping style used by every other tap; the
// values are unchanged.
#[rustfmt::skip]
static H: &[f32] = &[
    0.002_110_571_8, 0.003_037_402_2, 0.004_010_573, 0.005_026_416_4, 0.006_080_887_7,
    0.007_169_586_6, 0.008_287_783, 0.009_430_443, 0.010_592_262, 0.011_767_695,
    0.012_950_993, 0.014_136_244, 0.015_317_405, 0.016_488_347, 0.017_642_902,
    0.018_774_895, 0.019_878_196, 0.020_946_754, 0.021_974_655, 0.022_956_148,
    0.023_885_697, 0.024_758_019, 0.025_568_118, 0.026_311_33, 0.026_983_349,
    0.027_580_261, 0.028_098_583, 0.028_535_27, 0.028_887_754, 0.029_153_956,
    0.029_332_304, 0.029_421_745, 0.029_421_745, 0.029_332_304, 0.029_153_956,
    0.028_887_754, 0.028_535_27, 0.028_098_583, 0.027_580_261, 0.026_983_349,
    0.026_311_33, 0.025_568_118, 0.024_758_019, 0.023_885_697, 0.022_956_148,
    0.021_974_655, 0.020_946_754, 0.019_878_196, 0.018_774_895, 0.017_642_902,
    0.016_488_347, 0.015_317_405, 0.014_136_244, 0.012_950_993, 0.011_767_695,
    0.010_592_262, 0.009_430_443, 0.008_287_783, 0.007_169_586_6, 0.006_080_887_7,
    0.005_026_416_4, 0.004_010_573, 0.003_037_402_2, 0.002_110_571_8
];
|
use std::net::SocketAddr;
use smoltcp::{
wire::{
Ipv4Packet, Ipv6Packet, TcpPacket, IpProtocol as Protocol,
},
};
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
/// Pair of socket addresses identifying a packet chain (see
/// `Packet::identification_pair` for the canonical ordering).
pub type IdAddrs = (SocketAddr, SocketAddr);
#[derive(Debug, Clone)]
/// Convenience wrapper around IPv4/IPv6 packet as single unit
pub enum Packet {
    /// IPv4 + TCP packet owning its raw bytes.
    V4(Ipv4Packet<Vec<u8>>),
    /// IPv6 + TCP packet owning its raw bytes.
    V6(Ipv6Packet<Vec<u8>>),
}
impl Packet {
/// Build new (semi-universal) packet from raw buffer, from *correct* IPv(4/6) + TCP packet
/// No other protocols are supported
pub fn new(buf: &[u8]) -> Option<Self> {
if buf.len() == 0 {
return None;
}
let ver = buf[0] >> 4;
if ver == 4 {
let packet = Ipv4Packet::new_checked(buf).ok()?;
if packet.protocol() != Protocol::Tcp {
return None;
}
Some(Self::V4(Ipv4Packet::new_unchecked(buf.to_vec())))
} else if ver == 6 {
let packet = Ipv6Packet::new_checked(buf).ok()?;
if packet.next_header() != Protocol::Tcp {
return None;
}
Some(Self::V6(Ipv6Packet::new_unchecked(buf.to_vec())))
} else {
None
}
}
/// Get raw buffer of this packet
pub fn ip_buffer(&self) -> &[u8] {
match self {
Self::V4(ref packet) => packet.as_ref(),
Self::V6(ref packet) => packet.as_ref(),
}
}
pub fn tcp_buffer(&self) -> &[u8] {
match self {
Self::V4(_) => Ipv4Packet::new_unchecked(self.ip_buffer()).payload(),
Self::V6(_) => Ipv6Packet::new_unchecked(self.ip_buffer()).payload(),
}
}
/// Get Tcp packet (without IP headers) from this IP packet.
pub fn tcp_packet(&self) -> TcpPacket<&[u8]> {
TcpPacket::new_unchecked(self.tcp_buffer())
}
#[inline]
/// Get Socket address (IP address + TCP port number) of source generating this packet
pub fn source_addr(&self) -> SocketAddr {
let port = self.tcp_packet().src_port();
match self {
Self::V4(ref packet) => SocketAddr::new(packet.src_addr().0.into(), port),
Self::V6(ref packet) => SocketAddr::new(packet.src_addr().0.into(), port),
}
}
#[inline]
/// Get Socket address (IP address + TCP port number) of source generating this packet
pub fn destination_address(&self) -> SocketAddr {
let port = self.tcp_packet().src_port();
match self {
Self::V4(ref packet) => SocketAddr::new(packet.dst_addr().0.into(), port),
Self::V6(ref packet) => SocketAddr::new(packet.dst_addr().0.into(), port),
}
}
#[inline]
/// Socket Address identifying specific packet chain
pub fn identification_pair(&self) -> IdAddrs {
let mut hasher = DefaultHasher::new();
self.source_addr().hash(&mut hasher);
let sh = hasher.finish();
let mut hasher = DefaultHasher::new();
self.destination_address().hash(&mut hasher);
let dh = hasher.finish();
if sh < dh {
(self.source_addr(), self.destination_address())
} else {
(self.destination_address(), self.source_addr())
}
}
#[inline]
/// Get raw payload buffer of this packet (Without TCP or IP headers)
pub fn payload(&self) -> &[u8] {
self.tcp_packet().payload()
}
#[inline]
/// Check if packet has any (non-header related) payload
pub fn has_payload(&self) -> bool {
self.payload().len() > 0
}
#[inline]
/// Check if is push (PSH) flag set in TCP header
pub fn is_push(&self) -> bool {
self.tcp_packet().psh()
}
#[inline]
/// Check if is reset (RST) flag set in TCP header
pub fn is_reset(&self) -> bool {
self.tcp_packet().rst()
}
#[inline]
/// Check if is finish (FIN) flag set in TCP header
pub fn is_finish(&self) -> bool {
self.tcp_packet().fin()
}
#[inline]
/// Check if this packet closes connection
pub fn is_closing(&self) -> bool {
self.is_reset() || self.is_finish()
}
#[inline]
/// Check if this packet has IPv4 header
pub fn is_ipv4(&self) -> bool {
if let Self::V4(_) = self {
true
} else {
false
}
}
#[inline]
/// Check if this packet has IPv6 header
pub fn is_ipv6(&self) -> bool {
!self.is_ipv4()
}
} |
mod dataset;
mod itemset;
mod datasets;
use std::{
collections::HashSet,
fmt::Debug,
hash::Hash
};
use crate::{DataSet, Support};
use self::datasets::TestDataSet;
/// Run the sequential miner on the given test dataset and compare the
/// mined closed itemsets against the expected result set.
fn test_sequential<D>(dataset: &TestDataSet<D>)
where
    D: DataSet,
    D::ItemSet: Debug + Eq + Hash,
    for<'b> &'b D::ItemSet: IntoIterator<Item = usize>,
{
    let mined = crate::sequential::closed(&dataset.dataset, dataset.min_sup);
    // The boxed slice has no owned iterator, so go through Vec first.
    let result = mined
        .into_vec()
        .into_iter()
        .collect::<HashSet<(D::ItemSet, Support)>>();
    assert_eq!(result, dataset.result)
}
/// Run the parallel miner on the given test dataset and compare the
/// mined closed itemsets against the expected result set.
fn test_parallel<D>(dataset: &TestDataSet<D>)
where
    D: DataSet + Sync,
    D::ItemSet: Debug + Eq + Hash + Send + Sync,
    for<'b> &'b D::ItemSet: IntoIterator<Item = usize>,
{
    let mined = crate::parallel::closed(&dataset.dataset, dataset.min_sup);
    // The boxed slice has no owned iterator, so go through Vec first.
    let result = mined
        .into_vec()
        .into_iter()
        .collect::<HashSet<(D::ItemSet, Support)>>();
    assert_eq!(result, dataset.result)
}
#[test]
fn test_toy() {
    // Both implementations must agree with the hand-written expectation.
    test_sequential(&datasets::TOY);
    test_parallel(&datasets::TOY);
}
|
//! AT&T Assembler Syntax Tree.
#![feature(try_from)]
#[macro_use]
extern crate pest_derive;
extern crate pest;
#[macro_use]
pub mod parser;
pub mod ast;
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsDevice : Object describing the device used to perform the Synthetic test.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsDevice {
    /// Screen height of the device.
    #[serde(rename = "height")]
    pub height: i64,
    /// Identifier of the device (see `SyntheticsDeviceId`).
    #[serde(rename = "id")]
    pub id: crate::models::SyntheticsDeviceId,
    /// Whether or not the device is a mobile.
    #[serde(rename = "isMobile", skip_serializing_if = "Option::is_none")]
    pub is_mobile: Option<bool>,
    /// The device name.
    #[serde(rename = "name")]
    pub name: String,
    /// Screen width of the device.
    #[serde(rename = "width")]
    pub width: i64,
}
impl SyntheticsDevice {
    /// Object describing the device used to perform the Synthetic test.
    pub fn new(height: i64, id: crate::models::SyntheticsDeviceId, name: String, width: i64) -> SyntheticsDevice {
        Self {
            height,
            id,
            name,
            width,
            // The optional mobile flag stays unset until the caller sets it.
            is_mobile: None,
        }
    }
}
|
use rocket_contrib::{Json, Value};
use models::Hero;
/// POST / — "creates" a hero; currently just echoes the posted JSON back
/// (no persistence yet).
#[post("/", data = "<hero>")]
pub fn create(hero: Json<Hero>) -> Json<Hero> {
    hero
}
/// GET / — returns a hard-coded list of hero names as JSON.
#[get("/")]
pub fn get_all() -> Json<Value> {
    Json(json!([
        "hero 1",
        "hero 2"
    ]))
}
/// PUT /<id> — stub: returns the posted hero unchanged. `id` is unused in
/// the body but its name must match the `<id>` route segment.
#[put("/<id>", data = "<hero>")]
pub fn update(id: u32, hero: Json<Hero>) -> Json<Hero> {
    hero
}
/// DELETE /<id> — stub: always reports success. `id` is unused in the body
/// but its name must match the `<id>` route segment.
#[delete("/<id>")]
pub fn delete(id: u32) -> Json<Value> {
    Json(json!({"status": "ok"}))
}
//! All functions in this module operate on raw bytes for performance reasons.
//! It is easy to combine these with `std::str::from_utf8` family of functions,
//! to lift them to operate on `str`.
use crate::Osm;
use std::ops::Range;
/// Returns an iterator over tags specified by `range`.
///
/// When searching for a tag by key, prefer `find_tag`, which performs
/// better.
#[inline]
pub fn iter_tags(archive: &Osm, range: Range<u64>) -> impl Iterator<Item = (&[u8], &[u8])> + Clone {
    let tags = archive.tags();
    let tags_index = archive.tags_index();
    let strings = archive.stringtable();
    range.map(move |i| {
        // Each index entry points at a tag, which in turn holds two offsets
        // into the string table: one for the key, one for the value.
        let entry = &tags[tags_index[i as usize].value() as usize];
        (
            strings.substring_raw(entry.key_idx() as usize),
            strings.substring_raw(entry.value_idx() as usize),
        )
    })
}
/// Finds the first tag in the given `range` which satisfies the predicate
/// applied to the key and value and returns the corresponding value.
///
/// Note that the predicate function is called on the whole key block and value
/// block. These are zero (`\0`) divided blocks of bytes that start at the key
/// resp. value, and contain the rest string data. In particular, the len of
/// the block is *not* the len of the key resp. value. The user is responsible
/// to check or find the zero terminator.
#[inline]
pub fn find_tag_by(
    archive: &Osm,
    mut range: Range<u64>,
    mut predicate: impl FnMut(&[u8], &[u8]) -> bool,
) -> Option<&[u8]> {
    let tags = archive.tags();
    let tags_index = archive.tags_index();
    let strings = archive.stringtable();
    // `find_map` stops at the first index whose tag satisfies the predicate.
    range.find_map(move |idx| {
        let tag = &tags[tags_index[idx as usize].value() as usize];
        // Hand the predicate the raw (zero-divided) blocks; only on a match
        // do we pay for extracting the terminated value substring.
        let key_block = &strings.as_bytes()[tag.key_idx() as usize..];
        let value_block = &strings.as_bytes()[tag.value_idx() as usize..];
        if predicate(key_block, value_block) {
            Some(strings.substring_raw(tag.value_idx() as usize))
        } else {
            None
        }
    })
}
/// Finds a tag by its key in the given `range` and returns the corresponding
/// value.
#[inline]
pub fn find_tag<'a>(archive: &'a Osm, range: Range<u64>, key: &[u8]) -> Option<&'a [u8]> {
    find_tag_by(archive, range, |key_block, _| {
        // A key matches when the block starts with `key` and is terminated
        // right after it (by `\0`, or by the end of the string table).
        let terminated = key_block.get(key.len()).map_or(true, |&b| b == 0);
        key_block.starts_with(key) && terminated
    })
}
/// Checks if there is a tag in `range` with a given `key` and `value`.
#[inline]
pub fn has_tag(archive: &Osm, range: Range<u64>, key: &[u8], value: &[u8]) -> bool {
    let tags = archive.tags();
    let tags_index = archive.tags_index();
    let strings = archive.stringtable();
    // A needle matches at string-table offset `idx` when the data there
    // starts with the needle and is zero-terminated right after it (or the
    // table ends there).
    let matches = |idx, needle: &[u8]| {
        let block = &strings.as_bytes()[idx as usize..];
        block.starts_with(needle) && *block.get(needle.len()).unwrap_or(&0) == 0
    };
    // Only the first tag whose key matches decides the outcome, mirroring a
    // key lookup followed by a value comparison.
    range
        .map(|idx| &tags[tags_index[idx as usize].value() as usize])
        .find(|tag| matches(tag.key_idx(), key))
        .map_or(false, |tag| matches(tag.value_idx(), value))
}
|
use std::thread;
/// Counts the primitive Pythagorean triples strictly below perimeter `d`
/// in the subtree rooted at (a, b, c), excluding the root itself, by
/// walking the Berggren/Price ternary tree (perimeter grows monotonically
/// down the tree, so pruning is exact).
fn f1 (a : u64, b : u64, c : u64, d : u64) -> u64 {
    let children = [
        [a - 2*b + 2*c, 2*a - b + 2*c, 2*a - 2*b + 3*c],
        [a + 2*b + 2*c, 2*a + b + 2*c, 2*a + 2*b + 3*c],
        [2*b + 2*c - a, b + 2*c - 2*a, 2*b + 3*c - 2*a],
    ];
    children
        .iter()
        .filter(|t| t[0] + t[1] + t[2] <= d)
        .map(|t| 1 + f1(t[0], t[1], t[2], d))
        .sum()
}
/// Counts all (not just primitive) Pythagorean triples with perimeter at
/// most `d` descending from (a, b, c), excluding the root's own multiples:
/// each primitive child with perimeter `l` contributes `d / l` multiples.
fn f2 (a : u64, b : u64, c : u64, d : u64) -> u64 {
    let children = [
        [a - 2*b + 2*c, 2*a - b + 2*c, 2*a - 2*b + 3*c],
        [a + 2*b + 2*c, 2*a + b + 2*c, 2*a + 2*b + 3*c],
        [2*b + 2*c - a, b + 2*c - 2*a, 2*b + 3*c - 2*a],
    ];
    children
        .iter()
        .map(|t| (t[0] + t[1] + t[2], t))
        .filter(|(l, _)| *l <= d)
        .map(|(l, t)| d / l + f2(t[0], t[1], t[2], d))
        .sum()
}
fn main () {
    // The recursive tree walk can nest deeply for the larger limits, so
    // both workers get an explicit 32 MiB stack.
    const STACK: usize = 32 * 1024 * 1024;
    let primitives = thread::Builder::new().stack_size(STACK).spawn(move || {
        let mut limit: u64 = 100;
        while limit <= 100_000_000_000 {
            // `+ 1` accounts for the root triple (3, 4, 5) itself.
            println!(" Primitive triples below {} : {}", limit, f1(3, 4, 5, limit) + 1);
            limit *= 10;
        }
    }).unwrap();
    let all_triples = thread::Builder::new().stack_size(STACK).spawn(move || {
        let mut limit: u64 = 100;
        while limit <= 100_000_000_000 {
            // `limit / 12` counts the multiples of the root (perimeter 12).
            println!(" Triples below {} : {}", limit, f2(3, 4, 5, limit) + limit/12);
            limit *= 10;
        }
    }).unwrap();
    primitives.join().unwrap();
    all_triples.join().unwrap();
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod accounts {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// Fetches a single Maps account via
/// `GET .../Microsoft.Maps/accounts/{account_name}`. Bearer auth is attached
/// only when a token credential is configured; a 200 body is deserialized
/// into `MapsAccount`, any other status into `get::Error::DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
) -> std::result::Result<MapsAccount, get::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(get::BuildRequestError)?;
    let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: MapsAccount = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            get::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `get`.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `get` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Creates or updates a Maps account via
/// `PUT .../Microsoft.Maps/accounts/{account_name}` with the create
/// parameters as the JSON body. 200 and 201 are both success (distinguished
/// by `create_or_update::Response`); anything else becomes `DefaultResponse`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
    maps_account_create_parameters: &MapsAccountCreateParameters,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.put(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(create_or_update::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(maps_account_create_parameters);
    let req = req_builder.build().context(create_or_update::BuildRequestError)?;
    let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: MapsAccount = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        StatusCode::CREATED => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: MapsAccount = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            create_or_update::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated response/error types for `create_or_update`.
pub mod create_or_update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// 200 = updated existing account, 201 = created a new one.
    #[derive(Debug)]
    pub enum Response {
        Ok200(MapsAccount),
        Created201(MapsAccount),
    }
    /// Failure modes of the `create_or_update` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Patches an existing Maps account via
/// `PATCH .../Microsoft.Maps/accounts/{account_name}` with the update
/// parameters as the JSON body; 200 yields the updated `MapsAccount`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
    maps_account_update_parameters: &MapsAccountUpdateParameters,
) -> std::result::Result<MapsAccount, update::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.patch(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(update::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(maps_account_update_parameters);
    let req = req_builder.build().context(update::BuildRequestError)?;
    let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
            let rsp_value: MapsAccount = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            update::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `update`.
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `update` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Deletes a Maps account via
/// `DELETE .../Microsoft.Maps/accounts/{account_name}`. Both 200 and 204
/// count as success (the account may already be gone); other statuses are
/// parsed into `delete::Error::DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.delete(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(delete::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(delete::BuildRequestError)?;
    let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => Ok(delete::Response::Ok200),
        StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
            delete::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated response/error types for `delete`.
pub mod delete {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// 200 = deleted, 204 = nothing to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    /// Failure modes of the `delete` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Lists all Maps accounts in one resource group via
/// `GET .../resourceGroups/{rg}/providers/Microsoft.Maps/accounts`.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
) -> std::result::Result<MapsAccounts, list_by_resource_group::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts",
        &operation_config.base_path, subscription_id, resource_group_name
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_resource_group::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
            let rsp_value: MapsAccounts = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?;
            list_by_resource_group::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `list_by_resource_group`.
pub mod list_by_resource_group {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `list_by_resource_group` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Lists all Maps accounts in the subscription via
/// `GET /subscriptions/{id}/providers/Microsoft.Maps/accounts`.
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<MapsAccounts, list_by_subscription::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.Maps/accounts",
        &operation_config.base_path, subscription_id
    );
    let mut req_builder = client.get(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_subscription::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(list_by_subscription::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_by_subscription::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
            let rsp_value: MapsAccounts = serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_by_subscription::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_subscription::DeserializeError { body })?;
            list_by_subscription::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `list_by_subscription`.
pub mod list_by_subscription {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `list_by_subscription` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Retrieves the access keys of a Maps account via
/// `POST .../accounts/{account_name}/listKeys` (POST with an empty body,
/// hence the explicit `Content-Length: 0` header).
pub async fn list_keys(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
) -> std::result::Result<MapsAccountKeys, list_keys::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/listKeys",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.post(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_keys::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    // Body-less POST: some servers reject it without an explicit length.
    req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
    let req = req_builder.build().context(list_keys::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_keys::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?;
            let rsp_value: MapsAccountKeys = serde_json::from_slice(&body).context(list_keys::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_keys::DeserializeError { body })?;
            list_keys::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `list_keys`.
pub mod list_keys {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `list_keys` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// Regenerates the primary or secondary key of a Maps account via
/// `POST .../accounts/{account_name}/regenerateKey`; `key_specification`
/// (JSON body) selects which key to roll, and the fresh key set is returned.
pub async fn regenerate_keys(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    account_name: &str,
    key_specification: &MapsKeySpecification,
) -> std::result::Result<MapsAccountKeys, regenerate_keys::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/regenerateKey",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.post(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(regenerate_keys::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(key_specification);
    let req = req_builder.build().context(regenerate_keys::BuildRequestError)?;
    let rsp = client.execute(req).await.context(regenerate_keys::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(regenerate_keys::ResponseBytesError)?;
            let rsp_value: MapsAccountKeys = serde_json::from_slice(&body).context(regenerate_keys::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(regenerate_keys::ResponseBytesError)?;
            let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(regenerate_keys::DeserializeError { body })?;
            regenerate_keys::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
// Generated error types for `regenerate_keys`.
pub mod regenerate_keys {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Failure modes of the `regenerate_keys` call.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse {
            status_code: StatusCode,
            value: models::ErrorResponse,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
/// Operations client for the provider-level `Microsoft.Maps/operations` endpoint.
pub mod maps {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists all available Maps REST API operations.
    ///
    /// Issues `GET {base_path}/providers/Microsoft.Maps/operations` and
    /// deserializes a `200 OK` body into [`MapsOperations`]; any other status
    /// becomes `list_operations::Error::DefaultResponse`.
    pub async fn list_operations(operation_config: &crate::OperationConfig) -> std::result::Result<MapsOperations, list_operations::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Maps/operations", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Bearer auth is optional: only attached when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_operations::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_operations::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_operations::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_operations::ResponseBytesError)?;
                let rsp_value: MapsOperations = serde_json::from_slice(&body).context(list_operations::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any other status: decode the service error payload and fail with it.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_operations::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_operations::DeserializeError { body })?;
                list_operations::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_operations`], one variant per failure stage.
    pub mod list_operations {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            /// Non-success HTTP status with the decoded service error body.
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Client operations for the `privateAtlases` child resources of a Maps account.
pub mod private_atlases {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Fetches a single private atlas by name (`GET .../privateAtlases/{name}`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        private_atlas_name: &str,
    ) -> std::result::Result<PrivateAtlas, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/privateAtlases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_atlas_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth is optional: only attached when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: PrivateAtlas = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any other status: decode the service error payload and fail with it.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`], one variant per failure stage.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces a private atlas (`PUT .../privateAtlases/{name}`).
    ///
    /// Returns `Response::Ok200` when an existing atlas was updated and
    /// `Response::Created201` when a new one was created.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        private_atlas_name: &str,
        private_atlas_create_parameters: &PrivateAtlasCreateParameters,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/privateAtlases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_atlas_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Resource definition goes in the JSON body.
        req_builder = req_builder.json(private_atlas_create_parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: PrivateAtlas = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: PrivateAtlas = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success outcomes: updated (200) vs. freshly created (201).
        #[derive(Debug)]
        pub enum Response {
            Ok200(PrivateAtlas),
            Created201(PrivateAtlas),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Partially updates a private atlas (`PATCH .../privateAtlases/{name}`).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        private_atlas_name: &str,
        private_atlas_update_parameters: &PrivateAtlasUpdateParameters,
    ) -> std::result::Result<PrivateAtlas, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/privateAtlases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_atlas_name
        );
        let mut req_builder = client.patch(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(private_atlas_update_parameters);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: PrivateAtlas = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`update`], one variant per failure stage.
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a private atlas (`DELETE .../privateAtlases/{name}`).
    ///
    /// 200 means deleted; 204 means there was nothing to delete.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        private_atlas_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/privateAtlases/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_atlas_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            // Success responses carry no body worth decoding.
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success outcomes: deleted (200) vs. already absent (204).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Lists all private atlases under a Maps account (`GET .../privateAtlases`).
    pub async fn list_by_account(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<PrivateAtlasList, list_by_account::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/privateAtlases",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_account::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_account::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_account::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_account::ResponseBytesError)?;
                let rsp_value: PrivateAtlasList = serde_json::from_slice(&body).context(list_by_account::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_account::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_account::DeserializeError { body })?;
                list_by_account::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_by_account`], one variant per failure stage.
    pub mod list_by_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Client operations for the `creators` child resources of a Maps account.
pub mod creators {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// Lists all Creator resources under a Maps account (`GET .../creators`).
    pub async fn list_by_account(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
    ) -> std::result::Result<CreatorList, list_by_account::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/creators",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.get(uri_str);
        // Bearer auth is optional: only attached when a token credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_account::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_account::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_account::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_account::ResponseBytesError)?;
                let rsp_value: CreatorList = serde_json::from_slice(&body).context(list_by_account::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any other status: decode the service error payload and fail with it.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_account::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_account::DeserializeError { body })?;
                list_by_account::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`list_by_account`], one variant per failure stage.
    pub mod list_by_account {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Fetches a single Creator resource by name (`GET .../creators/{name}`).
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        creator_name: &str,
    ) -> std::result::Result<Creator, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/creators/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, creator_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: Creator = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`get`], one variant per failure stage.
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Creates or replaces a Creator resource (`PUT .../creators/{name}`).
    ///
    /// Returns `Response::Ok200` when an existing resource was updated and
    /// `Response::Created201` when a new one was created.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        creator_name: &str,
        creator_create_parameters: &CreatorCreateParameters,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/creators/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, creator_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        // Resource definition goes in the JSON body.
        req_builder = req_builder.json(creator_create_parameters);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: Creator = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: Creator = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success outcomes: updated (200) vs. freshly created (201).
        #[derive(Debug)]
        pub enum Response {
            Ok200(Creator),
            Created201(Creator),
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Partially updates a Creator resource (`PATCH .../creators/{name}`).
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        creator_name: &str,
        creator_update_parameters: &CreatorUpdateParameters,
    ) -> std::result::Result<Creator, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/creators/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, creator_name
        );
        let mut req_builder = client.patch(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(creator_update_parameters);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: Creator = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error type for [`update`], one variant per failure stage.
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// Deletes a Creator resource (`DELETE .../creators/{name}`).
    ///
    /// 200 means deleted; 204 means there was nothing to delete.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        account_name: &str,
        creator_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Maps/accounts/{}/creators/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, creator_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            // Success responses carry no body worth decoding.
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response and error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        /// Success outcomes: deleted (200) vs. already absent (204).
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
|
use core::arch::x86_64::*;
use std::mem;
/// Create PoT proof with checkpoints
///
/// Performs `num_checkpoints * checkpoint_iterations` chained AES-128 block
/// encryptions of `seed` under `key`, recording the intermediate ciphertext
/// once per `checkpoint_iterations` iterations as one checkpoint.
///
/// Key-whitening trick: the working register is XORed with round key 0 once
/// up front, and the final round uses `k10 ^ k0` instead of `k10`, so every
/// iteration ends with `ciphertext ^ k0` — already in the form the next
/// iteration's middle rounds expect. A checkpoint is recovered by XORing
/// `k0` back out.
///
/// # Safety
/// The caller must guarantee the running CPU supports the `aes` target
/// feature (AES-NI) before calling this function.
#[target_feature(enable = "aes")]
pub(super) unsafe fn create(
    seed: &[u8; 16],
    key: &[u8; 16],
    num_checkpoints: u8,
    checkpoint_iterations: u32,
) -> Vec<[u8; 16]> {
    let mut checkpoints = Vec::with_capacity(usize::from(num_checkpoints));
    let keys_reg = expand_key(key);
    // Combined final-round key for the whitening trick described above.
    let xor_key = _mm_xor_si128(keys_reg[10], keys_reg[0]);
    let mut seed_reg = _mm_loadu_si128(seed.as_ptr() as *const __m128i);
    // Initial AddRoundKey, hoisted out of the iteration loop.
    seed_reg = _mm_xor_si128(seed_reg, keys_reg[0]);
    for _ in 0..num_checkpoints {
        for _ in 0..checkpoint_iterations {
            // Nine middle rounds of AES-128 ...
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[1]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[2]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[3]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[4]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[5]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[6]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[7]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[8]);
            seed_reg = _mm_aesenc_si128(seed_reg, keys_reg[9]);
            // ... plus the final round keyed with `k10 ^ k0`.
            seed_reg = _mm_aesenclast_si128(seed_reg, xor_key);
        }
        // Undo the `k0` whitening to obtain the actual ciphertext checkpoint.
        let checkpoint_reg = _mm_xor_si128(seed_reg, keys_reg[0]);
        // SAFETY: an all-zero `[u8; 16]` is a valid value, and it is fully
        // overwritten by the unaligned store below.
        let mut checkpoint: [u8; 16] = mem::zeroed();
        _mm_storeu_si128(checkpoint.as_mut_ptr() as *mut __m128i, checkpoint_reg);
        checkpoints.push(checkpoint);
    }
    checkpoints
}
// Below code copied with minor changes from following place under MIT/Apache-2.0 license by Artyom
// Pavlov:
// https://github.com/RustCrypto/block-ciphers/blob/9413fcadd28d53854954498c0589b747d8e4ade2/aes/src/ni/aes128.rs
/// AES-128 round keys
type RoundKeys = [__m128i; 11];
/// One step of the AES-128 key schedule: derives `$keys[$pos]` from
/// `$keys[$pos - 1]` using AESKEYGENASSIST with round constant `$round`.
macro_rules! expand_round {
    ($keys:expr, $pos:expr, $round:expr) => {
        let mut t1 = $keys[$pos - 1];
        let mut t2;
        let mut t3;
        // RotWord/SubWord of the previous key word, broadcast across lanes.
        t2 = _mm_aeskeygenassist_si128(t1, $round);
        t2 = _mm_shuffle_epi32(t2, 0xff);
        // XOR-fold the previous key with itself shifted left by 4, 8 and 12
        // bytes so each 32-bit word accumulates all words before it.
        t3 = _mm_slli_si128(t1, 0x4);
        t1 = _mm_xor_si128(t1, t3);
        t3 = _mm_slli_si128(t3, 0x4);
        t1 = _mm_xor_si128(t1, t3);
        t3 = _mm_slli_si128(t3, 0x4);
        t1 = _mm_xor_si128(t1, t3);
        t1 = _mm_xor_si128(t1, t2);
        $keys[$pos] = t1;
    };
}
/// Expands a 16-byte AES-128 key into the full 11-entry round-key schedule.
///
/// # Safety (implicit contract)
/// Must only be called on a CPU with the `aes` target feature enabled.
#[target_feature(enable = "aes")]
unsafe fn expand_key(key: &[u8; 16]) -> RoundKeys {
    // SAFETY: `RoundKeys` is a `[__m128i; 11]` which can be initialized
    // with all zeroes.
    let mut keys: RoundKeys = mem::zeroed();
    let k = _mm_loadu_si128(key.as_ptr() as *const __m128i);
    keys[0] = k;
    // Round constants 0x01..0x36 per the AES-128 key schedule.
    expand_round!(keys, 1, 0x01);
    expand_round!(keys, 2, 0x02);
    expand_round!(keys, 3, 0x04);
    expand_round!(keys, 4, 0x08);
    expand_round!(keys, 5, 0x10);
    expand_round!(keys, 6, 0x20);
    expand_round!(keys, 7, 0x40);
    expand_round!(keys, 8, 0x80);
    expand_round!(keys, 9, 0x1B);
    expand_round!(keys, 10, 0x36);
    keys
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
// Each utility sub-module is declared and then re-exported wholesale so
// downstream code can reach everything through this parent module.
pub mod file_util;
pub use file_util::*;
// `utils::utils` deliberately repeats the parent module name.
#[allow(clippy::module_inception)]
pub mod utils;
pub use utils::*;
pub mod bit_vec_extension;
pub use bit_vec_extension::*;
pub mod rayon_util;
pub use rayon_util::*;
pub mod timer;
pub use timer::*;
pub mod cached_reader;
pub use cached_reader::*;
pub mod cached_writer;
pub use cached_writer::*;
pub mod partition;
pub use partition::*;
pub mod math_util;
pub use math_util::*;
pub mod kmeans;
pub use kmeans::*;
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use rocksdb::SliceTransform;
/// A slice transform that treats the first `prefix_len` bytes of a key as
/// its prefix.
pub struct FixedPrefixSliceTransform {
    /// Number of leading bytes extracted as the prefix.
    pub prefix_len: usize,
}

impl FixedPrefixSliceTransform {
    /// Builds a transform that keeps the first `prefix_len` bytes of each key.
    pub fn new(prefix_len: usize) -> FixedPrefixSliceTransform {
        Self { prefix_len }
    }
}
impl SliceTransform for FixedPrefixSliceTransform {
    /// Returns the leading `prefix_len` bytes of `key` as its prefix.
    fn transform<'a>(&mut self, key: &'a [u8]) -> &'a [u8] {
        let end = self.prefix_len;
        &key[0..end]
    }

    /// A key participates in prefix extraction only when it is long enough.
    fn in_domain(&mut self, key: &[u8]) -> bool {
        self.prefix_len <= key.len()
    }

    /// Every extracted prefix is accepted.
    fn in_range(&mut self, _: &[u8]) -> bool {
        true
    }
}
/// A pass-through slice transform: every key is its own prefix.
pub struct NoopSliceTransform;
impl SliceTransform for NoopSliceTransform {
    // The whole key is returned unchanged.
    fn transform<'a>(&mut self, key: &'a [u8]) -> &'a [u8] {
        key
    }
    // All keys are in the domain.
    fn in_domain(&mut self, _: &[u8]) -> bool {
        true
    }
    // All prefixes are in range.
    fn in_range(&mut self, _: &[u8]) -> bool {
        true
    }
}
|
use std::collections::HashMap;
use super::type_table::{TypeTableEntry, TypeTableType};
/// Lookup table mapping FIT global message numbers to their names.
pub struct GlobalMessage {
    // Backing table keyed by global message number.
    table: TypeTableType
}
impl TypeTableEntry for GlobalMessage {
    /// Looks up the name for a global message number, falling back to the
    /// numeric key rendered as a string when the number is unknown.
    fn get(&self, key: u16) -> String {
        // The original matched on the lookup and called `.clone()` on a
        // freshly allocated `key.to_string()` — a redundant extra allocation.
        self.table
            .get(&key)
            .cloned()
            .unwrap_or_else(|| key.to_string())
    }
}
impl GlobalMessage {
    /// Builds a `GlobalMessage` populated with the full FIT global message
    /// number -> name table.
    pub fn new() -> GlobalMessage {
        GlobalMessage {
            table: GlobalMessage::make_global_table(),
        }
    }
    /// Constructs the FIT global-message lookup table.
    ///
    /// Message numbers and names follow the Garmin FIT SDK profile; the
    /// 0xFF00..=0xFFFE range is reserved for manufacturer-specific messages.
    fn make_global_table() -> TypeTableType {
        let mut global_table = TypeTableType::new();
        global_table.insert(0, String::from("file_id"));
        global_table.insert(1, String::from("capabilities"));
        global_table.insert(2, String::from("device_settings"));
        global_table.insert(3, String::from("user_profile"));
        global_table.insert(4, String::from("hrm_profile"));
        global_table.insert(5, String::from("sdm_profile"));
        global_table.insert(6, String::from("bike_profile"));
        global_table.insert(7, String::from("zones_target"));
        global_table.insert(8, String::from("hr_zone"));
        global_table.insert(9, String::from("power_zone"));
        global_table.insert(10, String::from("met_zone"));
        global_table.insert(12, String::from("sport"));
        global_table.insert(15, String::from("goal"));
        global_table.insert(18, String::from("session"));
        global_table.insert(19, String::from("lap"));
        global_table.insert(20, String::from("record"));
        global_table.insert(21, String::from("event"));
        global_table.insert(23, String::from("device_info"));
        global_table.insert(26, String::from("workout"));
        global_table.insert(27, String::from("workout_step"));
        global_table.insert(28, String::from("schedule"));
        global_table.insert(30, String::from("weight_scale"));
        global_table.insert(31, String::from("course"));
        global_table.insert(32, String::from("course_point"));
        global_table.insert(33, String::from("totals"));
        global_table.insert(34, String::from("activity"));
        global_table.insert(35, String::from("software"));
        global_table.insert(37, String::from("file_capabilities"));
        global_table.insert(38, String::from("mesg_capabilities"));
        global_table.insert(39, String::from("field_capabilities"));
        global_table.insert(49, String::from("file_creator"));
        global_table.insert(51, String::from("blood_pressure"));
        global_table.insert(53, String::from("speed_zone"));
        global_table.insert(55, String::from("monitoring"));
        global_table.insert(72, String::from("training_file"));
        global_table.insert(78, String::from("hrv"));
        global_table.insert(80, String::from("ant_rx"));
        global_table.insert(81, String::from("ant_tx"));
        global_table.insert(82, String::from("ant_channel_id"));
        global_table.insert(101, String::from("length"));
        global_table.insert(103, String::from("monitoring_info"));
        global_table.insert(105, String::from("pad"));
        global_table.insert(106, String::from("slave_device"));
        // Fixed: was misspelled "cadance_sonze"; mesg 131 is cadence_zone
        // per the FIT SDK profile.
        global_table.insert(131, String::from("cadence_zone"));
        global_table.insert(132, String::from("hr"));
        global_table.insert(142, String::from("segment_lap"));
        global_table.insert(145, String::from("memo_glob"));
        global_table.insert(148, String::from("segment_id"));
        global_table.insert(149, String::from("segment_leaderboard_entry"));
        global_table.insert(150, String::from("segment_point"));
        global_table.insert(151, String::from("segment_file"));
        global_table.insert(160, String::from("gps_metadata"));
        global_table.insert(161, String::from("camera_event"));
        global_table.insert(162, String::from("timestamp_correlation"));
        global_table.insert(164, String::from("gyroscope_data"));
        global_table.insert(165, String::from("accelerometer_data"));
        global_table.insert(167, String::from("three_d_sensor_calibration"));
        global_table.insert(169, String::from("video_frame"));
        global_table.insert(174, String::from("obdii_data"));
        global_table.insert(177, String::from("nmea_sentence"));
        global_table.insert(178, String::from("aviation_attitude"));
        global_table.insert(184, String::from("video"));
        global_table.insert(185, String::from("video_title"));
        global_table.insert(186, String::from("video_description"));
        global_table.insert(187, String::from("video_clip"));
        global_table.insert(0xFF00, String::from("mfg_range_min"));
        global_table.insert(0xFFFE, String::from("mfg_range_max"));
        global_table
    }
}
|
/// The sequence number of a message. The first message in the file has `SeqNum(0)`.
///
/// A newtype over `u64`, which has a total order — so `Eq`, `Ord`, and `Hash`
/// are derived as well (the original only derived `PartialEq`/`PartialOrd`,
/// which prevented sorting and use as a map key).
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)]
pub struct SeqNum(pub u64);
/// A byte offset into the file.
///
/// Offsets are totally ordered `u64`s, so `Eq`, `PartialOrd`, `Ord`, and
/// `Hash` are derived in addition to the original `PartialEq`, allowing
/// offsets to be compared, sorted, and used as map keys.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)]
pub struct ByteOffset(pub u64);
/// A field/value pair used to select messages.
///
/// NOTE(review): semantics inferred from the field names — presumably `field`
/// is a field number and `value` the value it must hold; confirm at call sites.
/// `Eq` and `Hash` are derived alongside the original `PartialEq` since both
/// members are plain `u64`s.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
pub struct Target { pub field: u64, pub value: u64 }
|
mod cmd;
mod config;
mod error;
pub use cmd::*;
pub use config::*;
pub use error::make_generic as make_generic_error;
|
use super::GetFlag;
use super::{Gender, GetFlag::*, Results};
use serde::Deserialize;
/// All valid flags for get staff method.
///
/// Each flag requests a subset of the optional fields in [`GetStaffResponse`]
/// (the exact flag-to-field mapping is defined by the VNDB API — confirm there).
pub const STAFF_FLAGS: [GetFlag; 5] = [Basic, Details, Aliases, Vns, Voiced];
/// Results returned from get staff method
#[derive(Deserialize, Debug, PartialEq)]
pub struct GetStaffResults {
    /// Response metadata shared by all "get" methods (flattened into this struct).
    #[serde(flatten)]
    pub results: Results,
    /// One entry per staff member matched by the query.
    pub items: Vec<GetStaffResponse>,
}
/// All fields returned by get staff method
/// fields are either Some or None depending on GetFlag param passed to get function
#[derive(Deserialize, Debug, PartialEq)]
pub struct GetStaffResponse {
    /// Staff entry id; the only field that is always present.
    pub id: usize,
    pub name: Option<String>,
    /// Name in the original script, if different from `name`.
    #[serde(rename = "original")]
    pub original_name: Option<String>,
    pub gender: Option<Gender>,
    pub language: Option<String>,
    /// Links to external sites for this person.
    pub links: Option<Links>,
    pub description: Option<String>,
    // TODO Deserialize to struct
    // Tuple layout appears to be (alias id, name, original name) — confirm
    // against the VNDB API docs before relying on it.
    pub aliases: Option<Vec<(usize, String, String)>>,
    /// Alias id of the name this entry is primarily known by.
    pub main_alias: Option<usize>,
    /// Visual novels this person was credited in (non-voicing roles).
    pub vns: Option<Vec<Vn>>,
    /// Characters this person has voiced.
    pub voiced: Option<Vec<CV>>,
}
/// External site links
#[derive(Deserialize, Debug, PartialEq)]
pub struct Links {
    pub homepage: Option<String>,
    /// Wikipedia page name (deprecated upstream in favor of `wikidata` — verify).
    pub wikipedia: Option<String>,
    /// Twitter handle, without the leading `@`.
    pub twitter: Option<String>,
    /// Numeric AniDB creator id.
    #[serde(rename = "anidb")]
    pub anidb_id: Option<usize>,
    pub pixiv: Option<String>,
    /// Wikidata identifier (e.g. "Q1234").
    pub wikidata: Option<String>,
}
/// Visual novels that this staff entry has been credited in (excluding character voicing).
#[derive(Deserialize, Debug, PartialEq)]
pub struct Vn {
    /// Visual novel id.
    pub id: usize,
    /// Alias id under which the person was credited.
    #[serde(rename = "aid")]
    pub alias_id: usize,
    /// Role description (e.g. scenario, art) as reported by the API.
    pub role: String,
    pub note: Option<String>,
}
/// Characters that this staff entry has voiced.
#[derive(Deserialize, Debug, PartialEq)]
pub struct CV {
    /// Visual novel in which the character was voiced.
    #[serde(rename = "id")]
    pub vn_id: usize,
    /// Alias id under which the person was credited.
    #[serde(rename = "aid")]
    pub alias_id: usize,
    /// Id of the voiced character.
    #[serde(rename = "cid")]
    pub character_id: usize,
    pub note: Option<String>,
}
|
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! This module defines `HostState` and `HostContext` structs which provide logic and state
//! required for execution of host.
use crate::instance_wrapper::InstanceWrapper;
use crate::util;
use codec::{Decode, Encode};
use log::trace;
use sc_executor_common::error::Result;
use sc_executor_common::sandbox::{self, SandboxCapabilities, SupervisorFuncIndex};
use sp_allocator::FreeingBumpHeapAllocator;
use sp_core::sandbox as sandbox_primitives;
use sp_wasm_interface::{FunctionContext, MemoryId, Pointer, Sandbox, WordSize};
use std::{cell::RefCell, rc::Rc};
use wasmtime::{Func, Val};
/// Wrapper type for pointer to a Wasm table entry.
///
/// The wrapper type is used to ensure that the function reference is valid as it must be unsafely
/// dereferenced from within the safe method `<HostContext as SandboxCapabilities>::invoke`.
// The inner `Func` is private; it is only reached through `.0` inside `invoke`.
#[derive(Clone)]
pub struct SupervisorFuncRef(Func);
/// The state required to construct a HostContext context. The context only lasts for one host
/// call, whereas the state is maintained for the duration of a Wasm runtime call, which may make
/// many different host calls that must share state.
pub struct HostState {
    // We need some interior mutability here since the host state is shared between all host
    // function handlers and the wasmtime backend's `impl WasmRuntime`.
    //
    // Furthermore, because of recursive calls (e.g. runtime can create and call an sandboxed
    // instance which in turn can call the runtime back) we have to be very careful with borrowing
    // those.
    //
    // Basically, most of the interactions should do temporary borrow immediately releasing the
    // borrow after performing necessary queries/changes.
    /// Store of sandbox instances and memories created by the runtime.
    sandbox_store: RefCell<sandbox::Store<SupervisorFuncRef>>,
    /// Allocator managing the supervisor's wasm linear-memory heap.
    allocator: RefCell<FreeingBumpHeapAllocator>,
    /// The wasmtime instance whose memory/table this state operates on.
    instance: Rc<InstanceWrapper>,
}
impl HostState {
    /// Builds a fresh `HostState` around the given heap allocator and
    /// wasmtime instance. The sandbox store starts out empty.
    pub fn new(allocator: FreeingBumpHeapAllocator, instance: Rc<InstanceWrapper>) -> Self {
        Self {
            instance,
            allocator: RefCell::new(allocator),
            sandbox_store: RefCell::new(sandbox::Store::new()),
        }
    }

    /// Borrows this state as a short-lived `HostContext`, which is what a
    /// substrate host `dyn Function` is invoked through.
    pub fn materialize<'a>(&'a self) -> HostContext<'a> {
        HostContext(self)
    }
}
/// A `HostContext` implements `FunctionContext` for making host calls from a Wasmtime
/// runtime. The `HostContext` exists only for the lifetime of the call and borrows state from
/// a longer-living `HostState`.
pub struct HostContext<'a>(&'a HostState);
// Deref to the underlying `HostState` so methods on `HostContext` can access
// `sandbox_store`, `allocator`, and `instance` directly.
impl<'a> std::ops::Deref for HostContext<'a> {
    type Target = HostState;
    fn deref(&self) -> &HostState {
        self.0
    }
}
impl<'a> SandboxCapabilities for HostContext<'a> {
    type SupervisorFuncRef = SupervisorFuncRef;

    /// Calls the supervisor dispatch thunk with the serialized invocation
    /// arguments. The thunk must return exactly one `i64`.
    fn invoke(
        &mut self,
        dispatch_thunk: &Self::SupervisorFuncRef,
        invoke_args_ptr: Pointer<u8>,
        invoke_args_len: WordSize,
        state: u32,
        func_idx: SupervisorFuncIndex,
    ) -> Result<i64> {
        // All arguments travel as plain wasm i32 values.
        let args = [
            Val::I32(u32::from(invoke_args_ptr) as i32),
            Val::I32(invoke_args_len as i32),
            Val::I32(state as i32),
            Val::I32(usize::from(func_idx) as i32),
        ];
        // A trap in the thunk surfaces as a stringified error.
        let ret_vals = dispatch_thunk.0.call(&args).map_err(|err| err.to_string())?;
        if ret_vals.len() != 1 {
            return Err(format!(
                "Supervisor function returned {} results, expected 1",
                ret_vals.len()
            )
            .into());
        }
        match ret_vals[0].i64() {
            Some(ret_val) => Ok(ret_val),
            None => Err("Supervisor function returned unexpected result!".into()),
        }
    }
}
impl<'a> sp_wasm_interface::FunctionContext for HostContext<'a> {
    /// Reads `dest.len()` bytes from the supervisor's wasm memory at `address`.
    fn read_memory_into(
        &self,
        address: Pointer<u8>,
        dest: &mut [u8],
    ) -> sp_wasm_interface::Result<()> {
        self.instance.read_memory_into(address, dest).map_err(|e| e.to_string())
    }
    /// Writes `data` into the supervisor's wasm memory at `address`.
    fn write_memory(&mut self, address: Pointer<u8>, data: &[u8]) -> sp_wasm_interface::Result<()> {
        self.instance.write_memory_from(address, data).map_err(|e| e.to_string())
    }
    /// Allocates `size` bytes on the wasm heap via the freeing-bump allocator.
    fn allocate_memory(&mut self, size: WordSize) -> sp_wasm_interface::Result<Pointer<u8>> {
        self.instance.allocate(&mut *self.allocator.borrow_mut(), size).map_err(|e| e.to_string())
    }
    /// Returns previously allocated wasm heap memory at `ptr` to the allocator.
    fn deallocate_memory(&mut self, ptr: Pointer<u8>) -> sp_wasm_interface::Result<()> {
        self.instance.deallocate(&mut *self.allocator.borrow_mut(), ptr).map_err(|e| e.to_string())
    }
    /// The sandbox interface is implemented by `HostContext` itself.
    fn sandbox(&mut self) -> &mut dyn Sandbox {
        self
    }
}
impl<'a> Sandbox for HostContext<'a> {
    /// Copies `buf_len` bytes out of sandboxed memory `memory_id` at `offset`
    /// into supervisor memory at `buf_ptr`. Returns an `ERR_*` status word;
    /// out-of-range source or destination yields `ERR_OUT_OF_BOUNDS`.
    fn memory_get(
        &mut self,
        memory_id: MemoryId,
        offset: WordSize,
        buf_ptr: Pointer<u8>,
        buf_len: WordSize,
    ) -> sp_wasm_interface::Result<u32> {
        // Borrow the store only long enough to look the memory up; the host
        // state is shared, so borrows must be released quickly (see HostState).
        let sandboxed_memory =
            self.sandbox_store.borrow().memory(memory_id).map_err(|e| e.to_string())?;
        sandboxed_memory.with_direct_access(|sandboxed_memory| {
            let len = buf_len as usize;
            // Validate the source range within the sandboxed memory.
            let src_range = match util::checked_range(offset as usize, len, sandboxed_memory.len())
            {
                Some(range) => range,
                None => return Ok(sandbox_primitives::ERR_OUT_OF_BOUNDS),
            };
            // Validate the destination range within the supervisor's memory.
            let supervisor_mem_size = self.instance.memory_size() as usize;
            let dst_range = match util::checked_range(buf_ptr.into(), len, supervisor_mem_size) {
                Some(range) => range,
                None => return Ok(sandbox_primitives::ERR_OUT_OF_BOUNDS),
            };
            self.instance
                .write_memory_from(
                    Pointer::new(dst_range.start as u32),
                    &sandboxed_memory[src_range],
                )
                .expect("ranges are checked above; write can't fail; qed");
            Ok(sandbox_primitives::ERR_OK)
        })
    }
    /// Copies `val_len` bytes from supervisor memory at `val_ptr` into
    /// sandboxed memory `memory_id` at `offset`. Mirror image of `memory_get`.
    fn memory_set(
        &mut self,
        memory_id: MemoryId,
        offset: WordSize,
        val_ptr: Pointer<u8>,
        val_len: WordSize,
    ) -> sp_wasm_interface::Result<u32> {
        let sandboxed_memory =
            self.sandbox_store.borrow().memory(memory_id).map_err(|e| e.to_string())?;
        sandboxed_memory.with_direct_access_mut(|sandboxed_memory| {
            let len = val_len as usize;
            // Validate the source range within the supervisor's memory.
            let supervisor_mem_size = self.instance.memory_size() as usize;
            let src_range = match util::checked_range(val_ptr.into(), len, supervisor_mem_size) {
                Some(range) => range,
                None => return Ok(sandbox_primitives::ERR_OUT_OF_BOUNDS),
            };
            // Validate the destination range within the sandboxed memory.
            let dst_range = match util::checked_range(offset as usize, len, sandboxed_memory.len())
            {
                Some(range) => range,
                None => return Ok(sandbox_primitives::ERR_OUT_OF_BOUNDS),
            };
            self.instance
                .read_memory_into(
                    Pointer::new(src_range.start as u32),
                    &mut sandboxed_memory[dst_range],
                )
                .expect("ranges are checked above; read can't fail; qed");
            Ok(sandbox_primitives::ERR_OK)
        })
    }
    /// Drops the sandboxed memory identified by `memory_id` from the store.
    fn memory_teardown(&mut self, memory_id: MemoryId) -> sp_wasm_interface::Result<()> {
        self.sandbox_store.borrow_mut().memory_teardown(memory_id).map_err(|e| e.to_string())
    }
    /// Creates a new sandboxed memory and returns its id.
    ///
    /// NOTE(review): `maximum` is typed `MemoryId` but is passed as the
    /// maximum size to `new_memory` — presumably both alias `u32`; confirm.
    fn memory_new(&mut self, initial: u32, maximum: MemoryId) -> sp_wasm_interface::Result<u32> {
        self.sandbox_store.borrow_mut().new_memory(initial, maximum).map_err(|e| e.to_string())
    }
    /// Invokes `export_name` of sandboxed instance `instance_id` with the
    /// SCALE-encoded `args`, writing an encoded return value (if any) into
    /// the supervisor buffer at `return_val`/`return_val_len`.
    fn invoke(
        &mut self,
        instance_id: u32,
        export_name: &str,
        args: &[u8],
        return_val: Pointer<u8>,
        return_val_len: u32,
        state: u32,
    ) -> sp_wasm_interface::Result<u32> {
        trace!(target: "sp-sandbox", "invoke, instance_idx={}", instance_id);
        // Deserialize arguments and convert them into wasmi types.
        let args = Vec::<sp_wasm_interface::Value>::decode(&mut &args[..])
            .map_err(|_| "Can't decode serialized arguments for the invocation")?
            .into_iter()
            .map(Into::into)
            .collect::<Vec<_>>();
        let instance =
            self.sandbox_store.borrow().instance(instance_id).map_err(|e| e.to_string())?;
        let result = instance.invoke(export_name, &args, self, state);
        match result {
            Ok(None) => Ok(sandbox_primitives::ERR_OK),
            Ok(Some(val)) => {
                // Serialize return value and write it back into the memory.
                sp_wasm_interface::ReturnValue::Value(val.into()).using_encoded(|val| {
                    if val.len() > return_val_len as usize {
                        Err("Return value buffer is too small")?;
                    }
                    <HostContext as FunctionContext>::write_memory(self, return_val, val)
                        .map_err(|_| "can't write return value")?;
                    Ok(sandbox_primitives::ERR_OK)
                })
            },
            // Execution failures are reported as a status code, not an Err.
            Err(_) => Ok(sandbox_primitives::ERR_EXECUTION),
        }
    }
    /// Drops the sandboxed instance identified by `instance_id` from the store.
    fn instance_teardown(&mut self, instance_id: u32) -> sp_wasm_interface::Result<()> {
        self.sandbox_store.borrow_mut().instance_teardown(instance_id).map_err(|e| e.to_string())
    }
    /// Instantiates a new sandboxed wasm module, wiring its imports through
    /// the dispatch thunk at `dispatch_thunk_id` in the runtime's table.
    /// Returns the new instance id, or an `ERR_*` code on failure.
    fn instance_new(
        &mut self,
        dispatch_thunk_id: u32,
        wasm: &[u8],
        raw_env_def: &[u8],
        state: u32,
    ) -> sp_wasm_interface::Result<u32> {
        // Extract a dispatch thunk from the instance's table by the specified index.
        let dispatch_thunk = {
            let table_item = self
                .instance
                .table()
                .as_ref()
                .ok_or_else(|| "Runtime doesn't have a table; sandbox is unavailable")?
                .get(dispatch_thunk_id);
            let func_ref = table_item
                .ok_or_else(|| "dispatch_thunk_id is out of bounds")?
                .funcref()
                .ok_or_else(|| "dispatch_thunk_idx should be a funcref")?
                .ok_or_else(|| "dispatch_thunk_idx should point to actual func")?
                .clone();
            SupervisorFuncRef(func_ref)
        };
        // A malformed environment definition is a module error, not a host error.
        let guest_env =
            match sandbox::GuestEnvironment::decode(&*self.sandbox_store.borrow(), raw_env_def) {
                Ok(guest_env) => guest_env,
                Err(_) => return Ok(sandbox_primitives::ERR_MODULE as u32),
            };
        let instance_idx_or_err_code =
            match sandbox::instantiate(self, dispatch_thunk, wasm, guest_env, state)
                .map(|i| i.register(&mut *self.sandbox_store.borrow_mut()))
            {
                Ok(instance_idx) => instance_idx,
                Err(sandbox::InstantiationError::StartTrapped) => sandbox_primitives::ERR_EXECUTION,
                Err(_) => sandbox_primitives::ERR_MODULE,
            };
        Ok(instance_idx_or_err_code as u32)
    }
    /// Reads the exported global `name` of sandboxed instance `instance_idx`.
    fn get_global_val(
        &self,
        instance_idx: u32,
        name: &str,
    ) -> sp_wasm_interface::Result<Option<sp_wasm_interface::Value>> {
        self.sandbox_store
            .borrow()
            .instance(instance_idx)
            .map(|i| i.get_global_val(name))
            .map_err(|e| e.to_string())
    }
}
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use core::{mem, slice};
use crate::{lmem};
/// A single byte of data ("data byte").
///
/// `#[repr(transparent)]` guarantees the same layout as `u8`, which the
/// pointer-cast helpers in `impl d8` rely on. The `Pod` derive (provided by
/// a crate outside this file) marks it as plain old data.
#[allow(non_camel_case_types)]
#[derive(Pod, Copy, Clone)]
#[repr(transparent)]
pub struct d8(u8);
impl d8 {
    /// Wraps a raw byte.
    pub const fn new(byte: u8) -> d8 {
        d8(byte)
    }
    /// Reinterprets a `&[u8]` as a `&[d8]`.
    pub fn from_byte_slice(bytes: &[u8]) -> &[d8] {
        // SAFETY: `d8` is `#[repr(transparent)]` over `u8`, so the pointer
        // cast preserves layout and alignment; the length is unchanged.
        unsafe {
            slice::from_raw_parts(bytes.as_ptr() as *const _, bytes.len())
        }
    }
    /// Reinterprets a `&mut [u8]` as a `&mut [d8]`.
    pub fn from_byte_slice_mut(bytes: &mut [u8]) -> &mut [d8] {
        // SAFETY: same layout argument as `from_byte_slice`; exclusivity of
        // the borrow is carried over from the input.
        unsafe {
            slice::from_raw_parts_mut(bytes.as_mut_ptr() as *mut _, bytes.len())
        }
    }
    /// Reinterprets a slice of byte slices as a slice of `d8` slices.
    pub fn from_byte_slice_slice<'a, 'b>(bytes: &'a [&'b [u8]]) -> &'a [&'b [d8]] {
        // SAFETY: `&[u8]` and `&[d8]` have identical layout (repr(transparent)
        // element type), so a slice of one can be viewed as a slice of the other.
        unsafe {
            slice::from_raw_parts(bytes.as_ptr() as *const _, bytes.len())
        }
    }
    /// Views the wrapped byte.
    ///
    /// NOTE(review): no unsafe operation occurs in the body; the `unsafe`
    /// marker presumably encodes a caller contract about `d8` contents
    /// (e.g. possibly-uninitialized data) — confirm in the crate docs.
    pub unsafe fn as_byte(&self) -> &u8 {
        &self.0
    }
    /// Mutably views the wrapped byte. See `as_byte` for the `unsafe` note.
    pub unsafe fn as_mut_byte(&mut self) -> &mut u8 {
        &mut self.0
    }
}
/// Conversions between `[d8]` data slices and raw byte slices, plus
/// alignment helpers.
pub trait DataSlice {
    /// Views the data as raw bytes. Unsafe for the same caller contract as
    /// `d8::as_byte` — see the note there.
    unsafe fn as_bytes(&self) -> &[u8];
    /// Mutable counterpart of `as_bytes`.
    unsafe fn as_mut_bytes(&mut self) -> &mut [u8];
    /// Returns the largest subslice aligned for `T`.
    fn align_for<T>(&self) -> &[d8];
    /// Mutable counterpart of `align_for`.
    fn align_for_mut<T>(&mut self) -> &mut [d8];
}
impl DataSlice for [d8] {
    unsafe fn as_bytes(&self) -> &[u8] {
        // SAFETY: `[d8]` and `[u8]` have identical layout (`d8` is
        // repr(transparent) over `u8`), so the transmute is sound.
        mem::transmute(self)
    }
    unsafe fn as_mut_bytes(&mut self) -> &mut [u8] {
        // SAFETY: same layout argument as `as_bytes`.
        mem::transmute(self)
    }
    fn align_for<T>(&self) -> &[d8] {
        // SAFETY: `lmem::align_for` returns a subslice of the bytes we pass
        // in; transmuting that subslice back to `[d8]` is the inverse of
        // `as_bytes`.
        unsafe { mem::transmute(lmem::align_for::<T>(self.as_bytes())) }
    }
    fn align_for_mut<T>(&mut self) -> &mut [d8] {
        // SAFETY: see `align_for`.
        unsafe { mem::transmute(lmem::align_for_mut::<T>(self.as_mut_bytes())) }
    }
}
|
// El discurso de Zoe
//
// Copyright (C) 2016 GUL UC3M
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use rand::{thread_rng, Rng};
use texture::scenario::Scenario;
use state::TypeState;
pub struct ExtIr;
/// Plays the "open-bank" street-promoter encounter and returns the time
/// lost, in seconds (a random 2–4 minutes; `gen_range(2, 5)` is the old
/// rand API with an exclusive upper bound).
fn open_bank() -> i32 {
    let mut rng = thread_rng();
    let lost: i32 = rng.gen_range(2, 5);
    println!("De camino a la entrada aparece una muchacha con una carpeta roja entre los");
    println!("brazos. 'Hola, ¿tienes un momentito?' Vengo a ofrecerte una cuenta");
    println!("open-bank...\n");
    // Typo fix: "Saven" -> "Saben" ("saben los dioses" = "the gods know").
    println!("Saben los dioses el tiempo que te retuvo la muchacha. Entras en el");
    println!("edificio, miras tu teléfono, has perdido {} minutos.", lost);
    // Minutes to seconds; tail expression instead of an explicit `return`.
    lost * 60
}
impl <S: TypeState> Scenario <S> for ExtIr {
    /// Prints the scene description on entry; returns `None` to stay here.
    fn load(&self, _: &mut Box<S>) -> Option<String> {
        println!("Para llegar al edificio 7 puedes elegir las entradas F, G o E.");
        println!("También puedes ir hacia el edificio Sabatini o la biblioteca");
        return None;
    }
    /// Handles a player command; returns `Some(next_scenario_key)` to switch
    /// scenarios or `None` to stay in this one. Entrances E, F, and Sabatini
    /// trigger the `open_bank` encounter (extra time lost); G and the library
    /// do not.
    fn do_action(&self, command: &str, state: &mut Box<S>) -> Option<String> {
        match command {
            "ir a E" | "ir a entrada E" | "E" | "ir E" => {
                let lost = open_bank();
                // 30 seconds walking plus whatever the encounter cost.
                state.reduce_time(lost + 30);
                state.set_string("one_zone".to_string(), "E".to_string());
                return Some("one_zero".to_string())
            },
            "ir a F" | "ir a entrada F" | "F" | "ir F" => {
                let lost = open_bank();
                state.reduce_time(lost + 30);
                state.set_string("one_zone".to_string(), "F".to_string());
                return Some("one_zero".to_string())
            },
            // Entrance G skips the encounter: flat 30-second cost.
            "ir a G" | "ir a entrada G" | "G" | "ir G" => {
                state.reduce_time(30);
                state.set_string("one_zone".to_string(), "G".to_string());
                return Some("one_zero".to_string())
            },
            "ir a Sabatini" | "ir al Sabatini" | "Sabatini" | "ir 2" | "ir al 2" => {
                let lost = open_bank();
                state.reduce_time(lost + 30);
                // Start on floor 1, corner C-D, of the Sabatini building.
                state.set_value("sabatini_floor".to_string(), 1);
                state.set_string("sabatini_corner".to_string(), "C-D".to_string());
                return Some("sabatini".to_string())
            },
            "ir a biblioteca" | "ir a la biblioteca" | "biblioteca" | "ir biblioteca" => {
                println!("Vas a la biblioteca.");
                state.reduce_time(30);
                return Some("biblioteca_hall".to_string())
            },
            // Unknown command: nudge the player, stay in this scenario.
            _ => {println!("El tiempo es oro, ¿a dónde vas a ir?")}
        }
        return None;
    }
}
|
use super::{
Load,
property::Property,
fun::Fun,
local::Local,
MemmyGenerator
};
use ir::{ Chunk, hir::HIRInstruction };
use core::pos::BiPos;
use ir_traits::{ ReadInstruction };
use notices::{
DiagnosticSourceBuilder,
DiagnosticLevel
};
/// One statement decoded from an HIR chunk, together with its source position.
#[derive(Debug, Clone)]
pub struct Statement<'a>{
    /// Source position of the statement.
    pos: BiPos,
    /// The concrete kind of statement (function, property, or local).
    kind: StatementKind<'a>,
}
/// The statement kinds that `Statement::load` can decode.
#[derive(Debug, Clone)]
pub enum StatementKind<'a>{
    /// A property declaration.
    Property(Property<'a>),
    /// A function definition.
    Fun(Fun<'a>),
    /// A local variable binding.
    Local(Local)
}
impl<'a> Load for Statement<'a>{
type Output = Statement<'a>;
fn load(chunk: &Chunk, memmy: &MemmyGenerator) -> Result<Self::Output, ()> {
let ins = chunk.read_instruction();
match &ins{
Some(HIRInstruction::Fn) => {
let fun = match Fun::load(chunk, memmy){
Ok(fun) => fun,
Err(diag) => return Err(diag)
};
let pos = match chunk.read_pos(){
Ok(pos) => pos,
Err(msg) => {
let diagnosis = DiagnosticSourceBuilder::new(memmy.module_name.clone(), 0)
.level(DiagnosticLevel::Error)
.message(msg)
.build();
memmy.emit_diagnostic(&[], &[diagnosis]);
return Err(())
}
};
Ok(Statement{
kind: StatementKind::Fun(fun),
pos
})
},
Some(HIRInstruction::Property) => {
let property = match Property::load(chunk, memmy){
Ok(fun) => fun,
Err(diag) => return Err(diag)
};
let pos = match chunk.read_pos(){
Ok(pos) => pos,
Err(msg) => {
let diagnosis = DiagnosticSourceBuilder::new(memmy.module_name.clone(), 0)
.level(DiagnosticLevel::Error)
.message(msg)
.build();
memmy.emit_diagnostic(&[], &[diagnosis]);
return Err(())
}
};
Ok(Statement{
kind: StatementKind::Property(property),
pos
})
},
Some(HIRInstruction::LocalVar) => {
let local = match Local::load(chunk, memmy){
Ok(local) => local,
Err(diag) => return Err(diag)
};
let pos = match chunk.read_pos(){
Ok(pos) => pos,
Err(msg) => {
let diagnosis = DiagnosticSourceBuilder::new(memmy.module_name.clone(), 0)
.level(DiagnosticLevel::Error)
.message(msg)
.build();
memmy.emit_diagnostic(&[], &[diagnosis]);
return Err(())
}
};
Ok(Statement{
kind: StatementKind::Local(local),
pos
})
}
_ => {
let diagnosis = DiagnosticSourceBuilder::new(memmy.module_name.clone(), 0)
.message(format!("This feature is not yet implemented: {:?}", ins.clone().unwrap()))
.level(DiagnosticLevel::Error)
.build();
memmy.emit_diagnostic(&[], &[diagnosis]);
return Err(())
}
}
}
} |
use crate::bytecode::{CodeObject, LibraryObject, Op};
use crate::error::{CompileError, Error, ErrorContext, ErrorKind, Result};
use crate::objectify::Translate;
use crate::primitive::{Arity, RuntimePrimitive};
use crate::scm::Scm;
use crate::symbol::Symbol;
use crate::syntax::definition::GlobalDefine;
use crate::syntax::variable::VarDef;
use crate::syntax::{
Alternative, Application, Assignment, BoxCreate, BoxRead, BoxWrite, Constant, Expression,
FixLet, FlatClosure, FreeReference, Function, GlobalAssignment, GlobalReference,
GlobalVariable, Import, LetContKind, LetContinuation, Library, LocalReference, Program,
Reference, Sequence, Variable,
};
use crate::utils::{Named, Sourced};
use std::collections::HashMap;
use std::path::PathBuf;
/// Compiles a whole program into a single `CodeObject`.
///
/// Imports are compiled first, then the body, terminated by `Return`.
/// One `Constant` + `InitLibrary` pair per imported library is prepended so
/// dependencies are initialized before the body runs.
pub fn compile_program(
    prog: &Program,
    trans: &Translate,
    glob_alloc: &mut GlobalAllocator,
) -> Result<CodeObject> {
    let mut bcgen = BytecodeGenerator::new(vec![], trans, glob_alloc);
    let mut code = vec![];
    code.extend(bcgen.compile_import(&prog.imports)?);
    code.extend(bcgen.compile(&prog.body, true)?);
    code.push(Op::Return);
    // Library initialization ops go in front of the compiled body.
    let mut library_code = vec![];
    for lib in bcgen.libs.clone() {
        library_code.push(bcgen.build_constant(Scm::string(lib.to_str().unwrap())));
        library_code.push(Op::InitLibrary);
    }
    library_code.extend(code);
    Ok(CodeObject::new(
        Arity::Exact(0),
        prog.body.source().clone(),
        library_code,
        bcgen.constants,
    ))
}
/// Compiles a single function body into a `CodeObject`.
///
/// `closure_vars` are the function's free variables in the order the closure
/// stores them; the function's own parameters seed the local environment.
pub fn compile_function(
    func: &Function,
    closure_vars: Vec<Symbol>,
    trans: &Translate,
    glob_alloc: &mut GlobalAllocator,
) -> Result<CodeObject> {
    let mut bcgen = BytecodeGenerator::new(closure_vars, trans, glob_alloc);
    bcgen.env = func.variables.iter().map(|var| var.name()).collect();
    let mut code = bcgen.compile(&func.body, true)?;
    code.push(Op::Return);
    Ok(CodeObject::new(
        func.arity(),
        func.span.clone(),
        code,
        bcgen.constants,
    ))
}
/// Compiles a library into a `LibraryObject`.
///
/// Like `compile_program`, but the body is compiled in non-tail position and
/// the result carries the library's global symbols and export list.
pub fn compile_library(
    lib: &Library,
    trans: &Translate,
    glob_alloc: &mut GlobalAllocator,
) -> Result<LibraryObject> {
    let mut bcgen = BytecodeGenerator::new(vec![], &trans, glob_alloc);
    let mut code = vec![];
    code.extend(bcgen.compile_import(&lib.imports)?);
    code.extend(bcgen.compile(&lib.body, false)?);
    code.push(Op::Return);
    // Initialize transitively imported libraries before the library body.
    let mut library_code = vec![];
    for lib in bcgen.libs.clone() {
        library_code.push(bcgen.build_constant(Scm::string(lib.to_str().unwrap())));
        library_code.push(Op::InitLibrary);
    }
    library_code.extend(code);
    let global_symbols: Vec<_> = trans.env.globals().map(|gv| gv.name()).collect();
    Ok(LibraryObject::new(
        lib.source().clone(),
        library_code,
        bcgen.constants,
        global_symbols,
        lib.exports.clone(),
    ))
}
/// Assigns a stable slot index to every global variable, shared across the
/// compilation of a program and its libraries.
#[derive(Debug)]
pub struct GlobalAllocator {
    /// Variable -> slot index (aliases may map to the same slot).
    globals: HashMap<GlobalVariable, usize>,
    /// Slot index -> canonical variable.
    vars: Vec<GlobalVariable>,
}
impl GlobalAllocator {
    /// Creates an empty allocator with no global slots assigned.
    pub fn new() -> Self {
        GlobalAllocator {
            globals: HashMap::new(),
            vars: vec![],
        }
    }
    /// Returns the slot index of `var`, assigning a fresh slot if it has none.
    pub fn get_idx(&mut self, var: &GlobalVariable) -> usize {
        if let Some(idx) = self.idx(var) {
            idx
        } else {
            let idx = self.vars.len();
            self.vars.push(var.clone());
            self.globals.insert(var.clone(), idx);
            idx
        }
    }
    /// Looks up the slot index of `var`, if one was assigned.
    ///
    /// Takes `&self`: the lookup never mutates the allocator (the original
    /// signature needlessly required `&mut self`; callers holding a mutable
    /// borrow still work via reborrowing).
    pub fn idx(&self, var: &GlobalVariable) -> Option<usize> {
        self.globals.get(var).copied()
    }
    /// Makes `alias` refer to the same slot as `var`, allocating if needed.
    ///
    /// # Panics
    /// Panics if both names already have slots and they differ.
    pub fn declare_alias(&mut self, var: &GlobalVariable, alias: &GlobalVariable) -> usize {
        match (self.idx(var), self.idx(alias)) {
            (None, None) => {
                let idx = self.get_idx(var);
                self.globals.insert(alias.clone(), idx);
                idx
            }
            (Some(idx), None) => {
                self.globals.insert(alias.clone(), idx);
                idx
            }
            (None, Some(idx)) => {
                // The alias was seen first; make `var` the canonical variable.
                self.globals.insert(var.clone(), idx);
                self.vars[idx] = var.clone();
                idx
            }
            (Some(idx1), Some(idx2)) => {
                if idx1 != idx2 {
                    panic!(
                        "invalid global alias -- both variables exist: {:?} ({}), {:?} ({})",
                        var, idx1, alias, idx2
                    )
                }
                idx1
            }
        }
    }
    /// Number of global slots allocated so far.
    pub fn n_vars(&self) -> usize {
        self.vars.len()
    }
    /// Returns the canonical variable stored at slot `idx`.
    ///
    /// # Panics
    /// Panics if `idx` is out of bounds.
    pub fn find_var(&self, idx: usize) -> &GlobalVariable {
        &self.vars[idx]
    }
}

// `new()` takes no arguments, so provide `Default` as clippy's
// `new_without_default` suggests.
impl Default for GlobalAllocator {
    fn default() -> Self {
        Self::new()
    }
}
/// Generates bytecode for one compilation unit (program, library, or function).
#[derive(Debug)]
pub struct BytecodeGenerator<'a> {
    /// Translation context from the objectify phase (used e.g. to inspect
    /// global variable values for intrinsics).
    trans: &'a Translate,
    /// Constant pool of the code object under construction.
    constants: Vec<Scm>,
    /// Names currently on the value stack/frame; local references are
    /// resolved to indices into this.
    env: Vec<Symbol>,
    /// Free variables of the closure currently being compiled.
    current_closure_vars: Vec<Symbol>,
    /// Library paths that must be initialized before this code runs.
    libs: Vec<PathBuf>,
    /// Shared allocator mapping global variables to slot indices.
    glob_alloc: &'a mut GlobalAllocator,
}
impl<'a> BytecodeGenerator<'a> {
    /// Creates a generator with an empty environment and constant pool.
    pub fn new(
        current_closure_vars: Vec<Symbol>,
        trans: &'a Translate,
        glob_alloc: &'a mut GlobalAllocator,
    ) -> Self {
        BytecodeGenerator {
            trans,
            constants: vec![],
            env: vec![],
            current_closure_vars,
            libs: vec![],
            glob_alloc,
        }
    }
    /// Dispatches compilation by expression kind. `tail` marks tail position
    /// (enables tail calls and suppresses cleanup after terminal ops).
    fn compile(&mut self, node: &Expression, tail: bool) -> Result<Vec<Op>> {
        use Expression::*;
        match node {
            NoOp(_) => Ok(vec![]),
            Constant(c) => self.compile_constant(c, tail),
            Sequence(s) => self.compile_sequence(s, tail),
            Alternative(a) => self.compile_alternative(a, tail),
            Reference(r) => self.compile_reference(r, tail),
            Assignment(a) => self.compile_assignment(a, tail),
            FixLet(f) => self.compile_fixlet(f, tail),
            FlatClosure(c) => self.compile_closure(c, tail),
            Application(a) => self.compile_application(a, tail),
            BoxCreate(b) => self.compile_box_create(b, tail),
            BoxWrite(b) => self.compile_box_write(b, tail),
            BoxRead(b) => self.compile_box_read(b, tail),
            GlobalDefine(d) => self.compile_global_def(d),
            // A macro name appearing as a value is a compile error.
            MagicKeyword(m) => Err(Error {
                kind: ErrorKind::Compile(CompileError::MacroUsedAsValue(m.name)),
                context: ErrorContext::Source(node.source().clone()),
            }),
            LetContinuation(l) => self.compile_letcont(l, tail),
            _ => unimplemented!(
                "Byte code compilation of:\n {:#?}\n {:?}",
                node.source(),
                node
            ),
        }
    }
    /// Compiles a variable reference (local, free, or global).
    fn compile_reference(&mut self, node: &Reference, tail: bool) -> Result<Vec<Op>> {
        use Reference::*;
        match node {
            LocalReference(l) => self.compile_local_ref(l, tail),
            FreeReference(f) => self.compile_free_ref(f, tail),
            GlobalReference(g) => self.compile_global_ref(g, tail),
        }
    }
    /// Compiles an assignment; locals are assigned via box writes instead.
    fn compile_assignment(&mut self, node: &Assignment, tail: bool) -> Result<Vec<Op>> {
        use Assignment::*;
        match node {
            LocalAssignment(_) => {
                unimplemented!("Local assignment should happen through a box write")
            }
            GlobalAssignment(g) => self.compile_global_set(g, tail),
        }
    }
    /// Emits a constant-pool load for a literal.
    fn compile_constant(&mut self, node: &Constant, _tail: bool) -> Result<Vec<Op>> {
        Ok(vec![self.build_constant((&node.value).into())])
    }
    /// Interns `value` in the constant pool (deduplicated by `equals`) and
    /// returns the op that loads it.
    fn build_constant(&mut self, value: Scm) -> Op {
        let idx = self.constants.iter().position(|x| x.equals(&value));
        let idx = match idx {
            None => {
                let n = self.constants.len();
                self.constants.push(value);
                n
            }
            Some(i) => i,
        };
        Op::Constant(idx)
    }
    /// Compiles `first; next` — only the final expression is in tail position.
    fn compile_sequence(&mut self, node: &Sequence, tail: bool) -> Result<Vec<Op>> {
        let mut m1 = self.compile(&node.first, false)?;
        let m2 = self.compile(&node.next, tail)?;
        m1.extend(m2);
        Ok(m1)
    }
    /// Compiles `if`: condition, JumpFalse over consequence, Jump over alternative.
    fn compile_alternative(&mut self, node: &Alternative, tail: bool) -> Result<Vec<Op>> {
        let m1 = self.compile(&node.condition, false)?;
        let m2 = self.compile(&node.consequence, tail)?;
        let m3 = self.compile(&node.alternative, tail)?;
        let mut meaning = m1;
        // +1 skips the unconditional Jump that follows the consequence.
        meaning.push(Op::JumpFalse(m2.len() as isize + 1));
        meaning.extend(m2);
        meaning.push(Op::Jump(m3.len() as isize));
        meaning.extend(m3);
        Ok(meaning)
    }
    /// Loads a local by its stack index.
    fn compile_local_ref(&mut self, node: &LocalReference, _tail: bool) -> Result<Vec<Op>> {
        let idx = self.index_of_local(&node.var.name());
        Ok(vec![Op::LocalRef(idx)])
    }
    /// Loads a free variable by its position in the current closure.
    fn compile_free_ref(&mut self, node: &FreeReference, _tail: bool) -> Result<Vec<Op>> {
        let idx = self
            .current_closure_vars
            .iter()
            .position(|&fv| fv == node.var.name())
            .unwrap();
        Ok(vec![Op::FreeRef(idx)])
    }
    /// Loads a global by its allocator slot.
    fn compile_global_ref(&mut self, node: &GlobalReference, _tail: bool) -> Result<Vec<Op>> {
        let idx = self.glob_alloc.get_idx(&node.var);
        Ok(vec![Op::GlobalRef(idx)])
    }
    /// Compiles a global assignment: evaluate the form, then GlobalSet.
    fn compile_global_set(&mut self, node: &GlobalAssignment, _tail: bool) -> Result<Vec<Op>> {
        let idx = self.glob_alloc.get_idx(&node.variable);
        let mut meaning = self.compile(&node.form, false)?;
        meaning.push(Op::GlobalSet(idx));
        Ok(meaning)
    }
    /// Compiles a top-level definition: evaluate the form, then GlobalDef.
    fn compile_global_def(&mut self, node: &GlobalDefine) -> Result<Vec<Op>> {
        let mut meaning = self.compile(&node.form, false)?;
        meaning.push(self.build_global_def(&node.variable));
        Ok(meaning)
    }
    /// Builds the GlobalDef op for `var`, allocating its slot if needed.
    fn build_global_def(&mut self, var: &GlobalVariable) -> Op {
        let idx = self.glob_alloc.get_idx(var);
        Op::GlobalDef(idx)
    }
    /// Compiles a fixlet (let-like binding): push each argument, extend the
    /// environment, compile the body, then drop the bindings again.
    fn compile_fixlet(&mut self, node: &FixLet, tail: bool) -> Result<Vec<Op>> {
        let n = self.env.len();
        let mut meaning = vec![];
        for (var, arg) in node.variables.iter().zip(&node.arguments) {
            let m = self.compile(arg, false)?;
            meaning.extend(m);
            meaning.push(Op::PushVal);
            self.env.push(var.name());
        }
        let m = self.compile(&node.body, tail)?;
        meaning.extend(m);
        self.env.truncate(n);
        if !meaning.last().map(Op::is_terminal).unwrap_or(false) {
            // No need to generate instructions after a terminal instruction.
            // Tail calls should truncate the value stack correctly.
            meaning.push(Op::Drop(node.variables.len()));
        }
        Ok(meaning)
    }
    /// Compiles a flat closure: compile its function, push the free
    /// variables (in reverse), then MakeClosure.
    fn compile_closure(&mut self, node: &FlatClosure, _tail: bool) -> Result<Vec<Op>> {
        let free_vars = node.free_vars.iter().map(|s| s.var_name()).collect();
        let function = compile_function(&node.func, free_vars, self.trans, self.glob_alloc)?;
        // Leaked on purpose: code objects live for the whole program run, and
        // MakeClosure stores a plain reference to them.
        let function = Box::leak(Box::new(function));
        let mut meaning = vec![];
        for fv in node.free_vars.iter().rev() {
            let m = self.compile_reference(fv, false)?;
            meaning.extend(m);
            meaning.push(Op::PushVal);
        }
        let n_free = node.free_vars.len();
        meaning.push(Op::MakeClosure(n_free, function));
        Ok(meaning)
    }
    /// Compiles a call: push arguments, then either an intrinsic op sequence
    /// or the function value followed by (Tail)Call.
    fn compile_application(&mut self, node: &Application, tail: bool) -> Result<Vec<Op>> {
        // todo: does Scheme require that the function is evaluated first?
        let mut meaning = vec![];
        let n = self.env.len();
        for a in &node.arguments {
            let m = self.compile(a, false)?;
            meaning.extend(m);
            meaning.push(Op::PushVal);
            self.env.push(Symbol::uninterned("_")); // add dummy variable to env, so that other variables are indexed correctly
        }
        if let Some(mi) =
            self.compile_intrinsic_application(&node.function, node.arguments.len(), tail)?
        {
            if !meaning.is_empty() {
                meaning.pop(); // don't push last argument
            }
            meaning.extend(mi);
        } else {
            let mf = self.compile(&node.function, false)?;
            meaning.extend(mf);
            let arity = node.arguments.len();
            match tail {
                true => meaning.push(Op::TailCall(arity)),
                false => meaning.push(Op::Call(arity)),
            }
        }
        self.env.truncate(n);
        Ok(meaning)
    }
    /// Recognizes calls to well-known primitives (cons, car, cdr, call/cc,
    /// call/ep, apply) and emits dedicated ops. Returns `Ok(None)` when the
    /// callee is not an intrinsic.
    fn compile_intrinsic_application(
        &mut self,
        func: &Expression,
        n_args: usize,
        tail: bool,
    ) -> Result<Option<Vec<Op>>> {
        match func {
            Expression::Reference(Reference::GlobalReference(GlobalReference { var, .. })) => {
                match var.value() {
                    VarDef::Value(Scm::Primitive(RuntimePrimitive { name, .. })) => match name {
                        "cons" => Ok(Some(vec![Op::Cons])),
                        "car" => Ok(Some(vec![Op::Car])),
                        "cdr" => Ok(Some(vec![Op::Cdr])),
                        "call/cc" => Ok(Some(vec![Op::PushCC(1), Op::Call(1)])),
                        "call/ep" => Ok(Some(vec![Op::PushEP(2), Op::Call(1), Op::PopEP])),
                        "apply" => Ok(Some(vec![
                            Op::PreApply(n_args),
                            if tail { Op::TailCallN } else { Op::CallN },
                        ])),
                        _ => Ok(None),
                    },
                    _ => Ok(None),
                }
            }
            _ => Ok(None),
        }
    }
    /// Boxes the innermost binding of `node.variable` (for mutable locals).
    fn compile_box_create(&mut self, node: &BoxCreate, _tail: bool) -> Result<Vec<Op>> {
        // Search from the back so the innermost binding wins.
        let idx = self
            .env
            .iter()
            .enumerate()
            .rev()
            .find(|&(_, &v)| v == node.variable.name())
            .unwrap()
            .0;
        // The box itself occupies a new stack slot.
        self.env.push(node.variable.name());
        Ok(vec![Op::Boxify(idx)])
    }
    /// Compiles `set!` through a box: push the box, evaluate, BoxSet.
    fn compile_box_write(&mut self, node: &BoxWrite, _tail: bool) -> Result<Vec<Op>> {
        let mut meaning = self.compile_reference(&node.reference, false)?;
        meaning.push(Op::PushVal);
        meaning.extend(self.compile(&node.form, false)?);
        meaning.push(Op::BoxSet);
        Ok(meaning)
    }
    /// Reads a boxed local: load the box, BoxGet.
    fn compile_box_read(&mut self, node: &BoxRead, _tail: bool) -> Result<Vec<Op>> {
        let mut meaning = self.compile_reference(&node.reference, false)?;
        meaning.push(Op::BoxGet);
        Ok(meaning)
    }
    /// Returns the stack index of the innermost binding of `name`.
    ///
    /// Panics if `name` is not bound — callers only pass resolved locals.
    fn index_of_local(&self, name: &Symbol) -> usize {
        self.env
            .iter()
            .enumerate()
            .rev()
            .find(|&(_, v)| v == name)
            .unwrap()
            .0
    }
    /// Compiles `let/cc`-style forms: the continuation occupies one stack
    /// slot while the body runs.
    fn compile_letcont(&mut self, node: &LetContinuation, tail: bool) -> Result<Vec<Op>> {
        self.env.push(node.variable.name());
        let mut meaning_body = self.compile(&node.body, tail)?;
        self.env.pop();
        if !meaning_body.last().map(Op::is_terminal).unwrap_or(false) {
            // No need to generate instructions after a terminal instruction.
            // Tail calls should truncate the value stack correctly.
            meaning_body.push(Op::Drop(1));
        }
        let n = meaning_body.len() as isize;
        Ok(match node.kind {
            LetContKind::IndefiniteContinuation => splice!(vec![Op::PushCC(n)], meaning_body),
            LetContKind::ExitProcedure => {
                splice!(vec![Op::PushEP(n + 1)], meaning_body, vec![Op::PopEP])
            }
        })
    }
    /// Registers import aliases with the global allocator and records each
    /// imported library path (once) for later initialization. Emits no ops.
    fn compile_import(&mut self, node: &Import) -> Result<Vec<Op>> {
        for set in &node.import_sets {
            for item in &set.items {
                match (&item.export_var, &item.import_var) {
                    // Macros have no runtime slot.
                    (Variable::MagicKeyword(_), _) => continue,
                    (Variable::GlobalVariable(ex), Variable::GlobalVariable(im)) => {
                        self.glob_alloc.declare_alias(ex, im);
                    }
                    _ => panic!("Invalid Import"),
                }
            }
            if !self.libs.contains(&set.library_path) {
                self.libs.push(set.library_path.clone())
            }
        }
        Ok(vec![])
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bytecode::VirtualMachine;
    use crate::env::Env;
    use crate::source::SourceLocation::NoSource;
    use crate::syntax::GlobalVariable;
    /// A global reference to a primitive named "cons" should compile to the
    /// dedicated `Op::Cons` intrinsic rather than a generic call.
    #[test]
    fn compile_intrinsics() {
        let expr = Expression::Reference(Reference::GlobalReference(GlobalReference::new(
            GlobalVariable::defined(
                Symbol::new(""),
                "no-matter",
                // The primitive body is never executed by this test.
                Scm::Primitive(RuntimePrimitive::new(
                    "cons",
                    Arity::Exact(2),
                    |_: &[Scm], _: &mut VirtualMachine| unimplemented!(),
                )),
            ),
            NoSource,
        )));
        let mut ga = GlobalAllocator::new();
        let trans = Translate::new(Env::new());
        let mut gen = BytecodeGenerator::new(vec![], &trans, &mut ga);
        let code = gen.compile_intrinsic_application(&expr, 2, true).unwrap();
        assert_eq!(code, Some(vec![Op::Cons]));
    }
}
|
use std::collections::HashMap;
/// An implementation of Heeren's algorithm for type inference.
use std::fmt::Debug;
use std::rc::Rc;
/// Constraints help deduce the actual type of type variables.
/// There are a few kinds:
#[derive(Clone, PartialEq)]
pub enum Constraint<T>
where
    T: Clone + PartialEq + Debug,
{
    /// Specifies that the two type variables must be
    /// equal to each other.
    Equality(Rc<TypeVar>, Rc<TypeVar>),
    /// Specifies that the LHS must be a generic instance of the RHS.
    IsGeneric(Rc<TypeVar>, Rc<TypeVar>),
    /// Specifies that the LHS should be the result of generalizing the RHS.
    ImplicitInstanceConstraint(Rc<TypeVar>, Rc<TypeVar>),
    /// Declares that the type of the type variable is the given literal type.
    IsLiteral(Rc<TypeVar>, T),
}
/// A type variable: a plain index, allocated sequentially by
/// `ConstraintSet::create_type_var`.
pub type TypeVar = usize;
/// A ConstraintSet stores the constraints gathered for a set of
/// type variables, together with the variables themselves.
pub struct ConstraintSet<T>
where
    T: Clone + PartialEq + Debug,
{
    /// All constraints accumulated so far.
    pub constraints: Vec<Constraint<T>>,
    /// All the type variables that have been instantiated for this set.
    pub types: Vec<Rc<TypeVar>>,
}
/// Given a set of constraints and the type variables they mention, return a
/// substitution set mapping each variable to its solved type (if any), or an
/// error when literal constraints conflict.
pub fn solve_types<T>(constraints: &ConstraintSet<T>) -> Result<SubstitutionSet<T>, String>
where
    T: Clone + PartialEq + Debug,
{
    // First, build a table from type variables to a discrete set of
    // constraints; many type variables can be unified in this step.
    let mut constraints_by_type: ConstraintsByType<T> = ConstraintsByType::new();
    for c in &constraints.constraints {
        match c {
            // Equality merges the two variables' constraint lists.
            Constraint::Equality(l, r) => constraints_by_type.unify(l, r),
            // A literal constraint is recorded against its variable's slot.
            Constraint::IsLiteral(var, typ) => {
                constraints_by_type
                    .get_or_create(var)
                    .push(Constraint::IsLiteral(var.clone(), typ.clone()));
            }
            // TODO: generic and implicit-instance constraints are accepted
            // but not yet solved; wildcard patterns avoid unused-variable
            // warnings until they are implemented.
            Constraint::ImplicitInstanceConstraint(..) => {}
            Constraint::IsGeneric(..) => {}
        }
    }
    // Now evaluate the collected constraints for every known variable.
    let mut type_by_reference = HashMap::new();
    let mut substitution_set = SubstitutionSet::new();
    for type_var in &constraints.types {
        let typ = solve(&mut constraints_by_type, &mut type_by_reference, type_var)?;
        substitution_set.insert(type_var.clone(), typ);
    }
    Ok(substitution_set)
}
/// Given constraints grouped by reference, solve the single type variable
/// supplied. Returns `Ok(None)` when no literal constraint pins the type
/// down, and `Err` when two literal constraints disagree.
fn solve<T: Clone + PartialEq + Debug>(
    constraints_by_type: &mut ConstraintsByType<T>,
    type_by_reference: &mut HashMap<usize, T>,
    type_var: &TypeVar,
) -> Result<Option<T>, String> {
    // Variables that were never constrained fall back to reference 0.
    // NOTE(review): this assumes slot 0 exists whenever such a variable is
    // solved — confirm, otherwise the indexing below can panic.
    let reference = constraints_by_type
        .reference_by_type
        .get(type_var)
        .unwrap_or(&0);
    // Reuse a previously computed result for this reference, if any.
    if let Some(typ) = type_by_reference.get(reference) {
        return Ok(Some(typ.clone()));
    }
    let constraints = &constraints_by_type.constraints_by_reference[*reference];
    let mut typ = None;
    for c in constraints {
        match c {
            Constraint::IsLiteral(_var, literal_type) => {
                if let Some(existing) = &typ {
                    // Every later literal must agree with the first one seen;
                    // compare by reference instead of cloning.
                    if existing != literal_type {
                        return Err(format!("type mismatch: {:?} and {:?}", typ, literal_type));
                    }
                } else {
                    // First literal fixes the candidate type.
                    typ = Some(literal_type.clone());
                }
            }
            // TODO: in the future, we need to resolve generics.
            _ => {}
        }
    }
    Ok(typ)
}
/// Index into `constraints_by_reference`.
pub type ReferenceIndex = usize;
/// Groups constraints by "reference": unified type variables share one
/// reference slot and therefore one constraint list.
struct ConstraintsByType<T>
where
    T: Clone + PartialEq + Debug,
{
    /// Constraint lists, addressed by reference index.
    pub constraints_by_reference: Vec<Vec<Constraint<T>>>,
    /// Which reference slot each type variable currently points at.
    pub reference_by_type: HashMap<TypeVar, usize>,
}
impl<T> ConstraintsByType<T>
where
    T: Clone + PartialEq + Debug,
{
    /// Create an empty constraint table.
    pub fn new() -> ConstraintsByType<T> {
        ConstraintsByType {
            constraints_by_reference: vec![],
            reference_by_type: HashMap::new(),
        }
    }
    /// Return the constraint list for `var`, allocating a fresh (empty)
    /// reference slot if the variable has not been seen before.
    pub fn get_or_create(&mut self, var: &TypeVar) -> &mut Vec<Constraint<T>> {
        let type_index = match self.reference_by_type.get(var) {
            Some(i) => *i,
            None => {
                self.constraints_by_reference.push(vec![]);
                self.constraints_by_reference.len() - 1
            }
        };
        self.reference_by_type.insert(*var, type_index);
        &mut self.constraints_by_reference[type_index]
    }
    /// Unify two type variables: merge `right`'s constraints into `left`'s
    /// list and point `right` at `left`'s reference slot.
    pub fn unify(&mut self, left: &TypeVar, right: &TypeVar) {
        // Ensure both variables have a slot before looking up their indices
        // (right first, matching the original allocation order).
        self.get_or_create(right);
        self.get_or_create(left);
        let left_index = self.reference_by_type[left];
        let right_index = self.reference_by_type[right];
        // Already unified (or the very same variable): proceeding would
        // first duplicate the shared constraint list and then clear it,
        // silently losing every constraint.
        if left_index == right_index {
            return;
        }
        // Move (not clone) right's constraints over, leaving its slot empty.
        let right_constraints = std::mem::take(&mut self.constraints_by_reference[right_index]);
        self.constraints_by_reference[left_index].extend(right_constraints);
        // Remap the right variable to the left's slot.
        // NOTE(review): other variables still pointing at `right_index` are
        // not remapped here — confirm that is intended.
        self.reference_by_type.insert(*right, left_index);
    }
}
impl<T> ConstraintSet<T>
where
    T: Clone + PartialEq + Debug,
{
    /// Create an empty constraint set with no variables.
    pub fn new() -> ConstraintSet<T> {
        ConstraintSet {
            constraints: Vec::new(),
            types: Vec::new(),
        }
    }
    /// Allocate a fresh type variable; its value is simply the next index.
    pub fn create_type_var(&mut self) -> Rc<TypeVar> {
        let fresh = Rc::new(self.types.len());
        self.types.push(Rc::clone(&fresh));
        fresh
    }
}
/// Stores the final result of the type inference algorithm: each type
/// variable maps to its solved type, or `None` when nothing constrained it.
pub type SubstitutionSet<T> = HashMap<Rc<TypeVar>, Option<T>>;
|
// unihernandez22
// https://codeforces.com/problemset/problem/1335/A
// math
use std::io;
fn main() {
    // First line: number of test cases.
    let mut line = String::new();
    io::stdin().read_line(&mut line).unwrap();
    let cases: i64 = line.trim().parse().unwrap();
    for _ in 0..cases {
        // Reuse the buffer for each case's single integer.
        line.clear();
        io::stdin().read_line(&mut line).unwrap();
        let n: i64 = line.trim().parse().unwrap();
        // Answer for this problem is (n - 1) / 2.
        println!("{}", (n - 1) / 2);
    }
}
|
use ckb_tool::ckb_types::{
packed::{CellInput, OutPoint},
prelude::*,
H256,
};
/// An unspent cell as tracked by the test harness.
#[derive(Hash, Eq, PartialEq, Debug, Clone)]
pub struct LiveCell {
    // Hash of the transaction that created this cell.
    pub tx_hash: H256,
    // Output index within that transaction.
    pub index: u32,
    pub capacity: u64,
    pub mature: bool,
}
impl LiveCell {
    /// The out-point (tx hash + output index) identifying this cell.
    pub fn out_point(&self) -> OutPoint {
        OutPoint::new(self.tx_hash.clone().pack(), self.index)
    }
    /// Build a transaction input spending this cell with `since` = 0.
    pub fn input(&self) -> CellInput {
        CellInput::new(self.out_point(), 0)
    }
}
|
use crate::{
btree::{BTree, BTreeKey},
checkpoint, read_block, read_obj_phys,
superblock::NxSuperblock,
BlockAddr, ObjPhys, ObjectIdentifier, ObjectTypeAndFlags, TransactionIdentifier,
};
use fal::parsing::{read_u32, read_u64};
use std::{cmp::Ordering, collections::HashMap};
use bitflags::bitflags;
/// On-disk header of an object map (omap).
#[derive(Debug)]
pub struct OmapPhys {
    pub header: ObjPhys,
    pub flags: u32,
    pub snapshot_count: u32,
    pub tree_type: ObjectTypeAndFlags,
    pub snapshot_tree_type: ObjectTypeAndFlags,
    pub tree_oid: ObjectIdentifier,
    pub snapshot_tree_oid: ObjectIdentifier,
    pub most_recent_snapshot: TransactionIdentifier,
    pub pending_revert_min: TransactionIdentifier,
    pub pending_revert_max: TransactionIdentifier,
}
/// Key of an omap B-tree entry: object id plus transaction id.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct OmapKey {
    pub oid: ObjectIdentifier,
    pub xid: TransactionIdentifier,
}
impl OmapKey {
    /// Serialized size in bytes (two u64 fields).
    pub const LEN: usize = 16;
    /// Parse a key from its little-endian on-disk layout.
    pub fn parse(bytes: &[u8]) -> Self {
        let mut offset = 0;
        Self {
            oid: read_u64(bytes, &mut offset).into(),
            xid: read_u64(bytes, &mut offset),
        }
    }
    /// Build a key with only the oid set; xid 0 marks it as partial.
    pub fn partial(oid: ObjectIdentifier) -> Self {
        Self { oid, xid: 0 }
    }
    /// Full comparison (oid, then xid) via the derived/impl'd Ord.
    pub fn compare(k1: &BTreeKey, k2: &BTreeKey) -> Ordering {
        Ord::cmp(k1, k2)
    }
    // Skip the xids. Useful when you want to find the key with the greatest xid.
    // Panics if either key is not an omap key.
    pub fn compare_partial(k1: &BTreeKey, k2: &BTreeKey) -> Ordering {
        Ord::cmp(
            &k1.as_omap_key().unwrap().oid,
            &k2.as_omap_key().unwrap().oid,
        )
    }
}
impl PartialOrd for OmapKey {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl Ord for OmapKey {
    /// Order by object id first; ties are broken by transaction id.
    fn cmp(&self, other: &Self) -> Ordering {
        match self.oid.cmp(&other.oid) {
            Ordering::Equal => self.xid.cmp(&other.xid),
            unequal => unequal,
        }
    }
}
// Per-entry flag bits stored in an omap value.
bitflags! {
    pub struct OmapValueFlags: u32 {
        const DELETED = 0x1;
        const SAVED = 0x2;
        const ENCRYPTED = 0x4;
        const NOHEADER = 0x8;
        const CRYPTO_GENERATION = 0x10;
    }
}
/// Value of an omap B-tree entry: flags, object size, physical address.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct OmapValue {
    pub flags: OmapValueFlags,
    pub size: u32,
    pub paddr: BlockAddr,
}
impl OmapValue {
    /// Serialized size in bytes (u32 + u32 + u64).
    pub const LEN: usize = 16;
    /// Parse a value from its on-disk layout.
    /// Panics if unknown flag bits are set (from_bits returns None).
    pub fn parse(bytes: &[u8]) -> Self {
        let mut offset = 0;
        Self {
            flags: OmapValueFlags::from_bits(read_u32(bytes, &mut offset)).unwrap(),
            size: read_u32(bytes, &mut offset),
            // Stored unsigned on disk; BlockAddr is signed, hence the cast.
            paddr: read_u64(bytes, &mut offset) as i64,
        }
    }
}
/// Entry of the omap snapshot tree.
#[derive(Clone, Copy, Debug)]
pub struct OmapSnapshot {
    pub flags: u32,
    pub padding: u32,
    pub oid: ObjectIdentifier,
}
impl OmapPhys {
    /// Parse the omap header from a raw block, field by field in on-disk
    /// order; `offset` is advanced by each read_* helper.
    pub fn parse(bytes: &[u8]) -> Self {
        let mut offset = 0;
        Self {
            header: read_obj_phys(bytes, &mut offset),
            flags: read_u32(bytes, &mut offset),
            snapshot_count: read_u32(bytes, &mut offset),
            tree_type: ObjectTypeAndFlags::from_raw(read_u32(bytes, &mut offset)),
            snapshot_tree_type: ObjectTypeAndFlags::from_raw(read_u32(bytes, &mut offset)),
            tree_oid: read_u64(bytes, &mut offset).into(),
            snapshot_tree_oid: read_u64(bytes, &mut offset).into(),
            most_recent_snapshot: read_u64(bytes, &mut offset),
            pending_revert_min: read_u64(bytes, &mut offset),
            pending_revert_max: read_u64(bytes, &mut offset),
        }
    }
}
/// An object map: the parsed header plus its loaded B-tree.
#[derive(Debug)]
pub struct Omap {
    pub omap: OmapPhys,
    pub tree: BTree,
}
impl Omap {
    /// Load an omap from the block at `baddr` and then load the B-tree it
    /// points at. Only physically-addressed trees are supported.
    pub fn load<D: fal::Device>(
        device: &mut D,
        superblock: &NxSuperblock,
        baddr: BlockAddr,
    ) -> Self {
        let block = read_block(superblock, device, baddr);
        let omap = OmapPhys::parse(&block);
        if omap.tree_type.is_virtual() {
            unimplemented!("Omaps with virtual addresses aren't implemented");
        }
        // tree_oid is a physical block address here (tree_type checked above).
        let tree = BTree::load(device, superblock, omap.tree_oid.0 as i64);
        Self { omap, tree }
    }
    /// Exact lookup of `key` (oid AND xid must match).
    pub fn get<D: fal::Device>(
        &self,
        device: &mut D,
        superblock: &NxSuperblock,
        key: OmapKey,
    ) -> Option<OmapValue> {
        // Make sure that we aren't using a partial key accidentally.
        debug_assert_ne!(key.xid, 0);
        self.tree
            .get(
                device,
                superblock,
                Resolver::Physical,
                &BTreeKey::OmapKey(key),
            )
            .map(|v| v.into_omap_value().unwrap())
    }
    /// All entries whose oid matches `key.oid`, regardless of xid.
    pub fn get_partial<'a, D: fal::Device>(
        &'a self,
        device: &'a mut D,
        superblock: &'a NxSuperblock,
        key: OmapKey,
    ) -> Option<impl Iterator<Item = (OmapKey, OmapValue)> + 'a> {
        self.tree
            .similar_pairs(
                device,
                superblock,
                Resolver::Virtual(self),
                &BTreeKey::OmapKey(key),
                OmapKey::compare_partial,
            )
            .map(|iter| {
                iter.map(|(k, v)| (k.into_omap_key().unwrap(), v.into_omap_value().unwrap()))
            })
    }
    /// Among the entries matching `key.oid`, return the one with the
    /// greatest xid, or None when there are no matches.
    pub fn get_partial_latest<'a, D: fal::Device>(
        &'a self,
        device: &'a mut D,
        superblock: &'a NxSuperblock,
        key: OmapKey,
    ) -> Option<(OmapKey, OmapValue)> {
        match self.get_partial(device, superblock, key).map(|iter| {
            // Keep only entries whose oid does not exceed the requested one,
            // then pick the newest transaction.
            iter.filter(|(k, _)| {
                OmapKey::compare_partial(&BTreeKey::OmapKey(*k), &BTreeKey::OmapKey(key))
                    != Ordering::Greater
            })
            .max_by_key(|(k, _)| k.xid)
        }) {
            // Flatten Option<Option<_>> from the map + max_by_key chain.
            Some(Some(t)) => Some(t),
            _ => None,
        }
    }
}
/// In-memory objects keyed by their identifier (checkpoint-loaded).
pub type EphemeralMap = HashMap<ObjectIdentifier, checkpoint::GenericObject>;
/// How object identifiers are translated to storage locations.
#[derive(Clone, Copy, Debug)]
pub enum Resolver<'a, 'b> {
    // Identifiers are raw block addresses.
    Physical,
    // Identifiers are resolved through an object map.
    Virtual(&'a Omap),
    // Identifiers are resolved through an in-memory checkpoint map.
    Ephemeral(&'b EphemeralMap),
}
impl Resolver<'_, '_> {
    /// True when identifiers are raw block addresses.
    pub fn is_physical(self) -> bool {
        matches!(self, Self::Physical)
    }
    /// True when identifiers go through an object map.
    pub fn is_virtual(self) -> bool {
        matches!(self, Self::Virtual(_))
    }
    /// True when identifiers go through the in-memory checkpoint map.
    pub fn is_ephemeral(self) -> bool {
        matches!(self, Self::Ephemeral(_))
    }
}
|
use crate::{BotResult, CommandData, Context};
use std::sync::Arc;
#[command]
#[short_desc("https://youtu.be/hjGZLnja1o8?t=41")]
#[bucket("songs")]
#[aliases("1273")]
#[no_typing()]
// Send the "Rockefeller Street" lyrics line by line via the shared
// song_send helper, using the lyrics/delay pair from `_rockefeller`.
pub async fn rockefeller(ctx: Arc<Context>, data: CommandData) -> BotResult<()> {
    let (lyrics, delay) = _rockefeller();
    super::song_send(lyrics, delay, ctx, data).await
}
/// Lyric lines plus the per-line delay (in milliseconds) for the song.
pub fn _rockefeller() -> (&'static [&'static str], u64) {
    const DELAY_MS: u64 = 2250;
    const LYRICS: [&str; 6] = [
        "1 - 2 - 7 - 3",
        "down the Rockefeller street.",
        "Life is marchin' on, do you feel that?",
        "1 - 2 - 7 - 3",
        "down the Rockefeller street.",
        "Everything is more than surreal",
    ];
    (&LYRICS, DELAY_MS)
}
|
use util::{bitfields, primitive_enum};
/// Register file for one DMA channel: source/destination addresses,
/// word count and the control register.
#[derive(Copy, Clone, Default)]
pub struct DMARegisters {
    pub source: DMAAddress,
    pub destination: DMAAddress,
    pub count: u16,
    pub control: DMAControl,
}
// 32-bit address accessed as two 16-bit halves (low/high), matching the
// split register writes the bus performs.
bitfields! {
    pub struct DMAAddress: u32 {
        [0,15] lo, set_lo: u16,
        [16,31] hi, set_hi: u16,
    }
}
bitfields! {
    /// 40000BAh - DMA0CNT_H - DMA 0 Control (R/W)
    /// 40000C6h - DMA1CNT_H - DMA 1 Control (R/W)
    /// 40000D2h - DMA2CNT_H - DMA 2 Control (R/W)
    /// 40000DEh - DMA3CNT_H - DMA 3 Control (R/W)
    /// Bit   Expl.
    /// 0-4   Not used
    /// 5-6   Dest Addr Control  (0=Increment,1=Decrement,2=Fixed,3=Increment/Reload)
    /// 7-8   Source Adr Control (0=Increment,1=Decrement,2=Fixed,3=Prohibited)
    /// 9     DMA Repeat                   (0=Off, 1=On) (Must be zero if Bit 11 set)
    /// 10    DMA Transfer Type            (0=16bit, 1=32bit)
    /// 11    Game Pak DRQ - DMA3 only -   (0=Normal, 1=DRQ <from> Game Pak, DMA3)
    /// 12-13 DMA Start Timing  (0=Immediately, 1=VBlank, 2=HBlank, 3=Special)
    ///         The 'Special' setting (Start Timing=3) depends on the DMA channel:
    ///         DMA0=Prohibited, DMA1/DMA2=Sound FIFO, DMA3=Video Capture
    /// 14    IRQ upon end of Word Count   (0=Disable, 1=Enable)
    /// 15    DMA Enable                   (0=Off, 1=On)
    pub struct DMAControl: u16 {
        [5,6] dst_addr_control, set_dst_addr_control: AddressControl,
        [7,8] src_addr_control, set_src_addr_control: AddressControl,
        [9] repeat, set_repeat: bool,
        [10] transfer_type, set_transfer_type: TransferType,
        [11] gamepak_drq, set_gamepak_drq: bool,
        [12,13] timing, set_timing: Timing,
        [14] irq, set_irq: bool,
        [15] enabled, set_enabled: bool,
    }
}
// Start-timing values. SoundFifo/VideoCapture (4, 5) lie outside the 2-bit
// field above — presumably resolved from Special per channel elsewhere;
// TODO confirm at the use site.
primitive_enum! {
    pub enum Timing: u16 {
        Immediate = 0,
        VBlank,
        HBlank,
        Special,
        SoundFifo,
        VideoCapture,
    }
}
// Source/destination address stepping modes (bits 5-6 / 7-8).
primitive_enum! {
    pub enum AddressControl: u16 {
        Increment = 0,
        Decrement,
        Fixed,
        IncrementReload,
    }
}
// Unit size of each transfer (bit 10): 16-bit halfword or 32-bit word.
primitive_enum! {
    pub enum TransferType: u16 {
        Halfword = 0,
        Word = 1,
    }
}
|
use oxygengine::prelude::*;
use serde::{Deserialize, Serialize};
/// Kind of collectible item; determines which icon is shown.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum ItemKind {
    Star,
    Shield,
}
// Marker impls so the kind can be loaded from prefab definitions.
impl Prefab for ItemKind {}
impl PrefabComponent for ItemKind {}
impl ItemKind {
    /// Asset path of this item's icon.
    pub fn image(self) -> &'static str {
        match self {
            Self::Shield => "images/item-shield.svg",
            Self::Star => "images/item-star.svg",
        }
    }
    /// Build a `size` x `size` image widget for this item, anchored by the
    /// [0.5, 0.5] alignment.
    pub fn build_image(self, size: Scalar) -> Image<'static> {
        let widget = Image::new(self.image());
        widget
            .destination(Some([0.0, 0.0, size, size].into()))
            .align([0.5, 0.5].into())
    }
}
|
use core::ops::AddAssign;
use super::drawable::Drawable;
use super::image::Image;
use super::canvas::Canvas;
/// Use a Sprite for an object on your game which can move.
///
/// # Example
///
/// ```rust
/// use wasm_game_lib::graphics::image::Image;
/// use wasm_game_lib::graphics::sprite::Sprite;
/// # use wasm_game_lib::graphics::window::Window;
/// # async fn test() {
/// # let (window, mut canvas) = Window::init();
///
/// // load a texture from the web
/// let ferris_texture = Image::load("https://rustacean.net/assets/cuddlyferris.svg").await.unwrap();
///
/// // create a sprite
/// let ferris = Sprite::<u32>::new((0,0), &ferris_texture, (0,0));
///
/// // draw the sprite on a canvas
/// canvas.draw(&ferris);
/// # }
/// ```
pub struct Sprite<'a, T: Into<f64> + Copy + AddAssign> {
    /// The texture of the Sprite
    pub texture: &'a Image,
    /// Where the Sprite is located on the screen
    pub coords: (T, T),
    /// The point on the texture which is considered as the center of the Sprite.
    /// The coordinates of this point must be relative to the top-left corner of the object.
    pub origin: (T, T)
}
impl<'a, T: Into<f64> + Copy + AddAssign> Sprite<'a, T> {
    /// Create a new Sprite from screen coordinates, a texture and an origin
    /// point (relative to the texture's top-left corner).
    pub fn new(coords: (T, T), texture: &Image, origin: (T, T)) -> Sprite<T> {
        Sprite {
            coords,
            texture,
            origin
        }
    }
    /// Set the origin.
    /// The origin is the point on the texture which is considered as the center of the Sprite.
    /// The coordinates of this point must be relative to the top-left corner of the object.
    pub fn set_origin(&mut self, origin: (T, T)) {
        self.origin = origin
    }
    /// Return the origin.
    /// The origin is the point on the texture which is considered as the center of the Sprite.
    /// The coordinates of this point must be relative to the top-left corner of the object.
    pub fn get_origin(&self) -> (T, T) {
        self.origin
    }
    /// Return the texture.
    pub fn get_texture(&self) -> &Image {
        self.texture
    }
    /// Set the texture
    pub fn set_texture(&mut self, texture: &'a Image) {
        self.texture = texture
    }
    /// Set the x coordinate.
    pub fn set_x(&mut self, x: T) {
        self.coords.0 = x;
    }
    /// Set the y coordinate.
    pub fn set_y(&mut self, y: T) {
        self.coords.1 = y;
    }
    /// Set the coordinates.
    pub fn set_coords(&mut self, coords: (T, T)) {
        self.coords = coords;
    }
    /// Return the x coordinate.
    // Takes &self: reading a Copy field needs no mutable access, and call
    // sites auto-reference, so this stays backward compatible.
    pub fn get_x(&self) -> T {
        self.coords.0
    }
    /// Return the y coordinate.
    pub fn get_y(&self) -> T {
        self.coords.1
    }
    /// Return the coordinates.
    pub fn get_coords(&self) -> (T, T) {
        self.coords
    }
    /// Add a value to the actual coordinates, e.g. `sprite.move_by((1, 2))`
    /// moves the sprite one pixel right and two pixels down.
    pub fn move_by(&mut self, movement: (T, T)) {
        self.coords.0 += movement.0;
        self.coords.1 += movement.1;
    }
}
impl<'a, T: Into<f64> + Copy + AddAssign> Drawable for Sprite<'a, T> {
    // Draw the texture shifted so that `origin` lands on `coords`.
    fn draw_on_canvas(&self, canvas: &mut Canvas) {
        canvas.draw_image((self.coords.0.into() - self.origin.0.into(), self.coords.1.into() - self.origin.1.into()), &self.texture);
    }
}
use std::io::prelude::*;
use std::net::TcpStream;
fn main() {
    // NOTE(review): 4369 is the well-known EPMD port — presumably this is a
    // hand-rolled EPMD client experiment; confirm.
    let mut stream = TcpStream::connect("127.0.0.1:4369")
        .expect("Couldn't connect to the server...");
    connect(&mut stream);
    // ask_port(&mut stream);
    // let mut stream2 = TcpStream::connect("127.0.0.1:4369")
    //     .expect("Couldn't connect to the server...");
    ask_names(&mut stream);
}
/// Send a registration packet over the stream and read the 4-byte reply.
///
/// NOTE(review): the layout (2-byte length prefix, 'x' tag, node name
/// "krishna") looks like an EPMD ALIVE-style request — confirm against the
/// EPMD protocol description before relying on it.
fn connect(stream: &mut TcpStream) {
    // Local port rendered as lowercase hex.
    let s_port = format!("{:x}", stream.local_addr().unwrap().port());
    println!("port {:?}", stream.local_addr().unwrap().port());
    let mut buf: [u8; 22] = [0; 22];
    // Two-byte length prefix: 20 bytes follow.
    buf[0] = 0;
    buf[1] = 20;
    // Request tag.
    buf[2] = b'x';
    // NOTE(review): only the first two hex digits of the port are sent as
    // ASCII; longer port numbers are truncated — confirm intent.
    let port_hex = s_port.into_bytes();
    buf[3] = port_hex[0];
    buf[4] = port_hex[1];
    buf[5] = 77;
    buf[6] = 0;
    buf[7] = 0;
    buf[8] = 5;
    buf[9] = 0;
    buf[10] = 5;
    buf[11] = 0;
    // Name length (7) followed by the name bytes.
    buf[12] = 7;
    buf[13..20].copy_from_slice(b"krishna");
    buf[20] = 0;
    buf[21] = 0;
    stream.write_all(&buf).unwrap();
    println!("wrote ");
    // Read the fixed-size response.
    let mut reply: [u8; 4] = [0; 4];
    let r_len = stream.read(&mut reply).unwrap();
    println!("read {:?} of length {}", reply, r_len);
}
// Query the peer for the port of node "krishna" ('z' tag + name).
// Currently unused — the call in `main` is commented out.
fn ask_port(stream: &mut TcpStream) {
    let mut buf_vec: [u8; 10] = [0; 10];
    // Two-byte length prefix (8 bytes follow), then the request.
    buf_vec[0] = 0;
    buf_vec[1] = 10;
    buf_vec[2] = "z".as_bytes()[0];
    buf_vec[3] = "k".as_bytes()[0];
    buf_vec[4] = "r".as_bytes()[0];
    buf_vec[5] = "i".as_bytes()[0];
    buf_vec[6] = "s".as_bytes()[0];
    buf_vec[7] = "h".as_bytes()[0];
    buf_vec[8] = "n".as_bytes()[0];
    buf_vec[9] = "a".as_bytes()[0];
    stream.write_all(&buf_vec).unwrap();
    println!("asked");
    let mut r_array: [u8; 4] = [0; 4];
    let r_len = stream.read(&mut r_array).unwrap();
    println!("ask - read {:?} of length {}", r_array, r_len);
}
// Send an 'n' (names) request and print the whole textual response.
fn ask_names(stream: &mut TcpStream) {
    let mut buf_vec: [u8; 3] = [0; 3];
    buf_vec[0] = 0;
    buf_vec[1] = 1;
    buf_vec[2] = "n".as_bytes()[0];
    stream.write_all(&buf_vec).unwrap();
    println!("asked names");
    // r_array is left over from the byte-array read attempt below; unused.
    let mut r_array: [u8; 30] = [0; 30];
    let mut r_str = String::new();
    // let r_len = stream.read(&mut r_array).unwrap();
    // Blocks until the peer closes the connection.
    let r_len = stream.read_to_string(&mut r_str).unwrap();
    println!("ask - read names {:?} of length {}", r_str, r_len);
}
// Module declarations: black_hole is part of the public API,
// accretion is internal.
pub mod black_hole;
mod accretion;
#[macro_use]
extern crate lazy_static;
extern crate wars_8_api;
use std::sync::{Mutex, MutexGuard};
use gfx::{ColorPallete, print, printh, rectfill};
use wars_8_api::*;
// Shared terminal state: the visible lines (color + text, newest last)
// and the cursor column on the current input line.
lazy_static! {
    static ref LINES: Mutex<Vec<(ColorPallete, String)>> = Mutex::new(Vec::new());
    static ref CRS_OFFSET: Mutex<i32> = Mutex::new(0);
}
#[no_mangle]
// Cartridge entry point: log startup and seed the first (pink) prompt line.
pub fn _init() {
    gfx::printh("[WARS-8-Terminal] Starting!".to_string());
    LINES.lock().unwrap().push((ColorPallete::Pink, String::new())) ;
}
/// Interpret the last line of the terminal buffer as a command and append
/// any output lines to the buffer. The caller must hold the LINES lock.
pub fn process_command(mutex: &mut MutexGuard<Vec<(ColorPallete, String)>>) {
    // The command is whatever was typed on the most recent line.
    let (_, cmd_str) = mutex.last().unwrap();
    let cmd_str = cmd_str.to_lowercase();
    let split_cmd = cmd_str.split(' ').collect::<Vec<&str>>();
    match split_cmd[0] {
        "echo" => {
            // Echo back everything after the first space, if any.
            match cmd_str.find(' ') {
                Some(idx) => {
                    mutex.push((ColorPallete::White, cmd_str[(idx + 1)..].to_string()));
                },
                None => { }
            }
        },
        "exit" => {
            misc::exit();
        },
        "help" => {
            mutex.push((ColorPallete::Blue, "help echo exit".to_string()));
            mutex.push((ColorPallete::Pink, "load unload".to_string()));
        },
        "load" => {
            // Load the cartridge named after the first space.
            match cmd_str.find(' ') {
                Some(idx) => {
                    misc::load(cmd_str[(idx + 1)..].to_string());
                },
                None => { }
            }
        },
        "unload" => {
            misc::unload();
        },
        _ => {
            mutex.push((ColorPallete::Red, "INVALID COMMAND".to_string()));
        }
    }
}
#[no_mangle]
// Per-frame update: consume queued key events, edit the current input line,
// run commands on RETURN, then trim the scrollback to 21 lines.
pub fn _update() {
    let mut lines_mutex = LINES.lock().unwrap();
    let mut key = input::key();
    let mut crs_offset = CRS_OFFSET.lock().unwrap();
    // Drain every key event queued since the last frame.
    while key != input::Scancode::None {
        let mut last = (lines_mutex.last().unwrap()).clone();
        if key == input::Scancode::BACKSPACE {
            // Remove the last typed character (pop is a no-op when empty).
            last.1.pop();
            *(lines_mutex.last_mut().unwrap()) = (last.0, last.1);
            // NOTE(review): this can go negative on an empty line — confirm
            // the cursor offset is meant to allow that.
            *crs_offset -= 1;
        } else if *crs_offset != 32 && key != input::Scancode::RETURN {
            // Append the typed character, capped at 32 columns.
            *(lines_mutex.last_mut().unwrap()) = (last.0, last.1 + char::from(key).to_string().as_str());
            *crs_offset += 1;
        }
        if key == input::Scancode::RETURN {
            *crs_offset = 0;
            process_command(&mut lines_mutex);
            // Start a fresh prompt line for the next command.
            lines_mutex.push((ColorPallete::Pink, String::new()));
        }
        key = input::key();
    }
    // Keep only the newest 21 lines. drain removes the oldest prefix in one
    // O(n) pass instead of repeated O(n) remove(0) calls.
    let lines_len = lines_mutex.len();
    if lines_len > 21 {
        lines_mutex.drain(..lines_len - 21);
    }
}
#[no_mangle]
// Per-frame draw: clear to black and render the lines bottom-up, newest at
// the bottom, 6 pixels per row on the 128x128 screen.
pub fn _draw() {
    rectfill(0, 0, 127, 127, ColorPallete::Black);
    let lines_mutex = LINES.lock().unwrap();
    let lines_len = lines_mutex.len();
    for row in 0..lines_len {
        // Row 0 is the newest line, drawn at the bottom edge.
        let y = 127 - (6 * row) - 1;
        let row_content = lines_mutex[(lines_len - 1) - row].clone();
        print(row_content.1, 0, y as i32, row_content.0);
    }
}
use std::collections::HashMap;
use reqwest::header;
use std::thread;
// Read order ids from ./orders.txt, split them into batches of 50 and post
// each batch to the receiving-order API, one batch per second.
fn main() {
    let content = lane::fs::read_content("./orders.txt");
    // println!("{:?}", content);
    let ves: Vec<&str> = content.split(",").collect();
    // Batch the ids, 50 per request.
    let per_vec = lane::slice_per_vec(ves, 50);
    // println!("总计: {}", per_vec.len());
    let count = per_vec.len();
    // 1-based batch counter for progress output.
    let mut r: i32 = 1;
    for ids in per_vec {
        let mut params: HashMap<String, String> = HashMap::new();
        params.insert("method".to_owned(), "vast.order.plan.receivingorderbyorderid".to_owned());
        params.insert("v".to_owned(), "2.0".to_owned());
        params.insert("orderid".to_owned(), ids.join(",").to_owned());
        println!("{:?}", ids.join(","));
        let x = post("Route.axd", params);
        match x {
            Ok(x) => println!("Success {:?}\n", x),
            Err(e) => println!("Error {:?}", e)
        }
        println!("第{:?}/{:?},执行完成", r, count);
        r += 1;
        // Throttle: one request per second.
        thread::sleep(std::time::Duration::from_secs(1));
    }
    println!("excute complete!")
}
/// Default User-Agent header value.
const DEFAULT_USER_AGENT: &'static str = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3534.4 Safari/537.36";
// Alternative: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.5 Safari/605.1.15
/// 采用post方式请求数据
pub(crate) fn post(url: &str, params: HashMap<String, String>) -> Result<String, std::io::Error> {
match reqwest::blocking::Client::new()
.post(url)
.header(header::USER_AGENT, DEFAULT_USER_AGENT)
.form(¶ms)
.send()
{
Ok(res) => {
if res.status() == 200 {
match res.text() {
Ok(txt) => Ok(txt),
Err(e) => Err(std::io::Error::new(
std::io::ErrorKind::Other,
format!("{:?}", e),
)),
}
} else {
Err(std::io::Error::new(std::io::ErrorKind::Other, "error"))
}
}
Err(e) => Err(std::io::Error::new(
std::io::ErrorKind::Other,
format!("{:?}", e),
)),
}
} |
use std::fmt;
/// Scoring combination a card can belong to; `None` for plain cards.
#[derive(Eq, PartialEq, Hash, Clone)]
pub enum Yaku {
    Poetry,
    Flowers,
    Purple,
    Inoshikachou,
    Moon,
    Salamander,
    Gramps,
    Month,
    None,
}
impl fmt::Display for Yaku {
    /// Write the variant's name verbatim.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let value = match *self {
            Yaku::Poetry => "Poetry",
            Yaku::Flowers => "Flowers",
            Yaku::Purple => "Purple",
            Yaku::Inoshikachou => "Inoshikachou",
            Yaku::Moon => "Moon",
            Yaku::Salamander => "Salamander",
            Yaku::Gramps => "Gramps",
            Yaku::Month => "Month",
            Yaku::None => "None",
        };
        f.write_str(value)
    }
}
/// A single hanafuda card: its month, point value and yaku membership.
pub struct Card {
    pub month: usize,
    pub points: usize,
    pub yaku: Yaku,
}
impl Card {
pub fn new_yaku(month: usize, points: usize, yaku: Yaku) -> Card {
Card{
month: month,
points: points,
yaku: yaku,
}
}
pub fn new(month: usize, points: usize) -> Card {
Card{
month: month,
points: points,
yaku: Yaku::None,
}
}
pub fn get_info(&self) -> String {
format!("M: {}, P: {}, Y: {}", self.month, self.points, self.yaku)
}
}
/// Build the full 48-card hanafuda deck, month by month.
pub fn get_cards() -> Vec<Card> {
    vec![
        // January
        Card::new(1, 1),
        Card::new(1, 1),
        Card::new_yaku(1, 5, Yaku::Poetry),
        Card::new(1, 20),
        // February
        Card::new(2, 1),
        Card::new(2, 1),
        Card::new_yaku(2, 5, Yaku::Poetry),
        Card::new(2, 10),
        // March
        Card::new(3, 1),
        Card::new(3, 1),
        Card::new_yaku(3, 5, Yaku::Poetry),
        Card::new_yaku(3, 20, Yaku::Flowers),
        // April
        Card::new(4, 1),
        Card::new(4, 1),
        Card::new(4, 5),
        Card::new(4, 10),
        // May
        Card::new(5, 1),
        Card::new(5, 1),
        Card::new(5, 5),
        Card::new(5, 10),
        // June
        Card::new(6, 1),
        Card::new(6, 1),
        Card::new_yaku(6, 5, Yaku::Purple),
        Card::new_yaku(6, 10, Yaku::Inoshikachou),
        // July
        Card::new(7, 1),
        Card::new(7, 1),
        Card::new(7, 5),
        Card::new_yaku(7, 20, Yaku::Inoshikachou),
        // August
        Card::new(8, 1),
        Card::new(8, 1),
        Card::new(8, 10),
        Card::new_yaku(8, 20, Yaku::Moon),
        // September
        Card::new(9, 1),
        Card::new(9, 1),
        Card::new_yaku(9, 5, Yaku::Purple),
        Card::new_yaku(9, 10, Yaku::Salamander),
        // October
        Card::new(10, 1),
        Card::new(10, 1),
        Card::new_yaku(10, 5, Yaku::Purple),
        Card::new_yaku(10, 10, Yaku::Inoshikachou),
        // November
        Card::new(11, 1),
        Card::new(11, 5),
        Card::new(11, 10),
        Card::new_yaku(11, 20, Yaku::Gramps),
        // December
        Card::new(12, 1),
        Card::new(12, 1),
        Card::new(12, 1),
        Card::new(12, 20),
    ]
}
|
// We can use generics to create defintions for items like function signatures
// or structs, which we can then use with many diff concrete data types
// ***In Function Definitions**
// Place generics in the signature of the function where we would usually
// specify the data types of the parameters and the return value
// Largest value in a non-empty i32 slice; panics on an empty slice
// (the `list[0]` index), matching the original.
fn largest_i32(list: &[i32]) -> i32 {
    let mut best = list[0];
    for &candidate in &list[1..] {
        if candidate > best {
            best = candidate;
        }
    }
    best
}
// Largest char in a non-empty slice, via the iterator max adaptor;
// still panics on an empty slice, like the original's indexing.
fn largest_char(list: &[char]) -> char {
    list.iter().copied().max().expect("list must not be empty")
}
// Read this function as:
// the function "largest" is generic over some type "T"
// This function has one parameter named "list", which is a slice of values
// of type "T"
// The largest function with return a value of the same type "T"
// fn largest<T>(list: &[T]) -> T {
// let mut largest = list[0];
// for &item in list.iter() {
// if item > largest {
// largest = item;
// }
// }
// largest
// }
// ***In Struct Defintions**
// x and y have to be of the same type T
// Both fields share the single type parameter T.
struct Point<T> {
    x: T,
    y: T
}
// Two parameters allow x and y to differ in type.
struct PointTwo<T, U> {
    x: T,
    y: U
}
// ***In Enum Definitions***
// This is an enum that is a generic over type T and has two variants
// "Some" which holds one value of type "T"
// "None" variant that doesn't hold any value
// By using the Option enum, we can express the abstract concept of having
// an optional value
// NOTE: this local Option shadows std::option::Option within this module.
enum Option<T> {
    Some(T),
    None
}
// Enums can use multiple generic types as well
// Result enum is generic over two types, "T" and "E"
// It has two variants: "Ok" which holds a value of type "T"
// and "Err" which holds a value of type "E"
// NOTE: likewise shadows std::result::Result within this module.
enum Result <T, E> {
    Ok(T),
    Err(E)
}
// ***In Method Definitions***
// Have to declare "T" just after "impl" so we can use it to specify that
// we're implementing methods on the type Point<T>
// By declaring "T" as a generic type after "impl", Rust can identify that
// the type in the angle brackets in "Point" is a generic type and not
// a concrete type
impl<T> Point <T> {
    // Borrowing getter for the x field; works for any T.
    fn x(&self) -> &T {
        &self.x
    }
}
// This is the same as the above
// impl<U> Point <U> {
// fn x(&self) -> &U {
// &self.x
// }
// }
// We could remove the "T" after "impl" by using a concrete type like the below
// impl Point<f32> {
// fn distance_from_origin(&self) -> f32 {
// (self.x.powi(2) + self.y.powi(2)).sqrt()
// }
// }
// Demo driver: exercises the generic structs defined above.
fn main() {
    // let number_list = vec![34, 50, 25, 100, 65];
    // let result = largest_i32(&number_list);
    // println!("The largest number is {}", result);
    // let char_list = vec!['y', 'm', 'a', 'q'];
    // let result = largest_char(&char_list);
    // println!("The largest char is {}", result);
    // let number_list = vec![34, 50, 25, 100, 65];
    // let result = largest(&number_list);
    // println!("The largest number is {}", result);
    // let char_list = vec!['y', 'm', 'a', 'q'];
    // let result = largest(&char_list);
    // println!("The largest char is {}", result);
    let integer = PointTwo { x: 5, y: 10 };
    let float = Point { x: 1.0, y: 4.0 };
    // Won't work because x and y are different types
    // let wont_work = Point { x: 5, y: 4.0 }
    // Now x and y can be of different types or the same type
    let will_work = PointTwo { x: 5, y: 4.0 };
    let p = Point { x: 5, y: 10 };
    println!("=======");
    println!("p.x = {}", p.x());
}
|
use std::fs::File;
use std::io::{BufReader, BufWriter};
use crate::cartridge::{MirrorMode, Rom, RomMapper};
use crate::savable::Savable;
use super::Mapper;
/// Mapper 7 state: cartridge ROM, selected PRG bank, and the current
/// one-screen mirroring selection.
pub struct Mapper7 {
    rom: Rom,
    bank: usize,
    mirror_mode: MirrorMode,
}
impl Mapper7 {
    /// Wrap a ROM with power-on defaults: bank 0, one-screen-low mirroring.
    pub fn new(rom: Rom) -> Self {
        Self {
            rom,
            bank: 0,
            mirror_mode: MirrorMode::OneScreenLo,
        }
    }
}
impl RomMapper for Mapper7 {}
impl Savable for Mapper7 {
    // Save-state: serialize ROM first, then bank and mirror mode, in order.
    fn save(&self, output: &mut BufWriter<File>) -> bincode::Result<()> {
        self.rom.save(output)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.bank)?;
        bincode::serialize_into::<&mut BufWriter<File>, _>(output, &self.mirror_mode)?;
        Ok(())
    }
    // Load-state: must read fields in the exact order `save` wrote them.
    fn load(&mut self, input: &mut BufReader<File>) -> bincode::Result<()> {
        self.rom.load(input)?;
        self.bank = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        self.mirror_mode = bincode::deserialize_from::<&mut BufReader<File>, _>(input)?;
        Ok(())
    }
}
impl Mapper for Mapper7 {
    // PRG reads come from the selected 32 KiB bank (addr masked to 0x7FFF).
    fn read_prg(&mut self, addr: u16) -> u8 {
        let index = self.bank * 0x8000 + (addr & 0x7FFF) as usize;
        self.rom.prg[index]
    }
    // Any write to $8000-$FFFF latches bank (bits 0-2) and mirroring (bit 4).
    fn write_prg(&mut self, addr: u16, data: u8) {
        if let 0x8000..=0xFFFF = addr {
            self.bank = (data & 0x7) as usize;
            match (data >> 4) & 0x1 != 0 {
                true => self.mirror_mode = MirrorMode::OneScreenHi,
                false => self.mirror_mode = MirrorMode::OneScreenLo,
            }
        }
    }
    fn read_chr(&mut self, addr: u16) -> u8 {
        self.rom.chr[addr as usize]
    }
    // CHR writes only land when the cart has no CHR ROM banks —
    // presumably the CHR vec then acts as RAM; TODO confirm.
    fn write_chr(&mut self, addr: u16, data: u8) {
        if self.rom.header.chr_count() == 0 {
            self.rom.chr[addr as usize] = data;
        }
    }
    fn mirror_mode(&self) -> crate::cartridge::MirrorMode {
        self.mirror_mode
    }
    // Restore power-on defaults (matches `new`).
    fn reset(&mut self) {
        self.bank = 0;
        self.mirror_mode = MirrorMode::OneScreenLo;
    }
}
|
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `LPSDSR` reader - Low-power deepsleep/Sleep/Low-power run"]
pub type LPSDSR_R = crate::BitReader<LPSDSR_A>;
#[doc = "Low-power deepsleep/Sleep/Low-power run\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LPSDSR_A {
    #[doc = "0: Voltage regulator on during Deepsleep/Sleep/Low-power run mode"]
    MainMode = 0,
    #[doc = "1: Voltage regulator in low-power mode during Deepsleep/Sleep/Low-power run mode"]
    LowPowerMode = 1,
}
impl From<LPSDSR_A> for bool {
    #[inline(always)]
    fn from(variant: LPSDSR_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            LPSDSR_A::MainMode => false,
            LPSDSR_A::LowPowerMode => true,
        }
    }
}
impl LPSDSR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPSDSR_A {
        if self.bits {
            LPSDSR_A::LowPowerMode
        } else {
            LPSDSR_A::MainMode
        }
    }
    #[doc = "Voltage regulator on during Deepsleep/Sleep/Low-power run mode"]
    #[inline(always)]
    pub fn is_main_mode(&self) -> bool {
        self.variant() == LPSDSR_A::MainMode
    }
    #[doc = "Voltage regulator in low-power mode during Deepsleep/Sleep/Low-power run mode"]
    #[inline(always)]
    pub fn is_low_power_mode(&self) -> bool {
        self.variant() == LPSDSR_A::LowPowerMode
    }
}
#[doc = "Field `LPSDSR` writer - Low-power deepsleep/Sleep/Low-power run"]
pub type LPSDSR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LPSDSR_A>;
impl<'a, REG, const O: u8> LPSDSR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Voltage regulator on during Deepsleep/Sleep/Low-power run mode"]
    #[inline(always)]
    pub fn main_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPSDSR_A::MainMode)
    }
    #[doc = "Voltage regulator in low-power mode during Deepsleep/Sleep/Low-power run mode"]
    #[inline(always)]
    pub fn low_power_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPSDSR_A::LowPowerMode)
    }
}
#[doc = "Field `PDDS` reader - Power down deepsleep"]
pub type PDDS_R = crate::BitReader<PDDS_A>;
#[doc = "Power down deepsleep\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PDDS_A {
    #[doc = "0: Enter Stop mode when the CPU enters deepsleep"]
    StopMode = 0,
    #[doc = "1: Enter Standby mode when the CPU enters deepsleep"]
    StandbyMode = 1,
}
impl From<PDDS_A> for bool {
    #[inline(always)]
    fn from(variant: PDDS_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            PDDS_A::StopMode => false,
            PDDS_A::StandbyMode => true,
        }
    }
}
impl PDDS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PDDS_A {
        if self.bits {
            PDDS_A::StandbyMode
        } else {
            PDDS_A::StopMode
        }
    }
    #[doc = "Enter Stop mode when the CPU enters deepsleep"]
    #[inline(always)]
    pub fn is_stop_mode(&self) -> bool {
        self.variant() == PDDS_A::StopMode
    }
    #[doc = "Enter Standby mode when the CPU enters deepsleep"]
    #[inline(always)]
    pub fn is_standby_mode(&self) -> bool {
        self.variant() == PDDS_A::StandbyMode
    }
}
#[doc = "Field `PDDS` writer - Power down deepsleep"]
pub type PDDS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PDDS_A>;
impl<'a, REG, const O: u8> PDDS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Enter Stop mode when the CPU enters deepsleep"]
    #[inline(always)]
    pub fn stop_mode(self) -> &'a mut crate::W<REG> {
        self.variant(PDDS_A::StopMode)
    }
    #[doc = "Enter Standby mode when the CPU enters deepsleep"]
    #[inline(always)]
    pub fn standby_mode(self) -> &'a mut crate::W<REG> {
        self.variant(PDDS_A::StandbyMode)
    }
}
#[doc = "Field `CWUF` reader - Clear wakeup flag"]
pub type CWUF_R = crate::BitReader<CWUFW_A>;
#[doc = "Clear wakeup flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CWUFW_A {
    #[doc = "1: Clear the WUF Wakeup flag after 2 system clock cycles"]
    Clear = 1,
}
impl From<CWUFW_A> for bool {
    #[inline(always)]
    fn from(variant: CWUFW_A) -> Self {
        // Single-variant enum: only the set state is named.
        match variant {
            CWUFW_A::Clear => true,
        }
    }
}
impl CWUF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CWUFW_A> {
        // The cleared state has no named variant.
        if self.bits {
            Some(CWUFW_A::Clear)
        } else {
            None
        }
    }
    #[doc = "Clear the WUF Wakeup flag after 2 system clock cycles"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        self.variant() == Some(CWUFW_A::Clear)
    }
}
#[doc = "Field `CWUF` writer - Clear wakeup flag"]
pub type CWUF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CWUFW_A>;
impl<'a, REG, const O: u8> CWUF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear the WUF Wakeup flag after 2 system clock cycles"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CWUFW_A::Clear)
    }
}
#[doc = "Field `CSBF` reader - Clear standby flag"]
pub type CSBF_R = crate::BitReader<CSBFW_A>;
#[doc = "Clear standby flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CSBFW_A {
    #[doc = "1: Clear the SBF Standby flag"]
    Clear = 1,
}
impl From<CSBFW_A> for bool {
    #[inline(always)]
    fn from(variant: CSBFW_A) -> Self {
        // Single-variant enum: only the set state is named.
        match variant {
            CSBFW_A::Clear => true,
        }
    }
}
impl CSBF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CSBFW_A> {
        // The cleared state has no named variant.
        if self.bits {
            Some(CSBFW_A::Clear)
        } else {
            None
        }
    }
    #[doc = "Clear the SBF Standby flag"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        self.variant() == Some(CSBFW_A::Clear)
    }
}
#[doc = "Field `CSBF` writer - Clear standby flag"]
pub type CSBF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, CSBFW_A>;
impl<'a, REG, const O: u8> CSBF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear the SBF Standby flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(CSBFW_A::Clear)
    }
}
#[doc = "Field `PVDE` reader - Power voltage detector enable"]
pub type PVDE_R = crate::BitReader<PVDE_A>;
#[doc = "Power voltage detector enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PVDE_A {
    #[doc = "0: PVD Disabled"]
    Disabled = 0,
    #[doc = "1: PVD Enabled"]
    Enabled = 1,
}
impl From<PVDE_A> for bool {
    #[inline(always)]
    fn from(variant: PVDE_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            PVDE_A::Disabled => false,
            PVDE_A::Enabled => true,
        }
    }
}
impl PVDE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PVDE_A {
        if self.bits {
            PVDE_A::Enabled
        } else {
            PVDE_A::Disabled
        }
    }
    #[doc = "PVD Disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        self.variant() == PVDE_A::Disabled
    }
    #[doc = "PVD Enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        self.variant() == PVDE_A::Enabled
    }
}
#[doc = "Field `PVDE` writer - Power voltage detector enable"]
pub type PVDE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PVDE_A>;
impl<'a, REG, const O: u8> PVDE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "PVD Disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(PVDE_A::Disabled)
    }
    #[doc = "PVD Enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(PVDE_A::Enabled)
    }
}
#[doc = "Field `PLS` reader - PVD level selection"]
pub type PLS_R = crate::FieldReader<PLS_A>;
#[doc = "PVD level selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum PLS_A {
    #[doc = "0: 1.9 V"]
    V19 = 0,
    #[doc = "1: 2.1 V"]
    V21 = 1,
    #[doc = "2: 2.3 V"]
    V23 = 2,
    #[doc = "3: 2.5 V"]
    V25 = 3,
    #[doc = "4: 2.7 V"]
    V27 = 4,
    #[doc = "5: 2.9 V"]
    V29 = 5,
    #[doc = "6: 3.1 V"]
    V31 = 6,
    #[doc = "7: External input analog voltage (Compare internally to VREFINT)"]
    External = 7,
}
impl From<PLS_A> for u8 {
    #[inline(always)]
    fn from(variant: PLS_A) -> Self {
        variant as u8
    }
}
impl crate::FieldSpec for PLS_A {
    type Ux = u8;
}
impl PLS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PLS_A {
        // All eight 3-bit values are covered; anything else cannot occur.
        match self.bits {
            0 => PLS_A::V19,
            1 => PLS_A::V21,
            2 => PLS_A::V23,
            3 => PLS_A::V25,
            4 => PLS_A::V27,
            5 => PLS_A::V29,
            6 => PLS_A::V31,
            7 => PLS_A::External,
            _ => unreachable!(),
        }
    }
    #[doc = "1.9 V"]
    #[inline(always)]
    pub fn is_v1_9(&self) -> bool {
        self.variant() == PLS_A::V19
    }
    #[doc = "2.1 V"]
    #[inline(always)]
    pub fn is_v2_1(&self) -> bool {
        self.variant() == PLS_A::V21
    }
    #[doc = "2.3 V"]
    #[inline(always)]
    pub fn is_v2_3(&self) -> bool {
        self.variant() == PLS_A::V23
    }
    #[doc = "2.5 V"]
    #[inline(always)]
    pub fn is_v2_5(&self) -> bool {
        self.variant() == PLS_A::V25
    }
    #[doc = "2.7 V"]
    #[inline(always)]
    pub fn is_v2_7(&self) -> bool {
        self.variant() == PLS_A::V27
    }
    #[doc = "2.9 V"]
    #[inline(always)]
    pub fn is_v2_9(&self) -> bool {
        self.variant() == PLS_A::V29
    }
    #[doc = "3.1 V"]
    #[inline(always)]
    pub fn is_v3_1(&self) -> bool {
        self.variant() == PLS_A::V31
    }
    #[doc = "External input analog voltage (Compare internally to VREFINT)"]
    #[inline(always)]
    pub fn is_external(&self) -> bool {
        self.variant() == PLS_A::External
    }
}
#[doc = "Field `PLS` writer - PVD level selection"]
pub type PLS_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, PLS_A>;
impl<'a, REG, const O: u8> PLS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "1.9 V"]
    #[inline(always)]
    pub fn v1_9(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V19)
    }
    #[doc = "2.1 V"]
    #[inline(always)]
    pub fn v2_1(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V21)
    }
    #[doc = "2.3 V"]
    #[inline(always)]
    pub fn v2_3(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V23)
    }
    #[doc = "2.5 V"]
    #[inline(always)]
    pub fn v2_5(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V25)
    }
    #[doc = "2.7 V"]
    #[inline(always)]
    pub fn v2_7(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V27)
    }
    #[doc = "2.9 V"]
    #[inline(always)]
    pub fn v2_9(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V29)
    }
    #[doc = "3.1 V"]
    #[inline(always)]
    pub fn v3_1(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::V31)
    }
    #[doc = "External input analog voltage (Compare internally to VREFINT)"]
    #[inline(always)]
    pub fn external(self) -> &'a mut crate::W<REG> {
        self.variant(PLS_A::External)
    }
}
#[doc = "Field `DBP` reader - Disable backup domain write protection"]
pub type DBP_R = crate::BitReader<DBP_A>;
#[doc = "Disable backup domain write protection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DBP_A {
    #[doc = "0: Access to RTC, RTC Backup and RCC CSR registers disabled"]
    Disabled = 0,
    #[doc = "1: Access to RTC, RTC Backup and RCC CSR registers enabled"]
    Enabled = 1,
}
impl From<DBP_A> for bool {
    #[inline(always)]
    fn from(variant: DBP_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            DBP_A::Disabled => false,
            DBP_A::Enabled => true,
        }
    }
}
impl DBP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DBP_A {
        if self.bits {
            DBP_A::Enabled
        } else {
            DBP_A::Disabled
        }
    }
    #[doc = "Access to RTC, RTC Backup and RCC CSR registers disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        self.variant() == DBP_A::Disabled
    }
    #[doc = "Access to RTC, RTC Backup and RCC CSR registers enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        self.variant() == DBP_A::Enabled
    }
}
#[doc = "Field `DBP` writer - Disable backup domain write protection"]
pub type DBP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DBP_A>;
impl<'a, REG, const O: u8> DBP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Access to RTC, RTC Backup and RCC CSR registers disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(DBP_A::Disabled)
    }
    #[doc = "Access to RTC, RTC Backup and RCC CSR registers enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(DBP_A::Enabled)
    }
}
#[doc = "Field `ULP` reader - Ultra-low-power mode"]
pub type ULP_R = crate::BitReader<ULP_A>;
#[doc = "Ultra-low-power mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ULP_A {
    #[doc = "0: VREFINT is on in low-power mode"]
    Enabled = 0,
    #[doc = "1: VREFINT is off in low-power mode"]
    Disabled = 1,
}
impl From<ULP_A> for bool {
    #[inline(always)]
    fn from(variant: ULP_A) -> Self {
        // NOTE: encoding is inverted relative to most fields —
        // Enabled is 0 and Disabled is 1.
        match variant {
            ULP_A::Enabled => false,
            ULP_A::Disabled => true,
        }
    }
}
impl ULP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ULP_A {
        if self.bits {
            ULP_A::Disabled
        } else {
            ULP_A::Enabled
        }
    }
    #[doc = "VREFINT is on in low-power mode"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        self.variant() == ULP_A::Enabled
    }
    #[doc = "VREFINT is off in low-power mode"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        self.variant() == ULP_A::Disabled
    }
}
#[doc = "Field `ULP` writer - Ultra-low-power mode"]
pub type ULP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ULP_A>;
impl<'a, REG, const O: u8> ULP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "VREFINT is on in low-power mode"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ULP_A::Enabled)
    }
    #[doc = "VREFINT is off in low-power mode"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ULP_A::Disabled)
    }
}
#[doc = "Field `FWU` reader - Fast wakeup"]
pub type FWU_R = crate::BitReader<FWU_A>;
#[doc = "Fast wakeup\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FWU_A {
    #[doc = "0: Low-power modes exit occurs only when VREFINT is ready"]
    Disabled = 0,
    #[doc = "1: VREFINT start up time is ignored when exiting low-power modes"]
    Enabled = 1,
}
impl From<FWU_A> for bool {
    #[inline(always)]
    fn from(variant: FWU_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            FWU_A::Disabled => false,
            FWU_A::Enabled => true,
        }
    }
}
impl FWU_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FWU_A {
        if self.bits {
            FWU_A::Enabled
        } else {
            FWU_A::Disabled
        }
    }
    #[doc = "Low-power modes exit occurs only when VREFINT is ready"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        self.variant() == FWU_A::Disabled
    }
    #[doc = "VREFINT start up time is ignored when exiting low-power modes"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        self.variant() == FWU_A::Enabled
    }
}
#[doc = "Field `FWU` writer - Fast wakeup"]
pub type FWU_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, FWU_A>;
impl<'a, REG, const O: u8> FWU_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Low-power modes exit occurs only when VREFINT is ready"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(FWU_A::Disabled)
    }
    #[doc = "VREFINT start up time is ignored when exiting low-power modes"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(FWU_A::Enabled)
    }
}
#[doc = "Field `VOS` reader - Voltage scaling range selection"]
pub type VOS_R = crate::FieldReader<VOS_A>;
#[doc = "Voltage scaling range selection\n\nValue on reset: 2"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum VOS_A {
    #[doc = "1: 1.8 V (range 1)"]
    V18 = 1,
    #[doc = "2: 1.5 V (range 2)"]
    V15 = 2,
    #[doc = "3: 1.2 V (range 3)"]
    V12 = 3,
}
impl From<VOS_A> for u8 {
    #[inline(always)]
    fn from(variant: VOS_A) -> Self {
        variant as u8
    }
}
impl crate::FieldSpec for VOS_A {
    type Ux = u8;
}
impl VOS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<VOS_A> {
        // 0 is a reserved encoding, so the result is optional.
        match self.bits {
            1 => Some(VOS_A::V18),
            2 => Some(VOS_A::V15),
            3 => Some(VOS_A::V12),
            _ => None,
        }
    }
    #[doc = "1.8 V (range 1)"]
    #[inline(always)]
    pub fn is_v1_8(&self) -> bool {
        self.variant() == Some(VOS_A::V18)
    }
    #[doc = "1.5 V (range 2)"]
    #[inline(always)]
    pub fn is_v1_5(&self) -> bool {
        self.variant() == Some(VOS_A::V15)
    }
    #[doc = "1.2 V (range 3)"]
    #[inline(always)]
    pub fn is_v1_2(&self) -> bool {
        self.variant() == Some(VOS_A::V12)
    }
}
#[doc = "Field `VOS` writer - Voltage scaling range selection"]
pub type VOS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, VOS_A>;
impl<'a, REG, const O: u8> VOS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "1.8 V (range 1)"]
    #[inline(always)]
    pub fn v1_8(self) -> &'a mut crate::W<REG> {
        self.variant(VOS_A::V18)
    }
    #[doc = "1.5 V (range 2)"]
    #[inline(always)]
    pub fn v1_5(self) -> &'a mut crate::W<REG> {
        self.variant(VOS_A::V15)
    }
    #[doc = "1.2 V (range 3)"]
    #[inline(always)]
    pub fn v1_2(self) -> &'a mut crate::W<REG> {
        self.variant(VOS_A::V12)
    }
}
#[doc = "Field `DS_EE_KOFF` reader - Deep sleep mode with Flash memory kept off"]
pub type DS_EE_KOFF_R = crate::BitReader<DS_EE_KOFF_A>;
#[doc = "Deep sleep mode with Flash memory kept off\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DS_EE_KOFF_A {
    #[doc = "0: NVM woken up when exiting from Deepsleep mode even if the bit RUN_PD is set"]
    NvmwakeUp = 0,
    #[doc = "1: NVM not woken up when exiting from low-power mode (if the bit RUN_PD is set)"]
    Nvmsleep = 1,
}
impl From<DS_EE_KOFF_A> for bool {
    #[inline(always)]
    fn from(variant: DS_EE_KOFF_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            DS_EE_KOFF_A::NvmwakeUp => false,
            DS_EE_KOFF_A::Nvmsleep => true,
        }
    }
}
impl DS_EE_KOFF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DS_EE_KOFF_A {
        if self.bits {
            DS_EE_KOFF_A::Nvmsleep
        } else {
            DS_EE_KOFF_A::NvmwakeUp
        }
    }
    #[doc = "NVM woken up when exiting from Deepsleep mode even if the bit RUN_PD is set"]
    #[inline(always)]
    pub fn is_nvmwake_up(&self) -> bool {
        self.variant() == DS_EE_KOFF_A::NvmwakeUp
    }
    #[doc = "NVM not woken up when exiting from low-power mode (if the bit RUN_PD is set)"]
    #[inline(always)]
    pub fn is_nvmsleep(&self) -> bool {
        self.variant() == DS_EE_KOFF_A::Nvmsleep
    }
}
#[doc = "Field `DS_EE_KOFF` writer - Deep sleep mode with Flash memory kept off"]
pub type DS_EE_KOFF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DS_EE_KOFF_A>;
impl<'a, REG, const O: u8> DS_EE_KOFF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "NVM woken up when exiting from Deepsleep mode even if the bit RUN_PD is set"]
    #[inline(always)]
    pub fn nvmwake_up(self) -> &'a mut crate::W<REG> {
        self.variant(DS_EE_KOFF_A::NvmwakeUp)
    }
    #[doc = "NVM not woken up when exiting from low-power mode (if the bit RUN_PD is set)"]
    #[inline(always)]
    pub fn nvmsleep(self) -> &'a mut crate::W<REG> {
        self.variant(DS_EE_KOFF_A::Nvmsleep)
    }
}
#[doc = "Field `LPRUN` reader - Low power run mode"]
pub type LPRUN_R = crate::BitReader<LPRUN_A>;
#[doc = "Low power run mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LPRUN_A {
    #[doc = "0: Voltage regulator in Main mode in Low-power run mode"]
    MainMode = 0,
    #[doc = "1: Voltage regulator in low-power mode in Low-power run mode"]
    LowPowerMode = 1,
}
impl From<LPRUN_A> for bool {
    #[inline(always)]
    fn from(variant: LPRUN_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            LPRUN_A::MainMode => false,
            LPRUN_A::LowPowerMode => true,
        }
    }
}
impl LPRUN_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPRUN_A {
        if self.bits {
            LPRUN_A::LowPowerMode
        } else {
            LPRUN_A::MainMode
        }
    }
    #[doc = "Voltage regulator in Main mode in Low-power run mode"]
    #[inline(always)]
    pub fn is_main_mode(&self) -> bool {
        self.variant() == LPRUN_A::MainMode
    }
    #[doc = "Voltage regulator in low-power mode in Low-power run mode"]
    #[inline(always)]
    pub fn is_low_power_mode(&self) -> bool {
        self.variant() == LPRUN_A::LowPowerMode
    }
}
#[doc = "Field `LPRUN` writer - Low power run mode"]
pub type LPRUN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LPRUN_A>;
impl<'a, REG, const O: u8> LPRUN_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Voltage regulator in Main mode in Low-power run mode"]
    #[inline(always)]
    pub fn main_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPRUN_A::MainMode)
    }
    #[doc = "Voltage regulator in low-power mode in Low-power run mode"]
    #[inline(always)]
    pub fn low_power_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPRUN_A::LowPowerMode)
    }
}
#[doc = "Field `LPDS` reader - Regulator in Low-power deepsleep mode"]
pub type LPDS_R = crate::BitReader<LPDS_A>;
#[doc = "Regulator in Low-power deepsleep mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LPDS_A {
    #[doc = "0: Voltage regulator in Main mode during Deepsleep mode (Stop mode)"]
    MainMode = 0,
    #[doc = "1: Voltage regulator switches to low-power mode when the CPU enters Deepsleep mode (Stop mode)"]
    LowPowerMode = 1,
}
impl From<LPDS_A> for bool {
    #[inline(always)]
    fn from(variant: LPDS_A) -> Self {
        // Exhaustive match keeps the bit encoding explicit.
        match variant {
            LPDS_A::MainMode => false,
            LPDS_A::LowPowerMode => true,
        }
    }
}
impl LPDS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPDS_A {
        if self.bits {
            LPDS_A::LowPowerMode
        } else {
            LPDS_A::MainMode
        }
    }
    #[doc = "Voltage regulator in Main mode during Deepsleep mode (Stop mode)"]
    #[inline(always)]
    pub fn is_main_mode(&self) -> bool {
        self.variant() == LPDS_A::MainMode
    }
    #[doc = "Voltage regulator switches to low-power mode when the CPU enters Deepsleep mode (Stop mode)"]
    #[inline(always)]
    pub fn is_low_power_mode(&self) -> bool {
        self.variant() == LPDS_A::LowPowerMode
    }
}
#[doc = "Field `LPDS` writer - Regulator in Low-power deepsleep mode"]
pub type LPDS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LPDS_A>;
impl<'a, REG, const O: u8> LPDS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Voltage regulator in Main mode during Deepsleep mode (Stop mode)"]
    #[inline(always)]
    pub fn main_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPDS_A::MainMode)
    }
    #[doc = "Voltage regulator switches to low-power mode when the CPU enters Deepsleep mode (Stop mode)"]
    #[inline(always)]
    pub fn low_power_mode(self) -> &'a mut crate::W<REG> {
        self.variant(LPDS_A::LowPowerMode)
    }
}
impl R {
    // Single-bit fields are extracted with an explicit mask at the
    // documented offset; multi-bit fields shift then mask.
    #[doc = "Bit 0 - Low-power deepsleep/Sleep/Low-power run"]
    #[inline(always)]
    pub fn lpsdsr(&self) -> LPSDSR_R {
        LPSDSR_R::new((self.bits & (1 << 0)) != 0)
    }
    #[doc = "Bit 1 - Power down deepsleep"]
    #[inline(always)]
    pub fn pdds(&self) -> PDDS_R {
        PDDS_R::new((self.bits & (1 << 1)) != 0)
    }
    #[doc = "Bit 2 - Clear wakeup flag"]
    #[inline(always)]
    pub fn cwuf(&self) -> CWUF_R {
        CWUF_R::new((self.bits & (1 << 2)) != 0)
    }
    #[doc = "Bit 3 - Clear standby flag"]
    #[inline(always)]
    pub fn csbf(&self) -> CSBF_R {
        CSBF_R::new((self.bits & (1 << 3)) != 0)
    }
    #[doc = "Bit 4 - Power voltage detector enable"]
    #[inline(always)]
    pub fn pvde(&self) -> PVDE_R {
        PVDE_R::new((self.bits & (1 << 4)) != 0)
    }
    #[doc = "Bits 5:7 - PVD level selection"]
    #[inline(always)]
    pub fn pls(&self) -> PLS_R {
        PLS_R::new(((self.bits >> 5) & 0x07) as u8)
    }
    #[doc = "Bit 8 - Disable backup domain write protection"]
    #[inline(always)]
    pub fn dbp(&self) -> DBP_R {
        DBP_R::new((self.bits & (1 << 8)) != 0)
    }
    #[doc = "Bit 9 - Ultra-low-power mode"]
    #[inline(always)]
    pub fn ulp(&self) -> ULP_R {
        ULP_R::new((self.bits & (1 << 9)) != 0)
    }
    #[doc = "Bit 10 - Fast wakeup"]
    #[inline(always)]
    pub fn fwu(&self) -> FWU_R {
        FWU_R::new((self.bits & (1 << 10)) != 0)
    }
    #[doc = "Bits 11:12 - Voltage scaling range selection"]
    #[inline(always)]
    pub fn vos(&self) -> VOS_R {
        VOS_R::new(((self.bits >> 11) & 0x03) as u8)
    }
    #[doc = "Bit 13 - Deep sleep mode with Flash memory kept off"]
    #[inline(always)]
    pub fn ds_ee_koff(&self) -> DS_EE_KOFF_R {
        DS_EE_KOFF_R::new((self.bits & (1 << 13)) != 0)
    }
    #[doc = "Bit 14 - Low power run mode"]
    #[inline(always)]
    pub fn lprun(&self) -> LPRUN_R {
        LPRUN_R::new((self.bits & (1 << 14)) != 0)
    }
    #[doc = "Bit 16 - Regulator in Low-power deepsleep mode"]
    #[inline(always)]
    pub fn lpds(&self) -> LPDS_R {
        LPDS_R::new((self.bits & (1 << 16)) != 0)
    }
}
impl W {
    // One builder method per CR field; the field's bit offset is carried
    // in the writer's const generic parameter `O`.
    #[doc = "Bit 0 - Low-power deepsleep/Sleep/Low-power run"]
    #[inline(always)]
    #[must_use]
    pub fn lpsdsr(&mut self) -> LPSDSR_W<CR_SPEC, 0> {
        LPSDSR_W::new(self)
    }
    #[doc = "Bit 1 - Power down deepsleep"]
    #[inline(always)]
    #[must_use]
    pub fn pdds(&mut self) -> PDDS_W<CR_SPEC, 1> {
        PDDS_W::new(self)
    }
    #[doc = "Bit 2 - Clear wakeup flag"]
    #[inline(always)]
    #[must_use]
    pub fn cwuf(&mut self) -> CWUF_W<CR_SPEC, 2> {
        CWUF_W::new(self)
    }
    #[doc = "Bit 3 - Clear standby flag"]
    #[inline(always)]
    #[must_use]
    pub fn csbf(&mut self) -> CSBF_W<CR_SPEC, 3> {
        CSBF_W::new(self)
    }
    #[doc = "Bit 4 - Power voltage detector enable"]
    #[inline(always)]
    #[must_use]
    pub fn pvde(&mut self) -> PVDE_W<CR_SPEC, 4> {
        PVDE_W::new(self)
    }
    #[doc = "Bits 5:7 - PVD level selection"]
    #[inline(always)]
    #[must_use]
    pub fn pls(&mut self) -> PLS_W<CR_SPEC, 5> {
        PLS_W::new(self)
    }
    #[doc = "Bit 8 - Disable backup domain write protection"]
    #[inline(always)]
    #[must_use]
    pub fn dbp(&mut self) -> DBP_W<CR_SPEC, 8> {
        DBP_W::new(self)
    }
    #[doc = "Bit 9 - Ultra-low-power mode"]
    #[inline(always)]
    #[must_use]
    pub fn ulp(&mut self) -> ULP_W<CR_SPEC, 9> {
        ULP_W::new(self)
    }
    #[doc = "Bit 10 - Fast wakeup"]
    #[inline(always)]
    #[must_use]
    pub fn fwu(&mut self) -> FWU_W<CR_SPEC, 10> {
        FWU_W::new(self)
    }
    #[doc = "Bits 11:12 - Voltage scaling range selection"]
    #[inline(always)]
    #[must_use]
    pub fn vos(&mut self) -> VOS_W<CR_SPEC, 11> {
        VOS_W::new(self)
    }
    #[doc = "Bit 13 - Deep sleep mode with Flash memory kept off"]
    #[inline(always)]
    #[must_use]
    pub fn ds_ee_koff(&mut self) -> DS_EE_KOFF_W<CR_SPEC, 13> {
        DS_EE_KOFF_W::new(self)
    }
    #[doc = "Bit 14 - Low power run mode"]
    #[inline(always)]
    #[must_use]
    pub fn lprun(&mut self) -> LPRUN_W<CR_SPEC, 14> {
        LPRUN_W::new(self)
    }
    #[doc = "Bit 16 - Regulator in Low-power deepsleep mode"]
    #[inline(always)]
    #[must_use]
    pub fn lpds(&mut self) -> LPDS_W<CR_SPEC, 16> {
        LPDS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe contract: the caller must ensure the raw value is a valid
    // bit pattern for this register (bypasses the typed field writers).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "power control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR_SPEC;
impl crate::RegisterSpec for CR_SPEC {
    // CR is a full 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0x1000"]
impl crate::Resettable for CR_SPEC {
    // 0x1000 sets bit 12, i.e. VOS (bits 11:12) = 0b10, matching the
    // field's documented "Value on reset: 2" (1.5 V, range 2).
    const RESET_VALUE: Self::Ux = 0x1000;
}
|
/// Ratio of a circle's circumference to its diameter.
/// The previous hard-coded 3.16 was wrong past the first decimal digit;
/// use the standard library's constant instead.
const PI: f32 = std::f32::consts::PI;
/// Upper bound on the number of points.
const MAX_POINTS: u32 = 100_000;
/// Demonstrates that bindings are immutable by default.
fn err() {
    let _x = 5;
    // Reassigning `_x` here would be rejected by the compiler:
    // _x = 6; // error!
}
#[allow(unused_variables)]
#[allow(unused_assignments)]
/// Demonstrates that `mut` allows reassignment of a binding.
fn ok() {
    let mut x = 5;
    x = 6; // no problem!
    // Reassignment changes which value the name is bound to,
    // not the bits of the old value in place.
    // `mut` also appears inside patterns:
    let (mut _x, _y) = (5, 6);
}
/// Demonstrates mutation contained entirely inside a structure:
/// moving the `Arc` updates its internal refcount while every binding
/// involved remains immutable.
fn interior() {
    use std::sync::Arc;
    let shared = Arc::new(5);
    let _owner = shared;
}
/// Demonstrates `RefCell`'s runtime borrow checking.
/// Deliberately panics when called: the second `borrow_mut` happens
/// while `_y` still holds the first mutable borrow.
fn exterior() {
    use std::cell::RefCell;
    let x = RefCell::new(42);
    let _y = x.borrow_mut();
    // Second exclusive borrow while `_y` is alive → BorrowMutError panic.
    let _z = x.borrow_mut(); // will panic at runtime
}
// Mutability is a property of either a borrow (&mut) or a binding (let mut)
/// Shows per-field interior mutability: wrapping one field in `Cell`
/// lets it change even though the struct binding is immutable.
fn emulate_field_mutability() {
    use std::cell::Cell;
    struct Point {
        x: i32,
        y: Cell<i32>, // a mutable memory location
    }
    let point = Point {
        x: 5,
        y: Cell::new(6),
    };
    // `point` is not `mut`, yet `y` can still be updated through the Cell.
    point.y.set(7);
    println!("y: {:?}", point.y);
}
/// Demonstrates shadowing: the second `let` introduces a brand-new
/// binding with the same name, so the type may change (&str → usize).
fn shadowing() {
    let _spaces = " ";
    let _spaces = _spaces.len();
}
fn main() {
    // exterior() // left disabled on purpose: calling it panics
    //             // (double borrow_mut on the same RefCell)
}
|
extern crate gocar;
extern crate toml;
/// Reads and parses `Gocar.toml` from the current directory, then fills
/// in the default build profiles.
///
/// Panics with a descriptive message if the manifest is missing or
/// malformed — this is a CLI entry path, so aborting is acceptable.
fn load_config() -> gocar::Project {
    // `fs::read` replaces the manual open + read_to_end dance and sizes
    // the buffer from file metadata.
    let config = std::fs::read("Gocar.toml").expect("Failed to read Gocar.toml");
    let mut config = toml::from_slice::<gocar::Project>(&config)
        .expect("Gocar.toml is not a valid project manifest");
    config.init_default_profiles();
    config
}
/// Builds the whole project into `target/<profile>` as a static library
/// target, printing a colored error line on failure.
fn build(profile: &str) {
    let config = load_config();
    // `Path::new` is the idiomatic spelling of the former
    // `AsRef::<std::path::Path>::as_ref("target")`.
    let target = std::path::Path::new("target").join(profile);
    let current_dir = std::env::current_dir().expect("Invalid current working directory");
    std::fs::create_dir_all(&target).expect("Failed to create target directory");
    match config.build(&target, &current_dir, profile, gocar::LibraryType::Static) {
        Ok(()) => (),
        Err(gocar::Error::Command(err)) => println!(" \u{1B}[31;1mError\u{1B}[0m: {}", err),
        Err(gocar::Error::Filesystem(err)) => println!(" \u{1B}[31;1mError\u{1B}[0m: {}", err),
        Err(gocar::Error::InvalidProfileName) => println!(" \u{1B}[31;1mError\u{1B}[0m: invalid profile name"),
    }
}
/// Compiles and runs every C/C++ integration test found in `tests/`,
/// then prints a cargo-style summary line.
fn test(profile_name: &str) {
    let config = load_config();
    // Canonicalized header-only paths are required by the build env below.
    let headers_only = config.headers_only.iter().map(|path| path.canonicalize()).collect::<Result<_, _>>().expect("Failed to canonicalize headers_only");
    // Tests build into target/<profile>/integration_tests.
    let mut target = AsRef::<std::path::Path>::as_ref("target").join(profile_name);
    target.push("integration_tests");
    let profile = config.profiles.get(profile_name).expect("unknown profile");
    //println!("Testing with profile: {:?}", profile);
    let mut test_count = 0;
    let mut fail_count = 0;
    let current_dir = std::env::current_dir().expect("Invalid current working directory");
    std::fs::create_dir_all(&target).unwrap();
    // Dependencies are built once up front; each test links against them.
    let (include_dir, lib_dirs, libs) = config.build_dependencies(&target, &current_dir, profile_name, gocar::LibraryType::Static).unwrap();
    for test in std::fs::read_dir("tests").unwrap().map(Result::unwrap).map(|e| e.path()) {
        // Only .c / .cpp files are test sources; extensionless entries are skipped.
        let extension_is_valid = if let Some(extension) = test.extension() {
            extension == "c" || extension == "cpp"
        } else {
            continue;
        };
        let test_name: std::path::PathBuf = test.file_stem().unwrap().into();
        if extension_is_valid {
            // Each test source becomes a standalone binary target built
            // with debug options.
            let binary = gocar::Binary {
                target: gocar::Target {
                    name: test_name.clone(),
                    root_files: std::iter::once(test).collect(),
                    compile_options: gocar::CompileOptions::debug(),
                    link_options: Vec::new(),
                    ignore_files: Default::default(),
                    _phantom: Default::default(),
                }
            };
            test_count += 1;
            let env = gocar::BuildEnv {
                target_dir: &target,
                include_dir: &include_dir,
                include_dirs: &[],
                lib_dirs: &lib_dirs,
                libs: &libs,
                profile,
                strip_prefix: &current_dir,
                project_dir: &current_dir,
                project: &config,
                headers_only: &headers_only,
                os: gocar::OsSpec::linux(),
            };
            binary.build(&env).unwrap();
            let test_binary = target.join(&test_name);
            println!("    \u{1B}[32;1mRunning\u{1B}[0m {:?}", test_binary);
            // A test fails when its process exits with a non-zero status.
            if !std::process::Command::new(&test_binary)
                .spawn().unwrap()
                .wait().unwrap()
                .success() {
                fail_count += 1;
                println!("    \u{1B}[31;1mFailed\u{1B}[0m {:?}", test_binary);
            }
        }
    }
    println!("test result: {}. total: {}; passed: {}; failed: {}", if fail_count == 0 { "\u{1B}[32mok\u{1B}[0m" } else { "\u{1B}[31mFAILED\u{1B}[0m" }, test_count, test_count - fail_count, fail_count);
}
/// CLI entry point: dispatches `build`, `run` or `test`, with an
/// optional `--release` flag selecting the profile.
fn main() {
    let mut args = std::env::args();
    args.next().expect("Not even zeroth argument given");
    let action = args.next().expect("Usage: gocar (build [--release] | run [--release] | test)");
    // `as_deref` turns Option<String> into Option<&str> for matching.
    let profile = match args.next().as_deref() {
        Some("--release") => "release",
        _ => "debug",
    };
    match action.as_str() {
        "build" => build(profile),
        "run" => unimplemented!(),
        "test" => test(profile),
        _ => panic!("Unknown action: {}", action),
    }
}
|
use crate::commands::ledger::{get_icpts_from_args, send_and_notify};
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use crate::lib::nns_types::account_identifier::Subaccount;
use crate::lib::nns_types::icpts::{ICPTs, TRANSACTION_FEE};
use crate::lib::nns_types::{CyclesResponse, Memo};
use crate::util::clap::validators::{e8s_validator, icpts_amount_validator};
use anyhow::anyhow;
use clap::Clap;
use ic_types::principal::Principal;
use std::str::FromStr;
// Ledger memo marking the transfer as a canister top-up.
// 1347768404 == 0x50555054, i.e. the bytes "TPUP" read little-endian —
// TODO confirm against the ledger canister's expected memo values.
const MEMO_TOP_UP_CANISTER: u64 = 1347768404_u64;
/// Top up a canister with cycles minted from ICP
#[derive(Clap)]
pub struct TopUpOpts {
    /// Specify the canister id to top up
    canister: String,
    /// ICP to mint into cycles and deposit into destination canister
    /// Can be specified as a Decimal with the fractional portion up to 8 decimal places
    /// i.e. 100.012
    #[clap(long, validator(icpts_amount_validator))]
    amount: Option<String>,
    /// Specify ICP as a whole number, helpful for use in conjunction with `--e8s`
    #[clap(long, validator(e8s_validator), conflicts_with("amount"))]
    icp: Option<String>,
    /// Specify e8s as a whole number, helpful for use in conjunction with `--icp`
    #[clap(long, validator(e8s_validator), conflicts_with("amount"))]
    e8s: Option<String>,
    /// Transaction fee, default is 10000 e8s.
    // Defaults to TRANSACTION_FEE in `exec` when not provided.
    #[clap(long, validator(icpts_amount_validator))]
    fee: Option<String>,
    /// Max fee, default is 10000 e8s.
    // Defaults to TRANSACTION_FEE in `exec` when not provided.
    #[clap(long, validator(icpts_amount_validator))]
    max_fee: Option<String>,
}
/// Mints cycles from ICP and deposits them into the given canister.
///
/// Resolves the amount from `--amount` or `--icp`/`--e8s`, parses the
/// (optional) fees, then sends the ledger transfer and reports the outcome.
///
/// # Errors
/// Fails if the amount/fee strings do not parse, the canister id is not a
/// valid principal, or the ledger call itself fails.
pub async fn exec(env: &dyn Environment, opts: TopUpOpts) -> DfxResult {
    let amount = get_icpts_from_args(opts.amount, opts.icp, opts.e8s)?;
    // fee and max_fee both default to the standard transaction fee; parse
    // them with the same pattern for consistency.
    let fee = opts.fee.map_or(Ok(TRANSACTION_FEE), |v| {
        ICPTs::from_str(&v).map_err(|err| anyhow!(err))
    })?;
    let max_fee = opts.max_fee.map_or(Ok(TRANSACTION_FEE), |v| {
        ICPTs::from_str(&v).map_err(|err| anyhow!(err))
    })?;
    let memo = Memo(MEMO_TOP_UP_CANISTER);
    // The destination canister is encoded as the subaccount of the transfer.
    let to_subaccount = Some(Subaccount::from(&Principal::from_text(opts.canister)?));
    match send_and_notify(env, memo, amount, fee, to_subaccount, max_fee).await? {
        CyclesResponse::ToppedUp(()) => println!("Canister was topped up!"),
        CyclesResponse::Refunded(msg, Some(height)) => {
            // Fixed message punctuation (was "message :{}").
            println!("Refunded at block height {} with message: {}", height, msg)
        }
        CyclesResponse::Refunded(msg, None) => println!("Refunded with message: {}", msg),
        // Top-up requests never create canisters.
        CyclesResponse::CanisterCreated(_) => unreachable!(),
    }
    Ok(())
}
|
// #![cfg(feature = "cbor")]
// #![cfg(feature = "json")]
// #![cfg(not(feature = "lsp"))]
// #[macro_use]
// extern crate log;
// use cddl::{
// cddl_from_str, parser::root_type_name_from_cddl_str, validate_cbor_from_slice,
// validate_json_from_str,
// };
// use clap::{ArgGroup, Args, Parser, Subcommand};
// use simplelog::*;
// use std::{
// error::Error,
// fmt::Write,
// fs::{self, File},
// io::{self, BufReader, Read},
// path::Path,
// };
// #[derive(Parser)]
// #[clap(author, version, about = "Tool for verifying conformance of CDDL definitions against RFC 8610 and for validating JSON documents and CBOR binary files", long_about = None)]
// struct Cli {
// /// Enable CI mode, failing if files cannot be found or other edge cases.
// #[clap(long)]
// ci: bool,
// #[clap(subcommand)]
// command: Commands,
// }
// #[derive(Subcommand)]
// enum Commands {
// #[clap(name = "compile-cddl", about = "Compile CDDL against RFC 8610")]
// CompileCddl {
// #[clap(short = 'c', long = "cddl", help = "Path to CDDL document")]
// file: String,
// },
// #[clap(name = "compile-json", about = "Compile JSON against RFC 8259")]
// CompileJson {
// #[clap(short = 'j', long = "json", help = "Path to JSON document")]
// file: String,
// },
// Validate(Validate),
// }
// #[derive(Args)]
// #[clap(about = "Validate JSON and/or CBOR against a CDDL definition")]
// #[clap(group(ArgGroup::new("targets").required(true).multiple(true).args(&["stdin", "json", "cbor"])))]
// struct Validate {
// #[clap(short = 'd', long = "cddl", help = "CDDL document")]
// cddl: String,
// #[clap(
// short = 'f',
// long = "features",
// help = "Optional features to enable during validation",
// use_value_delimiter = true
// )]
// features: Option<Vec<String>>,
// #[clap(
// short = 'j',
// long = "json",
// help = "JSON document(s) to validate",
// use_value_delimiter = true,
// multiple_values = true
// )]
// json: Option<Vec<String>>,
// #[clap(
// short = 'c',
// long = "cbor",
// help = "CBOR binary file(s) to validate",
// multiple_values = true,
// use_value_delimiter = true
// )]
// cbor: Option<Vec<String>>,
// #[clap(
// long = "stdin",
// help = "JSON or CBOR input from stdin. Assumes UTF-8 encoding is JSON, otherwise parses as CBOR"
// )]
// stdin: bool,
// }
// macro_rules! error {
// ($ci: expr, $($args: tt)+ ) => {
// log::error!($($args)+);
// if $ci {
// return Err(format!($($args)+).into());
// }
// };
// }
// fn main() -> Result<(), Box<dyn Error>> {
// TermLogger::init(
// LevelFilter::Info,
// ConfigBuilder::new()
// .set_time_level(LevelFilter::Off)
// .build(),
// TerminalMode::Mixed,
// ColorChoice::Auto,
// )?;
// let cli = Cli::parse();
// match &cli.command {
// Commands::CompileCddl { file } => {
// let p = Path::new(file);
// if !p.exists() {
// error!(cli.ci, "CDDL document {:?} does not exist", p);
// return Ok(());
// }
// let file_content = fs::read_to_string(file)?;
// cddl_from_str(&file_content, true).map(|_| ())?;
// info!("{} is conformant", file);
// }
// Commands::CompileJson { file } => {
// let p = Path::new(file);
// if !p.exists() {
// error!(cli.ci, "JSON document {:?} does not exist", p);
// return Ok(());
// }
// let file = File::open(file)?;
// let reader = BufReader::new(file);
// let _: serde_json::Value = serde_json::from_reader(reader)?;
// return Ok(());
// }
// Commands::Validate(validate) => {
// #[cfg(feature = "additional-controls")]
// let enabled_features: Option<Vec<&str>> = validate
// .features
// .as_ref()
// .map(|f| f.iter().map(|s| s.as_str()).collect());
// #[cfg(feature = "additional-controls")]
// if let Some(enabled_features) = &enabled_features {
// let mut feature_str = String::from("enabled features: [");
// for (idx, feature) in enabled_features.iter().enumerate() {
// if idx == 0 {
// let _ = write!(feature_str, "\"{}\"", feature);
// } else {
// let _ = write!(feature_str, ", \"{}\"", feature);
// }
// }
// feature_str.push(']');
// info!("{}", feature_str);
// }
// let p = Path::new(&validate.cddl);
// if !p.exists() {
// error!(cli.ci, "CDDL document {:?} does not exist", p);
// return Ok(());
// }
// let cddl_str = fs::read_to_string(&validate.cddl)?;
// info!(
// "Root type for validation: {}",
// root_type_name_from_cddl_str(&cddl_str)?
// );
// if let Some(files) = &validate.json {
// for file in files {
// let p = Path::new(file);
// if !p.exists() {
// error!(cli.ci, "File {:?} does not exist", p);
// continue;
// }
// #[cfg(feature = "additional-controls")]
// let r = validate_json_from_str(
// &cddl_str,
// &fs::read_to_string(file)?,
// enabled_features.as_deref(),
// );
// #[cfg(not(feature = "additional-controls"))]
// let r = validate_json_from_str(&cddl_str, &fs::read_to_string(file)?);
// match r {
// Ok(_) => {
// info!("Validation of {:?} is successful", p);
// }
// Err(e) => {
// error!(
// cli.ci,
// "Validation of {:?} failed: {}",
// p,
// e.to_string().trim_end()
// );
// }
// }
// }
// }
// if let Some(files) = &validate.cbor {
// for file in files {
// let p = Path::new(file);
// if !p.exists() {
// error!(cli.ci, "CBOR binary file {:?} does not exist", p);
// continue;
// }
// let mut f = File::open(p)?;
// let mut data = Vec::new();
// f.read_to_end(&mut data)?;
// #[cfg(feature = "additional-controls")]
// let c = validate_cbor_from_slice(&cddl_str, &data, None);
// #[cfg(not(feature = "additional-controls"))]
// let c = validate_cbor_from_slice(&cddl_str, &data);
// match c {
// Ok(_) => {
// info!("Validation of {:?} is successful", p);
// }
// Err(e) => {
// error!(
// cli.ci,
// "Validation of {:?} failed: {}",
// p,
// e.to_string().trim_end()
// );
// }
// }
// }
// }
// if validate.stdin {
// let stdin = io::stdin();
// let mut reader = stdin.lock();
// let mut data = Vec::new();
// reader.read_to_end(&mut data)?;
// if let Ok(json) = std::str::from_utf8(&data) {
// #[cfg(feature = "additional-controls")]
// let r = validate_json_from_str(&cddl_str, json, None);
// #[cfg(not(feature = "additional-controls"))]
// let r = validate_json_from_str(&cddl_str, json);
// match r {
// Ok(_) => {
// info!("Validation from stdin is successful");
// }
// Err(e) => {
// error!(
// cli.ci,
// "Validation from stdin failed: {}",
// e.to_string().trim_end()
// );
// }
// }
// } else {
// #[cfg(feature = "additional-controls")]
// let c = validate_cbor_from_slice(&cddl_str, &data, enabled_features.as_deref());
// #[cfg(not(feature = "additional-controls"))]
// let c = validate_cbor_from_slice(&cddl_str, &data);
// match c {
// Ok(_) => {
// info!("Validation from stdin is successful");
// }
// Err(e) => {
// error!(
// cli.ci,
// "Validation from stdin failed: {}",
// e.to_string().trim_end()
// );
// }
// }
// }
// }
// }
// }
// Ok(())
// }
fn main() {}
|
use bincode::{deserialize, serialize};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use entry::Entry;
use lsm_tree::{Error, Result};
use probabilistic_collections::bloom::BloomFilter;
use rand::{thread_rng, Rng};
use serde::de::{self, Deserialize, DeserializeOwned, Deserializer};
use serde::ser::{Serialize, Serializer};
use std::cmp;
use std::fmt::{self, Debug};
use std::fs;
use std::hash::Hash;
use std::io::{BufWriter, ErrorKind, Read, Seek, SeekFrom, Write};
use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use std::result;
/// Returns the smallest closed range that covers both `range_1` and `range_2`.
pub fn merge_ranges<T>(range_1: (T, T), range_2: (T, T)) -> (T, T)
where
    T: Ord,
{
    let (start_1, end_1) = range_1;
    let (start_2, end_2) = range_2;
    // Equivalent to (min of starts, max of ends), with std's tie-breaking:
    // min keeps the first argument on ties, max keeps the second.
    let start = if start_1 <= start_2 { start_1 } else { start_2 };
    let end = if end_1 > end_2 { end_1 } else { end_2 };
    (start, end)
}
/// Returns `true` when the two closed ranges share at least one point.
pub fn is_intersecting<T>(range_1: &(T, T), range_2: &(T, T)) -> bool
where
    T: Ord,
{
    // Two closed ranges overlap iff each starts no later than the other ends;
    // equivalent to max(starts) <= min(ends).
    range_1.0 <= range_2.1 && range_2.0 <= range_1.1
}
/// A value stored in an SSTable: the payload (`None` marks a tombstone, i.e.
/// a deletion) plus the logical timestamp at which it was written.
#[derive(Deserialize, Serialize)]
pub struct SSTableValue<U> {
    pub data: Option<U>,
    pub logical_time: u64,
}
impl<U> PartialEq for SSTableValue<U> {
    // Equality considers only `logical_time`; the payload `data` is ignored.
    fn eq(&self, other: &SSTableValue<U>) -> bool {
        self.logical_time == other.logical_time
    }
}
impl<U> Ord for SSTableValue<U> {
    // Intentionally reversed comparison: a larger `logical_time` sorts first —
    // presumably so the most recent value wins when merging; confirm against
    // the merge logic before "fixing" this.
    fn cmp(&self, other: &SSTableValue<U>) -> cmp::Ordering {
        other.logical_time.cmp(&self.logical_time)
    }
}
impl<U> PartialOrd for SSTableValue<U> {
    /// Delegates to the total order defined by `Ord::cmp`, so this is always
    /// `Some`.
    fn partial_cmp(&self, other: &SSTableValue<U>) -> Option<cmp::Ordering> {
        // `other` is already a reference; the previous `&other` created a
        // needless `&&SSTableValue` that only worked via deref coercion.
        Some(self.cmp(other))
    }
}
impl<U> Eq for SSTableValue<U> {}
/// Metadata persisted in `summary.dat`: entry counts, byte size, key and
/// logical-time ranges, and the sparse index (first key and file offset of
/// each index block).
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct SSTableSummary<T> {
    pub entry_count: usize,
    pub tombstone_count: usize,
    pub size: u64,
    pub key_range: (T, T),
    pub logical_time_range: (u64, u64),
    pub index: Vec<(T, u64)>,
}
/// Incrementally writes a new SSTable directory (`data.dat`, `index.dat`,
/// plus `summary.dat`/`filter.dat` on flush). Entries are appended one at a
/// time and grouped into index blocks.
pub struct SSTableBuilder<T, U> {
    pub sstable_path: PathBuf,
    pub entry_count: usize,
    // Number of appended entries whose data was `None` (deletions).
    pub tombstone_count: usize,
    // Running byte size of the data and index files.
    pub size: u64,
    // (first, last) key appended so far; `None` until the first append.
    pub key_range: Option<(T, T)>,
    // (min, max) logical time observed so far.
    pub logical_time_range: Option<(u64, u64)>,
    // Sparse index: first key and index-file offset of each flushed block.
    pub index: Vec<(T, u64)>,
    // Count of entries in the current, not-yet-flushed index block.
    block_index: usize,
    // Entries per index block (sqrt of the entry-count hint, see `new`).
    block_size: usize,
    // Keys/offsets accumulated for the current index block.
    index_block: Vec<(T, u64)>,
    // Bloom filter over every appended key.
    filter: BloomFilter,
    // Current write offsets into the two files.
    index_offset: u64,
    index_stream: BufWriter<fs::File>,
    data_offset: u64,
    data_stream: BufWriter<fs::File>,
    // `U` appears only in serialized entries, not in any field.
    _marker: PhantomData<U>,
}
impl<T, U> SSTableBuilder<T, U>
where
    T: Clone + DeserializeOwned + Hash + Serialize,
    U: DeserializeOwned + Serialize,
{
    /// Returns a random 32-character ASCII name used as the table's directory.
    fn generate_file_name() -> String {
        thread_rng().gen_ascii_chars().take(32).collect()
    }
    /// Creates a builder rooted at `db_path`, creating a freshly named
    /// subdirectory holding `data.dat` and `index.dat`.
    ///
    /// `entry_count_hint` sizes the bloom filter (5% false-positive rate) and
    /// sets the index-block size to sqrt(hint) entries.
    pub fn new<P>(db_path: P, entry_count_hint: usize) -> Result<Self>
    where
        P: AsRef<Path>,
    {
        let db_path = PathBuf::from(db_path.as_ref());
        let sstable_path = db_path.join(Self::generate_file_name());
        fs::create_dir(sstable_path.as_path())?;
        let data_file = fs::File::create(sstable_path.join("data.dat"))?;
        let data_stream = BufWriter::new(data_file);
        let index_file = fs::File::create(sstable_path.join("index.dat"))?;
        let index_stream = BufWriter::new(index_file);
        Ok(SSTableBuilder {
            sstable_path,
            entry_count: 0,
            tombstone_count: 0,
            size: 0,
            key_range: None,
            logical_time_range: None,
            index: Vec::new(),
            block_index: 0,
            block_size: (entry_count_hint as f64).sqrt().ceil() as usize,
            index_block: Vec::new(),
            filter: BloomFilter::new(entry_count_hint, 0.05),
            index_offset: 0,
            index_stream,
            data_offset: 0,
            data_stream,
            _marker: PhantomData,
        })
    }
    /// Appends one length-prefixed `(key, value)` entry to `data.dat`,
    /// updating counts, ranges, the bloom filter, and the current index
    /// block (flushed to `index.dat` whenever it reaches `block_size`).
    ///
    /// NOTE(review): `key_range` keeps the first key as start and the latest
    /// key as end, so callers presumably must append keys in sorted order —
    /// confirm at the call sites.
    pub fn append(&mut self, key: T, value: SSTableValue<U>) -> Result<()> {
        let logical_time = value.logical_time;
        self.entry_count += 1;
        if value.data.is_none() {
            self.tombstone_count += 1;
        }
        match self.key_range.take() {
            Some((start, _)) => self.key_range = Some((start, key.clone())),
            None => self.key_range = Some((key.clone(), key.clone())),
        }
        match self.logical_time_range.take() {
            Some((start, end)) => {
                self.logical_time_range = Some((
                    cmp::min(start, logical_time),
                    cmp::max(end, logical_time),
                ))
            },
            None => self.logical_time_range = Some((logical_time, logical_time)),
        }
        self.filter.insert(&key);
        self.index_block.push((key.clone(), self.data_offset));
        let serialized_entry = serialize(&(key, value))?;
        // Entries are length-prefixed with a big-endian u64 (hence the +8s).
        self.data_stream.write_u64::<BigEndian>(serialized_entry.len() as u64)?;
        self.data_stream.write_all(&serialized_entry)?;
        self.data_offset += 8 + serialized_entry.len() as u64;
        self.size += 8 + serialized_entry.len() as u64;
        self.block_index += 1;
        if self.block_index == self.block_size {
            // Block full: record its first key in the sparse index and write
            // the block to the index file.
            self.index.push((self.index_block[0].0.clone(), self.index_offset));
            let serialized_index_block = serialize(&self.index_block)?;
            self.index_stream.write_u64::<BigEndian>(serialized_index_block.len() as u64)?;
            self.index_stream.write_all(&serialized_index_block)?;
            self.index_offset += 8 + serialized_index_block.len() as u64;
            self.size += 8 + serialized_index_block.len() as u64;
            self.block_index = 0;
            self.index_block.clear();
        }
        Ok(())
    }
    /// Finalizes the table: writes the trailing partial index block, then the
    /// summary and bloom filter, flushes both streams, and returns the
    /// table's directory path.
    ///
    /// Panics if nothing was ever appended.
    ///
    /// NOTE(review): unlike `append`, the trailing index block written here is
    /// NOT added to `self.size`, so the persisted summary's `size` excludes
    /// it — confirm whether that is intended.
    pub fn flush(&mut self) -> Result<PathBuf> {
        if !self.index_block.is_empty() {
            self.index.push((self.index_block[0].0.clone(), self.index_offset));
            let serialized_index_block = serialize(&self.index_block)?;
            self.index_stream.write_u64::<BigEndian>(serialized_index_block.len() as u64)?;
            self.index_stream.write_all(&serialized_index_block)?;
        }
        let (key_range, logical_time_range) = match (self.key_range.clone(), self.logical_time_range) {
            (Some(key_range), Some(logical_time_range)) => (key_range, logical_time_range),
            _ => panic!("Expected non-empty SSTable"),
        };
        let serialized_summary = serialize(&SSTableSummary {
            entry_count: self.entry_count,
            tombstone_count: self.tombstone_count,
            size: self.size,
            key_range,
            logical_time_range,
            index: self.index.clone(),
        })?;
        let mut summary_file = fs::File::create(self.sstable_path.join("summary.dat"))?;
        summary_file.write_all(&serialized_summary)?;
        let serialized_filter = serialize(&self.filter)?;
        let mut filter_file = fs::File::create(self.sstable_path.join("filter.dat"))?;
        filter_file.write_all(&serialized_filter)?;
        self.index_stream.flush()?;
        self.data_stream.flush()?;
        Ok(self.sstable_path.clone())
    }
}
/// Read-side handle to an on-disk SSTable: the summary and bloom filter are
/// held in memory, entry data is read from the files on demand.
#[derive(Clone)]
pub struct SSTable<T, U> {
    pub path: PathBuf,
    pub summary: SSTableSummary<T>,
    pub filter: BloomFilter,
    _marker: PhantomData<U>,
}
impl<T, U> SSTable<T, U>
where
T: DeserializeOwned,
U: DeserializeOwned,
{
pub fn new<P>(path: P) -> Result<Self>
where
P: AsRef<Path>,
{
let mut buffer = Vec::new();
let mut file = fs::File::open(path.as_ref().join("summary.dat"))?;
file.read_to_end(&mut buffer)?;
let summary = deserialize(&buffer)?;
let mut buffer = Vec::new();
let mut file = fs::File::open(path.as_ref().join("filter.dat"))?;
file.read_to_end(&mut buffer)?;
let filter = deserialize(&buffer)?;
Ok(SSTable {
path: PathBuf::from(path.as_ref()),
summary,
filter,
_marker: PhantomData,
})
}
}
impl<T, U> SSTable<T, U>
where
    T: Hash + DeserializeOwned + Ord + Serialize,
    U: DeserializeOwned + Serialize,
{
    /// Binary search for the position of the last index entry whose key is
    /// <= `key`; `None` when every entry's key is greater.
    fn floor_offset(index: &[(T, u64)], key: &T) -> Option<usize> {
        let mut lo = 0isize;
        let mut hi = index.len() as isize - 1;
        while lo <= hi {
            let mid = (lo + hi) / 2;
            if index[mid as usize].0 <= *key {
                lo = mid + 1;
            } else {
                hi = mid - 1;
            }
        }
        // `hi` lands one position left of the first strictly-greater entry.
        if hi == -1 {
            None
        } else {
            Some(hi as usize)
        }
    }
    /// Looks up `key`, returning its stored value (possibly a tombstone with
    /// `data: None`) or `Ok(None)` when absent.
    ///
    /// The summary's key range and the bloom filter reject most absent keys
    /// without touching disk; otherwise one index block and one data entry
    /// are read via seek.
    pub fn get(&self, key: &T) -> Result<Option<SSTableValue<U>>> {
        if *key < self.summary.key_range.0 || *key > self.summary.key_range.1 {
            return Ok(None);
        }
        if !self.filter.contains(key) {
            return Ok(None);
        }
        let index = match Self::floor_offset(&self.summary.index, key) {
            Some(index) => index,
            None => return Ok(None),
        };
        let mut index_file = fs::File::open(self.path.join("index.dat"))?;
        index_file.seek(SeekFrom::Start(self.summary.index[index].1))?;
        let size = index_file.read_u64::<BigEndian>()?;
        let mut buffer = vec![0; size as usize];
        index_file.read_exact(buffer.as_mut_slice())?;
        let index_block: Vec<(T, u64)> = deserialize(&buffer)?;
        // The index block stores every key of its span, so an exact-match miss
        // here means the key is not in the table (bloom false positive).
        let index = match index_block.binary_search_by_key(&key, |index_entry| &index_entry.0) {
            Ok(index) => index,
            Err(_) => return Ok(None),
        };
        let mut data_file = fs::File::open(self.path.join("data.dat"))?;
        data_file.seek(SeekFrom::Start(index_block[index].1))?;
        let size = data_file.read_u64::<BigEndian>()?;
        let mut buffer = vec![0; size as usize];
        data_file.read_exact(buffer.as_mut_slice())?;
        deserialize(&buffer)
            .map_err(Error::SerdeError)
            .map(|entry: Entry<T, SSTableValue<U>>| Some(entry.value))
    }
    /// Returns an iterator over all entries of `data.dat` in append order;
    /// the file is opened lazily on the first `next()` call.
    pub fn data_iter(&self) -> SSTableDataIter<T, U> {
        SSTableDataIter {
            data_path: self.path.join("data.dat"),
            data_file: None,
            _marker: PhantomData,
        }
    }
}
/// Iterator over every entry of an SSTable's `data.dat`, in file order.
pub struct SSTableDataIter<T, U> {
    data_path: PathBuf,
    // Opened lazily on the first `next()` call.
    data_file: Option<fs::File>,
    _marker: PhantomData<(T, U)>,
}
impl<T, U> Iterator for SSTableDataIter<T, U>
where
    T: DeserializeOwned,
    U: DeserializeOwned,
{
    type Item = Result<Entry<T, SSTableValue<U>>>;
    /// Reads the next length-prefixed entry from the data file.
    ///
    /// A clean EOF at a length prefix ends iteration; any other I/O or decode
    /// failure is yielded as `Some(Err(..))`.
    fn next(&mut self) -> Option<Self::Item> {
        if self.data_file.is_none() {
            match fs::File::open(self.data_path.as_path()) {
                Ok(data_file) => self.data_file = Some(data_file),
                Err(error) => return Some(Err(Error::from(error))),
            }
        }
        let data_file = self.data_file.as_mut().expect("Expected opened file.");
        let size = match data_file.read_u64::<BigEndian>() {
            Ok(size) => size,
            Err(error) => {
                match error.kind() {
                    // EOF at a prefix boundary is the normal end of the file.
                    ErrorKind::UnexpectedEof => return None,
                    _ => return Some(Err(Error::from(error))),
                }
            },
        };
        let mut buffer = vec![0; size as usize];
        let result = data_file.read_exact(buffer.as_mut_slice());
        if let Err(error) = result {
            return Some(Err(Error::from(error)));
        }
        Some(deserialize(&buffer).map_err(Error::SerdeError))
    }
}
// An `SSTable` serializes as just its directory path; the summary and filter
// are reloaded from disk on deserialization (see `Deserialize` below).
impl<T, U> Serialize for SSTable<T, U> {
    fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.path.serialize(serializer)
    }
}
impl<'de, T, U> Deserialize<'de> for SSTable<T, U>
where
T: DeserializeOwned,
U: DeserializeOwned,
{
fn deserialize<D>(deserializer: D) -> result::Result<SSTable<T, U>, D::Error>
where
D: Deserializer<'de>,
{
let ret = SSTable::new(PathBuf::deserialize(deserializer)?).map_err(de::Error::custom);
Ok(ret?)
}
}
impl<T, U> Debug for SSTable<T, U>
where
    T: Debug,
    U: Debug,
{
    /// Formats the table summary as three newline-terminated lines: entry
    /// count, tombstone count, key range.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let summary = &self.summary;
        write!(
            f,
            "entry count: {:?}\ntombstone count: {:?}\nkey range: {:?}\n",
            summary.entry_count, summary.tombstone_count, summary.key_range
        )
    }
}
|
#[cfg(feature = "python")]
pub mod py;
mod test;
/// The ideal single-chain model.
pub mod ideal;
/// The freely-jointed chain (FJC) single-chain model.
pub mod fjc;
/// The extensible freely-jointed chain (EFJC) single-chain model.
pub mod efjc;
/// The square-well freely-jointed chain (SWFJC) single-chain model.
pub mod swfjc;
/// The arbitrary link potential freely-jointed chain (uFJC) single-chain model.
pub mod ufjc;
/// The worm-like chain (WLC) single-chain model.
pub mod wlc;
// Unit constant shared across the single-chain model modules.
static ONE: f64 = 1.0;
// NOTE(review): despite its name, this is a small positive cutoff (1e-6), not
// exactly zero — presumably to avoid singularities at the origin; confirm
// against its uses in the model modules.
static ZERO: f64 = 1e-6;
// Number of points used by the models' discretizations — TODO confirm usage.
static POINTS: u128 = 64;
|
#[cfg(test)]
mod test;
use crate::{
bson::{doc, Document},
cmap::{Command, RawCommandResponse, StreamDescription},
cursor::CursorSpecification,
error::{ErrorKind, Result},
operation::{
append_options,
CursorBody,
OperationWithDefaults,
Retryability,
SERVER_4_4_0_WIRE_VERSION,
},
options::{CursorType, FindOptions, SelectionCriteria},
Namespace,
};
/// The MongoDB `find` command: selects documents in `ns` matching `filter`.
#[derive(Debug)]
pub(crate) struct Find {
    ns: Namespace,
    // Optional query predicate; absent means "match every document".
    filter: Option<Document>,
    // Boxed to keep `Find` small; normalized by `Find::new`.
    options: Option<Box<FindOptions>>,
}
impl Find {
#[cfg(test)]
fn empty() -> Self {
Self::new(
Namespace {
db: String::new(),
coll: String::new(),
},
None,
None,
)
}
pub(crate) fn new(
ns: Namespace,
filter: Option<Document>,
mut options: Option<FindOptions>,
) -> Self {
if let Some(ref mut options) = options {
if let Some(ref comment) = options.comment {
if options.comment_bson.is_none() {
options.comment_bson = Some(comment.clone().into());
}
}
}
Self {
ns,
filter,
options: options.map(Box::new),
}
}
}
impl OperationWithDefaults for Find {
    type O = CursorSpecification;
    type Command = Document;
    const NAME: &'static str = "find";
    /// Builds the `find` command document from the namespace, options, and
    /// filter. Rejects batch sizes that do not fit in an i32.
    fn build(&mut self, _description: &StreamDescription) -> Result<Command> {
        let mut body = doc! {
            Self::NAME: self.ns.coll.clone(),
        };
        if let Some(ref options) = self.options {
            // negative limits should be interpreted as request for single batch as per crud spec.
            if options.limit.map(|limit| limit < 0) == Some(true) {
                body.insert("singleBatch", true);
            }
            if options
                .batch_size
                .map(|batch_size| batch_size > std::i32::MAX as u32)
                == Some(true)
            {
                return Err(ErrorKind::InvalidArgument {
                    message: "The batch size must fit into a signed 32-bit integer".to_string(),
                }
                .into());
            }
            // Cursor type maps onto the `tailable`/`awaitData` wire flags.
            match options.cursor_type {
                Some(CursorType::Tailable) => {
                    body.insert("tailable", true);
                }
                Some(CursorType::TailableAwait) => {
                    body.insert("tailable", true);
                    body.insert("awaitData", true);
                }
                _ => {}
            };
        }
        // Remaining options are appended generically after the special cases.
        append_options(&mut body, self.options.as_ref())?;
        if let Some(ref filter) = self.filter {
            body.insert("filter", filter.clone());
        }
        Ok(Command::new_read(
            Self::NAME.to_string(),
            self.ns.db.clone(),
            self.options.as_ref().and_then(|o| o.read_concern.clone()),
            body,
        ))
    }
    fn extract_at_cluster_time(
        &self,
        response: &bson::RawDocument,
    ) -> Result<Option<bson::Timestamp>> {
        CursorBody::extract_at_cluster_time(response)
    }
    /// Converts the raw server reply into a cursor specification.
    fn handle_response(
        &self,
        response: RawCommandResponse,
        description: &StreamDescription,
    ) -> Result<Self::O> {
        let response: CursorBody = response.body()?;
        // The comment should only be propagated to getMore calls on 4.4+.
        let comment = if description.max_wire_version.unwrap_or(0) < SERVER_4_4_0_WIRE_VERSION {
            None
        } else {
            self.options
                .as_ref()
                .and_then(|opts| opts.comment_bson.clone())
        };
        Ok(CursorSpecification::new(
            response.cursor,
            description.server_address.clone(),
            self.options.as_ref().and_then(|opts| opts.batch_size),
            self.options.as_ref().and_then(|opts| opts.max_await_time),
            comment,
        ))
    }
    fn supports_read_concern(&self, _description: &StreamDescription) -> bool {
        true
    }
    fn selection_criteria(&self) -> Option<&SelectionCriteria> {
        self.options
            .as_ref()
            .and_then(|opts| opts.selection_criteria.as_ref())
    }
    // `find` is a read, so it participates in retryable reads.
    fn retryability(&self) -> Retryability {
        Retryability::Read
    }
}
|
// list of volume submodules
pub mod knitting;
pub mod point_cloud;
pub mod surface;
pub mod voxel;
|
use nannou::prelude::*;
use blend::*;
// Application state: the precomputed morph frames between the three base
// shapes, plus the index of the frame currently used as the animation base.
struct Model {
    shapes: Vec<Shape>,
    shape_index: usize
}
/// Wires the nannou app (model/update/view) together and starts the loop.
fn main() {
    let app = nannou::app(model).update(update);
    app.run();
}
/// Builds the window and precomputes the full morph cycle:
/// circle -> square -> triangle -> circle, 100 frames per transition.
fn model(app: &App) -> Model {
    app.new_window().size(1920,1080).view(view).build().unwrap();
    let red_square = blend::Shape::square_simple(pt2(0.0, 0.0), 300.0, srgba(1.0, 0.0, 0.0, 1.0));
    let blue_circle = blend::Shape::circle(pt2(0.0, 0.0), 300.0, srgba(0.0, 0.0, 1.0, 1.0));
    let green_triangle = blend::Shape::triangle_simple(pt2(0.0, 0.0), 300.0, srgba(0.0, 1.0, 0.0, 1.0));
    let num_steps = 100;
    let mut frames: Vec<Shape> = Vec::new();
    frames.extend(blend::blend(&blue_circle, &red_square, num_steps));
    frames.extend(blend::blend(&red_square, &green_triangle, num_steps));
    frames.extend(blend::blend(&green_triangle, &blue_circle, num_steps));
    Model {
        shapes: frames,
        shape_index: 0
    }
}
/// Advances the base frame index at 90 steps per second of app time.
fn update(app: &App, model: &mut Model, _update: Update) {
    let step = (app.time * 90.0).floor();
    model.shape_index = step as usize;
}
/// Draws every stored shape once per frame, each offset in phase (by its
/// position in the list), rotated, and spread horizontally across the window.
fn view(app: &App, model: &Model, frame: Frame) {
    let draw = app.draw();
    let boundary = app.window_rect();
    draw.background().color(Rgb::new(0.1,0.1,0.1));
    let total = model.shapes.len();
    // enumerate() replaces the previous manually-incremented counter.
    for (i, _) in model.shapes.iter().enumerate() {
        let shape = model.shapes[(model.shape_index + i) % total].clone();
        draw.polyline()
            .points_colored(shape.values)
            .rotate((model.shape_index as f32 + i as f32) * 0.04)
            .x(boundary.left() * 1.8 + boundary.right() * 2.0 * (i as f32 / total as f32));
    }
    draw.to_frame(app, &frame).unwrap();
}
|
use anyhow::{anyhow, Result};
use itertools::Itertools;
use std::{collections::VecDeque, fs};
/// Returns the first value that is not the sum of two of the
/// `preamble_length` values immediately preceding it (AoC 2020 day 9 part 1).
///
/// # Panics
/// Panics if every post-preamble value has a matching pair.
fn find_abberation(input: &[u64], preamble_length: usize) -> u64 {
    // Sliding window of the most recent `preamble_length` values.
    // (`&[u64]` instead of `&Vec<u64>`: callers' `&vec` still coerces.)
    let mut ring = VecDeque::<u64>::with_capacity(preamble_length + 1);
    input
        .iter()
        .enumerate()
        .find_map(|(index, &n)| {
            if index < preamble_length {
                // Still filling the initial preamble.
                ring.push_back(n);
                return None;
            }
            // Check all unordered pairs in the window (replaces the previous
            // itertools::tuple_combinations, dropping the extra dependency).
            let mut has_pair = false;
            'pairs: for i in 0..ring.len() {
                for j in (i + 1)..ring.len() {
                    if ring[i] + ring[j] == n {
                        has_pair = true;
                        break 'pairs;
                    }
                }
            }
            if has_pair {
                // Valid value: slide the window forward.
                ring.push_back(n);
                ring.pop_front();
                None
            } else {
                Some(n)
            }
        })
        .expect("every value past the preamble had a matching pair")
}
/// Finds a contiguous run (length >= 2) of values before the aberration that
/// sums to it, and returns min + max of that run (the encryption weakness).
///
/// # Errors
/// Returns an error if `abberation` is absent from `input` (previously this
/// panicked) or if no qualifying run exists.
fn find_sequence(input: &[u64], abberation: u64) -> Result<u64> {
    let end = input
        .iter()
        .position(|&n| n == abberation)
        .ok_or_else(|| anyhow!("Aberration not present in input"))?;
    // Only values strictly before the aberration can form the run.
    let input = &input[..end];
    // `..=` also allows the run to span the entire prefix (the previous
    // exclusive bound skipped `length == input.len()`).
    for length in 2..=input.len() {
        for sequence in input.windows(length) {
            if sequence.iter().sum::<u64>() == abberation {
                let min = sequence.iter().min().expect("windows are non-empty");
                let max = sequence.iter().max().expect("windows are non-empty");
                return Ok(min + max);
            }
        }
    }
    Err(anyhow!("Sequence not found"))
}
/// Parses one `u64` per line, locates the aberration, and returns the
/// encryption weakness derived from it.
///
/// # Errors
/// Returns an error if any line is not a valid `u64` (previously this
/// panicked via `unwrap`) or if no contiguous run sums to the aberration.
fn do_the_thing(input: &str, preamble_length: usize) -> Result<u64> {
    let input = input
        .lines()
        .map(|n| n.parse::<u64>())
        .collect::<std::result::Result<Vec<u64>, _>>()?;
    let abberation = find_abberation(&input, preamble_length);
    find_sequence(&input, abberation)
}
/// Reads the puzzle input and prints the answer for a 25-value preamble.
fn main() -> Result<()> {
    let puzzle = fs::read_to_string("input.txt")?;
    let answer = do_the_thing(&puzzle, 25)?;
    println!("{:?}", answer);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // AoC 2020 day 9 worked example: with a 5-value preamble the aberration
    // is 127, and the contiguous run 15+25+47+40 gives min 15 + max 47 = 62.
    #[test]
    fn second() {
        let input = "35
20
15
25
47
40
62
55
65
95
102
117
150
182
127
219
299
277
309
576";
        assert_eq!(62, do_the_thing(&input, 5).unwrap());
    }
}
|
// Copyright (C) 2015-2021 Swift Navigation Inc.
// Contact: https://support.swiftnav.com
//
// This source is subject to the license found in the file 'LICENSE' which must
// be be distributed together with this source. All other rights reserved.
//
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
// EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
//****************************************************************************
// Automatically generated from yaml/swiftnav/sbp/vehicle.yaml
// with generate.py. Please do not hand edit!
//****************************************************************************/
//! Messages from a vehicle.
#[allow(unused_imports)]
use std::convert::TryFrom;
#[allow(unused_imports)]
use byteorder::{LittleEndian, ReadBytesExt};
#[allow(unused_imports)]
use crate::serialize::SbpSerialize;
#[allow(unused_imports)]
use crate::SbpString;
/// Vehicle forward (x-axis) velocity
///
/// Message representing the x component of vehicle velocity in the user frame
/// at the odometry reference point(s) specified by the user. The offset for
/// the odometry reference point and the definition and origin of the user
/// frame are defined through the device settings interface. There are 4
/// possible user-defined sources of this message which are labeled
/// arbitrarily source 0 through 3. If using "processor time" time tags, the
/// receiving end will expect a `MSG_GNSS_TIME_OFFSET` when a PVT fix becomes
/// available to synchronise odometry measurements with GNSS. Processor time
/// shall roll over to zero after one week.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgOdometry {
    /// Frame sender id; `parse` leaves this `None` and it is excluded from
    /// serde serialization.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Time field representing either milliseconds in the GPS Week or local CPU
    /// time from the producing system in milliseconds. See the tow_source flag
    /// for the exact source of this timestamp.
    pub tow: u32,
    /// The signed forward component of vehicle velocity.
    pub velocity: i32,
    /// Status flags
    pub flags: u8,
}
impl MsgOdometry {
    /// Decodes the payload from a little-endian byte buffer, advancing `_buf`.
    /// `sender_id` is framing metadata, not part of the payload, so it starts
    /// as `None`.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgOdometry, crate::Error> {
        Ok( MsgOdometry{
            sender_id: None,
            tow: _buf.read_u32::<LittleEndian>()?,
            velocity: _buf.read_i32::<LittleEndian>()?,
            flags: _buf.read_u8()?,
        } )
    }
}
// Generated SBP plumbing: message identity, framing, and (optionally) a GPS
// timestamp derived from the `tow` field.
impl super::SBPMessage for MsgOdometry {
    fn get_message_name(&self) -> &'static str {
        "MSG_ODOMETRY"
    }
    fn get_message_type(&self) -> u16 {
        2307
    }
    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }
    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }
    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }
    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
    #[cfg(feature = "swiftnav-rs")]
    fn gps_time(
        &self,
    ) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
        // `tow` is in milliseconds; GpsTime wants seconds.
        let tow_s = (self.tow as f64) / 1000.0;
        // Week number 0 is a placeholder — only the time-of-week component is
        // kept via `.tow()`.
        let gps_time = match crate::time::GpsTime::new(0, tow_s) {
            Ok(gps_time) => gps_time.tow(),
            Err(e) => return Some(Err(e.into())),
        };
        Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
    }
}
// Compile-time message identity; mirrors the values returned by SBPMessage.
impl super::ConcreteMessage for MsgOdometry {
    const MESSAGE_TYPE: u16 = 2307;
    const MESSAGE_NAME: &'static str = "MSG_ODOMETRY";
}
// Downcast from the message enum; fails for any other variant.
impl TryFrom<super::SBP> for MsgOdometry {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgOdometry(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}
impl crate::serialize::SbpSerialize for MsgOdometry {
    // `sender_id` is excluded: it belongs to the frame, not the payload.
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tow.append_to_sbp_buffer(buf);
        self.velocity.append_to_sbp_buffer(buf);
        self.flags.append_to_sbp_buffer(buf);
    }
    // Payload size: sum of the serialized field sizes, in the same order.
    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tow.sbp_size();
        size += self.velocity.sbp_size();
        size += self.flags.sbp_size();
        size
    }
}
/// Accumulated wheeltick count message
///
/// Message containing the accumulated distance travelled by a wheel located
/// at an odometry reference point defined by the user. The offset for the
/// odometry reference point and the definition and origin of the user frame
/// are defined through the device settings interface. The source of this
/// message is identified by the source field, which is an integer ranging
/// from 0 to 255. The timestamp associated with this message should represent
/// the time when the accumulated tick count reached the value given by the
/// contents of this message as accurately as possible. If using "local CPU
/// time" time tags, the receiving end will expect a `MSG_GNSS_TIME_OFFSET`
/// when a PVT fix becomes available to synchronise wheeltick measurements
/// with GNSS. Local CPU time shall roll over to zero after one week.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgWheeltick {
    /// Frame sender id; `parse` leaves this `None` and it is excluded from
    /// serde serialization.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Time field representing either microseconds since the last PPS,
    /// microseconds in the GPS Week or local CPU time from the producing system
    /// in microseconds. See the synch_type field for the exact meaning of this
    /// timestamp.
    pub time: u64,
    /// Field indicating the type of timestamp contained in the time field.
    pub flags: u8,
    /// ID of the sensor producing this message
    pub source: u8,
    /// Free-running counter of the accumulated distance for this sensor. The
    /// counter should be incrementing if travelling into one direction and
    /// decrementing when travelling in the opposite direction.
    pub ticks: i32,
}
impl MsgWheeltick {
    /// Decodes the payload from a little-endian byte buffer, advancing `_buf`.
    /// `sender_id` is framing metadata, not part of the payload, so it starts
    /// as `None`.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgWheeltick, crate::Error> {
        Ok( MsgWheeltick{
            sender_id: None,
            time: _buf.read_u64::<LittleEndian>()?,
            flags: _buf.read_u8()?,
            source: _buf.read_u8()?,
            ticks: _buf.read_i32::<LittleEndian>()?,
        } )
    }
}
// Generated SBP plumbing: message identity, framing, and (optionally) a GPS
// timestamp derived from the `time` field.
impl super::SBPMessage for MsgWheeltick {
    fn get_message_name(&self) -> &'static str {
        "MSG_WHEELTICK"
    }
    fn get_message_type(&self) -> u16 {
        2308
    }
    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }
    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }
    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }
    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
    #[cfg(feature = "swiftnav-rs")]
    fn gps_time(
        &self,
    ) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
        // only consider wheelticks with synchronization type value "microsec in GPS week"
        if self.flags != 1 {
            return None;
        }
        // `time` is in microseconds; GpsTime wants seconds.
        let tow_s = (self.time as f64) / 1000000.0;
        // Week number 0 is a placeholder — only the time-of-week component is
        // kept via `.tow()`.
        let gps_time = match crate::time::GpsTime::new(0, tow_s) {
            Ok(gps_time) => gps_time.tow(),
            Err(e) => return Some(Err(e.into())),
        };
        Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
    }
}
// Compile-time message identity; must match `get_message_type`/`get_message_name`.
impl super::ConcreteMessage for MsgWheeltick {
    const MESSAGE_TYPE: u16 = 2308;
    const MESSAGE_NAME: &'static str = "MSG_WHEELTICK";
}
impl TryFrom<super::SBP> for MsgWheeltick {
    type Error = super::TryFromSBPError;
    /// Extracts a `MsgWheeltick` from the message enum, failing on any other variant.
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        if let super::SBP::MsgWheeltick(m) = msg {
            Ok(m)
        } else {
            Err(super::TryFromSBPError)
        }
    }
}
impl crate::serialize::SbpSerialize for MsgWheeltick {
    /// Appends the payload fields to `buf` in wire order (sender_id excluded).
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.time.append_to_sbp_buffer(buf);
        self.flags.append_to_sbp_buffer(buf);
        self.source.append_to_sbp_buffer(buf);
        self.ticks.append_to_sbp_buffer(buf);
    }
    /// Total encoded payload size in bytes.
    fn sbp_size(&self) -> usize {
        self.time.sbp_size()
            + self.flags.sbp_size()
            + self.source.sbp_size()
            + self.ticks.sbp_size()
    }
}
|
#[cfg(test)]
mod tests;
/// Exercises a match over `x`; every arm is a no-op, so this only
/// demonstrates arm coverage (values 0-2 vs. everything else).
pub fn func(x: u32) {
    match x {
        0 | 1 | 2 => {}
        _ => {}
    }
}
#[test]
fn test() {
    // Hit both the enumerated arms (1) and the wildcard arm (3).
    func(1);
    func(3);
    // NOTE(review): `member1`/`member2` are not visible in this chunk;
    // presumably sibling modules exposing their own `func` — confirm.
    member1::func(0);
    member2::func(0);
}
|
use crate::error::RPCError;
use ckb_jsonrpc_types::{BannedAddr, Node, NodeAddress, Timestamp};
use ckb_network::{MultiaddrExt, NetworkController};
use faketime::unix_time_as_millis;
use jsonrpc_core::Result;
use jsonrpc_derive::rpc;
use std::collections::HashMap;
// Upper bound on public addresses reported by `local_node_info`.
const MAX_ADDRS: usize = 50;
const DEFAULT_BAN_DURATION: u64 = 24 * 60 * 60 * 1000; // 1 day, in milliseconds
/// JSON-RPC surface for inspecting and managing the node's network layer.
#[rpc(server)]
pub trait NetworkRpc {
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"local_node_info","params": []}' -H 'content-type:application/json' 'http://localhost:8114'
    #[rpc(name = "local_node_info")]
    fn local_node_info(&self) -> Result<Node>;
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"get_peers","params": []}' -H 'content-type:application/json' 'http://localhost:8114'
    #[rpc(name = "get_peers")]
    fn get_peers(&self) -> Result<Vec<Node>>;
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"get_banned_addresses","params": []}' -H 'content-type:application/json' 'http://localhost:8114'
    #[rpc(name = "get_banned_addresses")]
    fn get_banned_addresses(&self) -> Result<Vec<BannedAddr>>;
    // curl -d '{"id": 2, "jsonrpc": "2.0", "method":"set_ban","params": ["192.168.0.0/24", "insert"]}' -H 'content-type:application/json' 'http://localhost:8114'
    // `command` is "insert" or "delete"; `ban_time` is in milliseconds and is
    // treated as absolute when `absolute` is true, otherwise as a duration.
    #[rpc(name = "set_ban")]
    fn set_ban(
        &self,
        address: String,
        command: String,
        ban_time: Option<Timestamp>,
        absolute: Option<bool>,
        reason: Option<String>,
    ) -> Result<()>;
}
/// Concrete `NetworkRpc` implementation backed by the node's network controller.
pub(crate) struct NetworkRpcImpl {
    pub network_controller: NetworkController,
}
impl NetworkRpc for NetworkRpcImpl {
    // Reports this node's version, id, and up to MAX_ADDRS public addresses.
    fn local_node_info(&self) -> Result<Node> {
        Ok(Node {
            version: self.network_controller.node_version().to_string(),
            // Not meaningful for the local node itself.
            is_outbound: None,
            node_id: self.network_controller.node_id(),
            addresses: self
                .network_controller
                .public_urls(MAX_ADDRS)
                .into_iter()
                .map(|(address, score)| NodeAddress {
                    address,
                    score: u64::from(score).into(),
                })
                .collect(),
        })
    }
    // Lists currently connected peers with their known addresses.
    fn get_peers(&self) -> Result<Vec<Node>> {
        let peers = self.network_controller.connected_peers();
        Ok(peers
            .into_iter()
            .map(|(peer_id, peer)| {
                // Deduplicate listened addresses by IP; addresses that cannot
                // be parsed or p2p-attached are silently dropped.
                let mut addresses: HashMap<_, _> = peer
                    .listened_addrs
                    .iter()
                    .filter_map(|addr| {
                        if let Ok((ip_addr, addr)) = addr.extract_ip_addr().and_then(|ip_addr| {
                            addr.attach_p2p(&peer_id).map(|addr| (ip_addr, addr))
                        }) {
                            Some((
                                ip_addr,
                                NodeAddress {
                                    address: addr.to_string(),
                                    score: 1.into(),
                                },
                            ))
                        } else {
                            None
                        }
                    })
                    .collect();
                if peer.is_outbound() {
                    // The address we actually dialed gets the maximum score,
                    // overriding any listened-address entry for the same IP.
                    if let Ok(ip_addr) = peer.connected_addr.extract_ip_addr() {
                        addresses.insert(
                            ip_addr,
                            NodeAddress {
                                address: peer.connected_addr.to_string(),
                                score: u64::from(std::u8::MAX).into(),
                            },
                        );
                    }
                }
                let addresses = addresses.values().cloned().collect();
                Node {
                    is_outbound: Some(peer.is_outbound()),
                    version: peer
                        .identify_info
                        .map(|info| info.client_version)
                        .unwrap_or_else(|| "unknown".to_string()),
                    node_id: peer_id.to_base58(),
                    addresses,
                }
            })
            .collect())
    }
    fn get_banned_addresses(&self) -> Result<Vec<BannedAddr>> {
        Ok(self
            .network_controller
            .get_banned_addrs()
            .into_iter()
            .map(|banned| BannedAddr {
                address: banned.address.to_string(),
                ban_until: banned.ban_until.into(),
                ban_reason: banned.ban_reason,
                created_at: banned.created_at.into(),
            })
            .collect())
    }
    // Inserts or deletes a ban for an IP network (e.g. "192.168.0.0/24").
    fn set_ban(
        &self,
        address: String,
        command: String,
        ban_time: Option<Timestamp>,
        absolute: Option<bool>,
        reason: Option<String>,
    ) -> Result<()> {
        let ip_network = address
            .parse()
            .map_err(|_| RPCError::custom(RPCError::Invalid, "invalid address".to_owned()))?;
        match command.as_ref() {
            "insert" => {
                // Absolute: `ban_time` is a timestamp. Relative: `ban_time`
                // is a duration added to now, defaulting to one day.
                let ban_until = if absolute.unwrap_or(false) {
                    ban_time.unwrap_or_default().into()
                } else {
                    unix_time_as_millis()
                        + ban_time
                            .unwrap_or_else(|| DEFAULT_BAN_DURATION.into())
                            .value()
                };
                if let Err(err) =
                    self.network_controller
                        .ban(ip_network, ban_until, reason.unwrap_or_default())
                {
                    return Err(RPCError::custom(
                        RPCError::Invalid,
                        format!("ban address error {}", err),
                    ));
                }
            }
            "delete" => self.network_controller.unban(&ip_network),
            _ => {
                return Err(RPCError::custom(
                    RPCError::Invalid,
                    "invalid command".to_owned(),
                ))
            }
        }
        Ok(())
    }
}
|
use gamesession::InternalState;
use std::collections::HashMap;
use player::*;
use slog::Logger;
// Fraction of cast votes a candidate needs in order to be lynched.
// NOTE(review): "LINCH" looks like a typo for "LYNCH"; renaming would touch
// every use site, so it is only flagged here.
const LINCH_PERCENTAGE : f32 = 0.4;
/// How a voter resolved their lynch vote.
#[derive(PartialEq,Debug)]
enum LynchState {
    /// The voter chose a victim.
    Picked,
    /// The voter declined to pick anyone.
    Abstained,
}
/// One player's recorded lynch vote.
#[derive(Debug)]
struct LynchPlayerState {
    // Index of the chosen victim; left at 0 when the player abstained.
    picked: usize,
    state: LynchState,
}
/// The morning phase of the game, in which players vote on whom to lynch.
pub struct MorningState<'a> {
    // Shared game state, mutably borrowed for the duration of the phase.
    state: &'a mut InternalState,
    // Votes recorded so far, keyed by voter index.
    lynch_state: HashMap<usize, LynchPlayerState>,
    log: Logger
}
impl<'a> MorningState<'a> {
    /// Creates the morning (lynch-voting) phase over the shared game state.
    pub fn new(state: &'a mut InternalState) -> MorningState<'a> {
        let log = state.logger.new(o!("context"=> "MorningState"));
        MorningState {
            state,
            lynch_state: HashMap::new(),
            log,
        }
    }
    /// True when `player` exists, is alive, has not voted yet, and voting is
    /// open (no lynching on the first night).
    pub fn can_vote(&mut self, player: usize) -> bool {
        if self.state.first_night { // Ignore first night
            warn!(self.log, "{:?} tried to vote in the first night!", player);
            return false;
        }
        let plr : Option<&Player> = self.state.players.get(player);
        if plr.is_none() { // Player doesn't exist
            error!(self.log, "{:?} tried to vote, but doesn't exist?!?", player);
            return false;
        }
        if plr.unwrap().is_ghost() {
            return false;
        }
        !self.lynch_state.contains_key(&player)
    }
    /// Evaluates the victory condition over living players only.
    /// Returns the winning role, or `PlayerRole::Unassigned` while the game
    /// continues.
    pub fn has_team_won(&self) -> PlayerRole {
        // Fix: removed an unused `lone_wolf` counter that was declared but
        // never read anywhere in this function.
        let mut mafia = 0;
        let mut civ = 0;
        for plr in &self.state.players {
            if plr.is_ghost() {
                continue;
            }
            if plr.role() == &PlayerRole::Mafia {
                mafia += 1;
            } else {
                civ += 1;
            }
        }
        debug!(self.log, "CURRENT_SCORE";"CIV"=>civ, "MAFIA"=>mafia);
        if mafia == 0 {
            return PlayerRole::Civilian;
        }
        if mafia > civ {
            PlayerRole::Mafia
        } else {
            PlayerRole::Unassigned
        }
    }
    /// Records that `player` abstained (stored with victim index 0).
    pub fn abstain(&mut self, player: usize) {
        if !self.can_vote(player) {
            warn!(self.log, "{:?} tried to abstain, but can't vote.", player);
            return;
        }
        self.lynch_state.insert(player, LynchPlayerState {
            picked: 0,
            state: LynchState::Abstained,
        });
    }
    /// Records `player`'s vote against `victim`. On success, returns the
    /// number of votes collected so far.
    pub fn pick_target(&mut self, player: usize, victim: usize) -> Result<usize,&str> {
        if !self.can_vote(player) {
            return Err("Player cannot vote.");
        }
        let plr : Option<&Player> = self.state.players.get(victim);
        if plr.is_none() {
            error!(self.log, "{:?} tried to vote for {:?}, but they do not exist", player, victim);
            return Err("Victim not found.");
        }
        if plr.unwrap().is_ghost() {
            error!(self.log, "{:?} tried to vote for {:?}, but they are a ghostie", player, victim);
            return Err("Victim is a ghost.");
        }
        self.lynch_state.insert(player, LynchPlayerState {
            picked: victim,
            state: LynchState::Picked,
        });
        Ok(self.lynch_state.len())
    }
    /// True once every player has a recorded vote.
    /// NOTE(review): ghosts can never vote (`can_vote` rejects them), so with
    /// any dead player this comparison can never become true — confirm intended.
    pub fn counted_all_votes(&self) -> bool {
        self.state.players.len() == self.lynch_state.len()
    }
    /// Voting is open on every night except the first.
    pub fn can_anyone_vote(&self) -> bool {
        !self.state.first_night
    }
    /// Tallies votes and kills the first candidate whose share of recorded
    /// votes reaches `LINCH_PERCENTAGE`, returning their index.
    /// NOTE(review): candidates with equal vote counts are visited in an
    /// unspecified order (HashMap iteration) — confirm ties don't matter.
    pub fn lynch_target(&mut self) -> Option<usize> {
        let mut votes : HashMap<usize,usize> = HashMap::with_capacity(self.lynch_state.len());
        for vote in self.lynch_state.values() {
            if vote.state == LynchState::Picked {
                let count = votes.entry(vote.picked).or_insert(0);
                *count += 1;
            }
        }
        let mut sorted_votes : Vec<_> = votes.iter().collect();
        sorted_votes.sort_by(|a, b| b.1.cmp(a.1));
        debug!(self.log, "Votes: {:?}", sorted_votes);
        for (player, vote) in sorted_votes {
            // Share is computed against all recorded votes, including abstentions.
            let percentage = (*vote as f32) / (self.lynch_state.len() as f32);
            debug!(self.log, "{:?} got {:?} votes ({:?})", player, vote, percentage);
            if percentage >= LINCH_PERCENTAGE {
                //TODO: Add a defense stage
                let plr = self.state.players.get_mut(*player).expect("Couldn't find player to lynch");
                debug!(self.log, "Killed {:?} ({:?})", player, plr.name());
                plr.kill();
                return Some(*player);
            }
        }
        None
    }
}
impl<'a> Drop for MorningState<'a> {
    // Ending the morning phase always clears the first-night flag, however
    // the phase is exited.
    fn drop(&mut self) {
        self.state.first_night = false; // Before we leave, make sure the first night mod is off.
    }
}
|
use std::fmt::{Debug, Formatter};
use ndarray::Array1;
/// Maps a transfer (pre-activation) vector to its activated values.
pub type ActivationFn = fn(&Array1<f32>) -> Array1<f32>;
/// Maps a transfer vector to the activation's derivative values.
pub type DerivationFn = fn(&Array1<f32>) -> Array1<f32>;
/// An activation function paired with its derivative. Stored as plain
/// function pointers, which keeps the struct `Copy`.
#[derive(Clone, Copy)]
pub struct Activation {
    activation: ActivationFn,
    derivation: DerivationFn,
}
impl Debug for Activation {
    /// Function pointers carry no useful state to display, so only the type
    /// name is written — the same text `debug_struct("Activation").finish()`
    /// would produce.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("Activation")
    }
}
impl Activation {
    /// Pairs an activation function with its matching derivative.
    pub fn new(activation: ActivationFn, derivation: DerivationFn) -> Self {
        Self {
            activation,
            derivation,
        }
    }
    /// Applies the activation function to `transfer`.
    pub fn activate(&self, transfer: &Array1<f32>) -> Array1<f32> {
        (self.activation)(transfer)
    }
    /// Applies the derivative function to `transfer`.
    pub fn derive(&self, transfer: &Array1<f32>) -> Array1<f32> {
        (self.derivation)(transfer)
    }
}
|
use crate::constants::{CURVE_ORDER, GROUP_G2_SIZE, FIELD_ORDER_ELEMENT_SIZE};
use crate::errors::{SerzDeserzError, ValueError};
use crate::curve_order_elem::{CurveOrderElement, CurveOrderElementVector};
use crate::group_elem::{GroupElement, GroupElementVector};
use crate::types::{GroupG2, FP2, BigNum};
use crate::utils::hash_msg;
use std::iter;
use std::ops::{Add, AddAssign, Index, IndexMut, Mul, Neg, Sub, SubAssign};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::slice::Iter;
use crate::group_elem_g1::parse_hex_as_fp;
use rayon::prelude::*;
use serde::{Serialize, Serializer, Deserialize, Deserializer};
use serde::de::{Error as DError, Visitor};
use std::str::SplitWhitespace;
use zeroize::Zeroize;
/// An element of the G2 group, wrapping the underlying AMCL point type.
#[derive(Clone, Debug)]
pub struct G2 {
    value: GroupG2,
}
impl GroupElement for G2 {
    fn new() -> Self {
        Self {
            value: GroupG2::new(),
        }
    }
    // The identity is the point at infinity.
    fn identity() -> Self {
        let mut v = GroupG2::new();
        v.inf();
        Self { value: v }
    }
    /// This is an arbitrary choice. Any group element can be a generator
    fn generator() -> Self {
        GroupG2::generator().into()
    }
    fn is_identity(&self) -> bool {
        self.value.is_infinity()
    }
    fn set_to_identity(&mut self) {
        self.value.inf()
    }
    // Maps the hash of `msg` onto the curve.
    fn from_msg_hash(msg: &[u8]) -> Self {
        GroupG2::mapit(&hash_msg(msg)).into()
    }
    /// TODO: call the appropriate function once implemented in `hash2curve` crate
    fn hash_to_curve(_msg: &[u8], _dst: &hash2curve::DomainSeparationTag) -> Self {
        unimplemented!();
    }
    fn to_vec(&self) -> Vec<u8> {
        let mut bytes: [u8; GROUP_G2_SIZE] = [0; GROUP_G2_SIZE];
        self.write_to_slice_unchecked(&mut bytes);
        bytes.to_vec()
    }
    // Fails unless `bytes` is exactly GROUP_G2_SIZE long.
    fn from_slice(bytes: &[u8]) -> Result<Self, SerzDeserzError> {
        if bytes.len() != GROUP_G2_SIZE {
            return Err(SerzDeserzError::G2BytesIncorrectSize(
                bytes.len(),
                GROUP_G2_SIZE,
            ));
        }
        Ok(GroupG2::frombytes(bytes).into())
    }
    fn write_to_slice(&self, target: &mut [u8]) -> Result<(), SerzDeserzError> {
        if target.len() != GROUP_G2_SIZE {
            return Err(SerzDeserzError::G2BytesIncorrectSize(
                target.len(),
                GROUP_G2_SIZE,
            ));
        }
        self.write_to_slice_unchecked(target);
        Ok(())
    }
    fn write_to_slice_unchecked(&self, target: &mut [u8]) {
        // Serialize a copy so `tobytes` (which takes the point by value/mut
        // in AMCL) cannot disturb `self` — presumably it normalizes; confirm.
        let mut temp = GroupG2::new();
        temp.copy(&self.value);
        temp.tobytes(target);
    }
    fn add_assign_(&mut self, b: &Self) {
        self.value.add(&b.value);
    }
    fn sub_assign_(&mut self, b: &Self) {
        self.value.sub(&b.value);
    }
    fn plus(&self, b: &Self) -> Self {
        let mut sum = self.value.clone();
        sum.add(&b.value);
        sum.into()
    }
    fn minus(&self, b: &Self) -> Self {
        let mut diff = self.value.clone();
        diff.sub(&b.value);
        diff.into()
    }
    fn scalar_mul_const_time(&self, a: &CurveOrderElement) -> Self {
        self.value.mul(&a.to_bignum()).into()
    }
    fn double(&self) -> Self {
        let mut d = self.value.clone();
        d.dbl();
        d.into()
    }
    fn double_mut(&mut self) {
        self.value.dbl();
    }
    fn to_hex(&self) -> String {
        self.value.to_hex()
    }
    // Expects three whitespace-separated FP2 coordinates: x, y, z.
    fn from_hex(s: String) -> Result<Self, SerzDeserzError> {
        let mut iter = s.split_whitespace();
        let x = parse_hex_as_fp2(&mut iter)?;
        let y = parse_hex_as_fp2(&mut iter)?;
        let z = parse_hex_as_fp2(&mut iter)?;
        let mut value = GroupG2::new();
        value.setpx(x);
        value.setpy(y);
        value.setpz(z);
        Ok(Self { value })
    }
    fn negation(&self) -> Self {
        let mut n = self.to_ecp();
        n.neg();
        n.into()
    }
    // G2 lives over the quadratic extension field.
    fn is_extension() -> bool {
        return true;
    }
    // A point has the correct order iff multiplying by the curve order
    // yields the identity (point at infinity).
    fn has_correct_order(&self) -> bool {
        return self.value.mul(&CURVE_ORDER).is_infinity();
    }
}
impl G2 {
    /// Uncompressed serialization of the point.
    pub fn to_bytes(&self) -> [u8; 4 * FIELD_ORDER_ELEMENT_SIZE] {
        let mut bytes = [0u8; 4 * FIELD_ORDER_ELEMENT_SIZE];
        self.value.tobytes(&mut bytes[..]);
        bytes
    }
    /// Compressed serialization: the two FP components of the affine x
    /// coordinate, with the parities of y's components packed into bits 6-7
    /// of the first byte (a in bit 7, b in bit 6).
    pub fn to_compressed_bytes(&self) -> [u8; 2 * FIELD_ORDER_ELEMENT_SIZE] {
        let mut bytes = [0u8; 2 * FIELD_ORDER_ELEMENT_SIZE];
        // Work on an affine copy so `self` is left untouched.
        let mut temp = GroupG2::new();
        temp.copy(&self.value);
        temp.affine();
        temp.x.geta().tobytes(&mut bytes[..FIELD_ORDER_ELEMENT_SIZE]);
        temp.x.getb().tobytes(&mut bytes[FIELD_ORDER_ELEMENT_SIZE..]);
        let a = temp.y.geta().parity() as u8;
        let b = temp.y.getb().parity() as u8;
        let parity = a << 1 | b;
        // NOTE(review): assumes the top two bits of the serialized x.a are
        // always zero so the parity bits don't collide — confirm.
        bytes[0] |= parity << 6;
        bytes
    }
}
impl From<[u8; 2*FIELD_ORDER_ELEMENT_SIZE]> for G2 {
    // Delegates to the by-reference decompression implementation below.
    fn from(data: [u8; 2*FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        Self::from(&data)
    }
}
// Decompression: inverse of `to_compressed_bytes`. Recovers x from the two
// serialized FP components, solves the curve equation for y, and picks the
// root whose component parities match the packed parity bits.
impl From<&[u8; 2*FIELD_ORDER_ELEMENT_SIZE]> for G2 {
    fn from(data: &[u8; 2*FIELD_ORDER_ELEMENT_SIZE]) -> Self {
        let mut temp = data.clone();
        // Extract the parity bits (a in bit 7, b in bit 6) then clear them
        // so the first byte is a plain big-number byte again.
        let parity = (temp[0] >> 6) & 3u8;
        let pa = if parity & 2u8 == 2 { 1 } else { 0 };
        let pb = if parity & 1u8 == 1 { 1 } else { 0 };
        temp[0] &= 0x3F;
        let mut a = BigNum::frombytes(&temp[..FIELD_ORDER_ELEMENT_SIZE]);
        let mut b = BigNum::frombytes(&temp[FIELD_ORDER_ELEMENT_SIZE..]);
        a.norm();
        b.norm();
        let mut x = FP2::new_bigs(&a, &b);
        x.norm();
        // y^2 = rhs(x); `sqrt` reports whether a square root exists.
        let mut y = GroupG2::rhs(&x);
        if y.sqrt() {
            // Negate components whose parity disagrees with the stored bits.
            if y.geta().parity() != pa {
                y.a.neg();
            }
            if y.getb().parity() != pb {
                y.b.neg();
            }
            y.reduce();
        }
        let g2 = GroupG2::new_fp2s(&x, &y);
        Self { value: g2 }
    }
}
/// Parse given hex string as FP2
pub fn parse_hex_as_fp2(iter: &mut SplitWhitespace) -> Result<FP2, SerzDeserzError> {
    // Reads the two base-field components in order (a then b) and assembles
    // the FP2. Mirrors AMCL's parsing, but with error handling and constant
    // time execution — important since hex is used for (de)serialization.
    // Filtering/padding the string up front would mean scanning it twice.
    let a = parse_hex_as_fp(iter)?;
    let b = parse_hex_as_fp(iter)?;
    let mut out = FP2::new();
    out.seta(a);
    out.setb(b);
    Ok(out)
}
// Boilerplate trait implementations (serde/conversions, operator overloads,
// scalar multiplication and lookup tables) generated by local macros.
impl_group_elem_traits!(G2, GroupG2);
impl_group_elem_conversions!(G2, GroupG2, GROUP_G2_SIZE);
impl_group_elem_ops!(G2);
impl_scalar_mul_ops!(G2);
impl_group_element_lookup_table!(G2, G2LookupTable);
// Represents an element of the sub-group of the elliptic curve over prime the extension field
impl_optmz_scalar_mul_ops!(G2, GroupG2, G2LookupTable);
/// A vector of G2 elements, with vector/product operations macro-generated below.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct G2Vector {
    elems: Vec<G2>,
}
impl_group_elem_vec_ops!(G2, G2Vector);
impl_group_elem_vec_product_ops!(G2, G2Vector, G2LookupTable);
impl_group_elem_vec_conversions!(G2, G2Vector);
impl G2 {
    /// Computes sum of 2 scalar multiplications.
    /// Faster than doing the scalar multiplications individually and then adding them. Uses lookup table
    /// returns self*a + h*b
    pub fn binary_scalar_mul(&self, h: &Self, a: &CurveOrderElement, b: &CurveOrderElement) -> Self {
        // TODO: Replace with faster
        // Delegates to the generic multi-scalar multiplication with the two
        // (point, scalar) pairs.
        let group_elems = iter::once(self).chain(iter::once(h));
        let field_elems = iter::once(a).chain(iter::once(b));
        G2Vector::multi_scalar_mul_const_time_without_precomputation(group_elems, field_elems)
            .unwrap()
    }
}
#[cfg(test)]
mod test {
    use super::G2;
    use crate::group_elem::GroupElement;
    use crate::curve_order_elem::CurveOrderElement;
    // Round-trips the generator and 30 random points through compressed
    // serialization and back.
    #[test]
    fn test_compression() {
        let g2 = G2::generator();
        let bytes = g2.to_compressed_bytes();
        assert_eq!(G2::from(bytes), g2);
        for _ in 0..30 {
            let sk = CurveOrderElement::random();
            let pk = &g2 * &sk;
            let bytes = pk.to_compressed_bytes();
            let t = G2::from(bytes);
            assert_eq!(t, pk);
        }
    }
    #[test]
    fn test_parse_hex_for_fp2() {
        // TODO:
    }
    #[test]
    fn test_parse_bad_hex_for_fp2() {
        // TODO:
    }
}
|
#![feature(slice_patterns)]
#![feature(advanced_slice_patterns)]
// Runs each pattern-matching demo in turn.
fn main() {
    normal();
    overlap();
    destructure();
    pattern_guards();
}
/// Demonstrates that overlapping range patterns are tried in order:
/// 4 falls in both ranges but takes the first matching arm ("big").
fn overlap() {
    let x = 4;
    match x {
        // `..=` is the current inclusive-range pattern; the legacy `...`
        // spelling is deprecated (and rejected in newer editions).
        3..=10 => println!("big"),
        0..=6 => println!("small"),
        _ => println!("error")
    }
}
/// Basic `match` demo: literal arms, or-patterns, an inclusive range,
/// a wildcard, and `match` used as an expression.
fn normal() {
    let number = 13;
    // TODO ^ Try different values for `number`
    println!("Tell me about {}", number);
    match number {
        // Match a single value
        1 => println!("One!"),
        // Match several values
        2 | 3 | 5 | 7 | 11 => println!("This is a prime"),
        // Match an inclusive range (`..=`; the legacy `...` form is deprecated)
        13..=19 => println!("A teen"),
        // Handle the rest of cases
        _ => println!("Ain't special"),
    }
    let boolean = true;
    // Match is an expression too
    let binary = match boolean {
        // The arms of a match must cover all the possible values
        false => 0,
        true => 1,
        // TODO ^ Try commenting out one of these arms
    };
    println!("{} -> {}", boolean, binary);
}
/// Array destructuring demo. For `[1, 2, 3, 4, 5]` the first arm wins
/// and "starts with 1" is printed.
fn destructure() {
    // rfc 0495 array pattern changed
    // tuple / struct / array
    let v = [1, 2, 3, 4, 5];
    match &v {
        // Match ergonomics let us drop the explicit `&`/`ref` of the old style.
        [1, ..] => println!("{}", "starts with 1"),
        //[.., 3, ..] => println!("{}", "contains 3"), // error, '..' can be used at most once
        [_, _, 3, _, _] => println!("{}", "exact 5 elements, and the center is 3"),
        [.., end] => println!("ends with {}", end),
    }
}
/// Guard-clause demo: the first arm only matches when its guard holds,
/// printing the first character (`Some('h')` for "hello world").
fn pattern_guards() {
    let s = "hello world";
    match &s {
        text if text.starts_with('h') => println!("{:?}", text.chars().next()),
        _ => println!("{}", "default arm"),
    }
}
// svd2rust-generated reader/writer type aliases for the DDRCTRL_MSTR register fields.
#[doc = "Register `DDRCTRL_MSTR` reader"]
pub type R = crate::R<DDRCTRL_MSTR_SPEC>;
#[doc = "Register `DDRCTRL_MSTR` writer"]
pub type W = crate::W<DDRCTRL_MSTR_SPEC>;
#[doc = "Field `DDR3` reader - DDR3"]
pub type DDR3_R = crate::BitReader;
#[doc = "Field `DDR3` writer - DDR3"]
pub type DDR3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPDDR2` reader - LPDDR2"]
pub type LPDDR2_R = crate::BitReader;
#[doc = "Field `LPDDR2` writer - LPDDR2"]
pub type LPDDR2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LPDDR3` reader - LPDDR3"]
pub type LPDDR3_R = crate::BitReader;
#[doc = "Field `LPDDR3` writer - LPDDR3"]
pub type LPDDR3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BURSTCHOP` reader - BURSTCHOP"]
pub type BURSTCHOP_R = crate::BitReader;
#[doc = "Field `BURSTCHOP` writer - BURSTCHOP"]
pub type BURSTCHOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EN_2T_TIMING_MODE` reader - EN_2T_TIMING_MODE"]
pub type EN_2T_TIMING_MODE_R = crate::BitReader;
#[doc = "Field `EN_2T_TIMING_MODE` writer - EN_2T_TIMING_MODE"]
pub type EN_2T_TIMING_MODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DATA_BUS_WIDTH` reader - DATA_BUS_WIDTH"]
pub type DATA_BUS_WIDTH_R = crate::FieldReader;
#[doc = "Field `DATA_BUS_WIDTH` writer - DATA_BUS_WIDTH"]
pub type DATA_BUS_WIDTH_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DLL_OFF_MODE` reader - DLL_OFF_MODE"]
pub type DLL_OFF_MODE_R = crate::BitReader;
#[doc = "Field `DLL_OFF_MODE` writer - DLL_OFF_MODE"]
pub type DLL_OFF_MODE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BURST_RDWR` reader - BURST_RDWR"]
pub type BURST_RDWR_R = crate::FieldReader;
#[doc = "Field `BURST_RDWR` writer - BURST_RDWR"]
pub type BURST_RDWR_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
// svd2rust-generated field accessors: each extracts its field's bits from
// the cached register value.
impl R {
    #[doc = "Bit 0 - DDR3"]
    #[inline(always)]
    pub fn ddr3(&self) -> DDR3_R {
        DDR3_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 2 - LPDDR2"]
    #[inline(always)]
    pub fn lpddr2(&self) -> LPDDR2_R {
        LPDDR2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - LPDDR3"]
    #[inline(always)]
    pub fn lpddr3(&self) -> LPDDR3_R {
        LPDDR3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 9 - BURSTCHOP"]
    #[inline(always)]
    pub fn burstchop(&self) -> BURSTCHOP_R {
        BURSTCHOP_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - EN_2T_TIMING_MODE"]
    #[inline(always)]
    pub fn en_2t_timing_mode(&self) -> EN_2T_TIMING_MODE_R {
        EN_2T_TIMING_MODE_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bits 12:13 - DATA_BUS_WIDTH"]
    #[inline(always)]
    pub fn data_bus_width(&self) -> DATA_BUS_WIDTH_R {
        DATA_BUS_WIDTH_R::new(((self.bits >> 12) & 3) as u8)
    }
    #[doc = "Bit 15 - DLL_OFF_MODE"]
    #[inline(always)]
    pub fn dll_off_mode(&self) -> DLL_OFF_MODE_R {
        DLL_OFF_MODE_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bits 16:19 - BURST_RDWR"]
    #[inline(always)]
    pub fn burst_rdwr(&self) -> BURST_RDWR_R {
        BURST_RDWR_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
}
// svd2rust-generated field writers: each returns a proxy that writes its
// field at the const bit offset encoded in the type parameter.
impl W {
    #[doc = "Bit 0 - DDR3"]
    #[inline(always)]
    #[must_use]
    pub fn ddr3(&mut self) -> DDR3_W<DDRCTRL_MSTR_SPEC, 0> {
        DDR3_W::new(self)
    }
    #[doc = "Bit 2 - LPDDR2"]
    #[inline(always)]
    #[must_use]
    pub fn lpddr2(&mut self) -> LPDDR2_W<DDRCTRL_MSTR_SPEC, 2> {
        LPDDR2_W::new(self)
    }
    #[doc = "Bit 3 - LPDDR3"]
    #[inline(always)]
    #[must_use]
    pub fn lpddr3(&mut self) -> LPDDR3_W<DDRCTRL_MSTR_SPEC, 3> {
        LPDDR3_W::new(self)
    }
    #[doc = "Bit 9 - BURSTCHOP"]
    #[inline(always)]
    #[must_use]
    pub fn burstchop(&mut self) -> BURSTCHOP_W<DDRCTRL_MSTR_SPEC, 9> {
        BURSTCHOP_W::new(self)
    }
    #[doc = "Bit 10 - EN_2T_TIMING_MODE"]
    #[inline(always)]
    #[must_use]
    pub fn en_2t_timing_mode(&mut self) -> EN_2T_TIMING_MODE_W<DDRCTRL_MSTR_SPEC, 10> {
        EN_2T_TIMING_MODE_W::new(self)
    }
    #[doc = "Bits 12:13 - DATA_BUS_WIDTH"]
    #[inline(always)]
    #[must_use]
    pub fn data_bus_width(&mut self) -> DATA_BUS_WIDTH_W<DDRCTRL_MSTR_SPEC, 12> {
        DATA_BUS_WIDTH_W::new(self)
    }
    #[doc = "Bit 15 - DLL_OFF_MODE"]
    #[inline(always)]
    #[must_use]
    pub fn dll_off_mode(&mut self) -> DLL_OFF_MODE_W<DDRCTRL_MSTR_SPEC, 15> {
        DLL_OFF_MODE_W::new(self)
    }
    #[doc = "Bits 16:19 - BURST_RDWR"]
    #[inline(always)]
    #[must_use]
    pub fn burst_rdwr(&mut self) -> BURST_RDWR_W<DDRCTRL_MSTR_SPEC, 16> {
        BURST_RDWR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DDRCTRL master register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrctrl_mstr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ddrctrl_mstr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRCTRL_MSTR_SPEC;
impl crate::RegisterSpec for DDRCTRL_MSTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrctrl_mstr::R`](R) reader structure"]
impl crate::Readable for DDRCTRL_MSTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ddrctrl_mstr::W`](W) writer structure"]
impl crate::Writable for DDRCTRL_MSTR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DDRCTRL_MSTR to value 0x0004_0001"]
impl crate::Resettable for DDRCTRL_MSTR_SPEC {
    const RESET_VALUE: Self::Ux = 0x0004_0001;
}
|
/// An enum to represent all characters in the NyiakengPuachueHmong block.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum NyiakengPuachueHmong {
/// \u{1e100}: '𞄀'
LetterMa,
/// \u{1e101}: '𞄁'
LetterTsa,
/// \u{1e102}: '𞄂'
LetterNta,
/// \u{1e103}: '𞄃'
LetterTa,
/// \u{1e104}: '𞄄'
LetterHa,
/// \u{1e105}: '𞄅'
LetterNa,
/// \u{1e106}: '𞄆'
LetterXa,
/// \u{1e107}: '𞄇'
LetterNka,
/// \u{1e108}: '𞄈'
LetterCa,
/// \u{1e109}: '𞄉'
LetterLa,
/// \u{1e10a}: '𞄊'
LetterSa,
/// \u{1e10b}: '𞄋'
LetterZa,
/// \u{1e10c}: '𞄌'
LetterNca,
/// \u{1e10d}: '𞄍'
LetterNtsa,
/// \u{1e10e}: '𞄎'
LetterKa,
/// \u{1e10f}: '𞄏'
LetterDa,
/// \u{1e110}: '𞄐'
LetterNya,
/// \u{1e111}: '𞄑'
LetterNra,
/// \u{1e112}: '𞄒'
LetterVa,
/// \u{1e113}: '𞄓'
LetterNtxa,
/// \u{1e114}: '𞄔'
LetterTxa,
/// \u{1e115}: '𞄕'
LetterFa,
/// \u{1e116}: '𞄖'
LetterRa,
/// \u{1e117}: '𞄗'
LetterQa,
/// \u{1e118}: '𞄘'
LetterYa,
/// \u{1e119}: '𞄙'
LetterNqa,
/// \u{1e11a}: '𞄚'
LetterPa,
/// \u{1e11b}: '𞄛'
LetterXya,
/// \u{1e11c}: '𞄜'
LetterNpa,
/// \u{1e11d}: '𞄝'
LetterDla,
/// \u{1e11e}: '𞄞'
LetterNpla,
/// \u{1e11f}: '𞄟'
LetterHah,
/// \u{1e120}: '𞄠'
LetterMla,
/// \u{1e121}: '𞄡'
LetterPla,
/// \u{1e122}: '𞄢'
LetterGa,
/// \u{1e123}: '𞄣'
LetterRra,
/// \u{1e124}: '𞄤'
LetterA,
/// \u{1e125}: '𞄥'
LetterAa,
/// \u{1e126}: '𞄦'
LetterI,
/// \u{1e127}: '𞄧'
LetterU,
/// \u{1e128}: '𞄨'
LetterO,
/// \u{1e129}: '𞄩'
LetterOo,
/// \u{1e12a}: '𞄪'
LetterE,
/// \u{1e12b}: '𞄫'
LetterEe,
/// \u{1e12c}: '𞄬'
LetterW,
/// \u{1e130}: '𞄰'
ToneDashB,
/// \u{1e131}: '𞄱'
ToneDashM,
/// \u{1e132}: '𞄲'
ToneDashJ,
/// \u{1e133}: '𞄳'
ToneDashV,
/// \u{1e134}: '𞄴'
ToneDashS,
/// \u{1e135}: '𞄵'
ToneDashG,
/// \u{1e136}: '𞄶'
ToneDashD,
/// \u{1e137}: '𞄷'
SignForPerson,
/// \u{1e138}: '𞄸'
SignForThing,
/// \u{1e139}: '𞄹'
SignForLocation,
/// \u{1e13a}: '𞄺'
SignForAnimal,
/// \u{1e13b}: '𞄻'
SignForInvertebrate,
/// \u{1e13c}: '𞄼'
SignXwXw,
/// \u{1e13d}: '𞄽'
SyllableLengthener,
/// \u{1e140}: '𞅀'
DigitZero,
/// \u{1e141}: '𞅁'
DigitOne,
/// \u{1e142}: '𞅂'
DigitTwo,
/// \u{1e143}: '𞅃'
DigitThree,
/// \u{1e144}: '𞅄'
DigitFour,
/// \u{1e145}: '𞅅'
DigitFive,
/// \u{1e146}: '𞅆'
DigitSix,
/// \u{1e147}: '𞅇'
DigitSeven,
/// \u{1e148}: '𞅈'
DigitEight,
/// \u{1e149}: '𞅉'
DigitNine,
/// \u{1e14e}: '𞅎'
LogogramNyaj,
}
// Generated variant-to-character table; the inverse of `TryFrom<char>` below.
impl Into<char> for NyiakengPuachueHmong {
    fn into(self) -> char {
        match self {
            NyiakengPuachueHmong::LetterMa => '𞄀',
            NyiakengPuachueHmong::LetterTsa => '𞄁',
            NyiakengPuachueHmong::LetterNta => '𞄂',
            NyiakengPuachueHmong::LetterTa => '𞄃',
            NyiakengPuachueHmong::LetterHa => '𞄄',
            NyiakengPuachueHmong::LetterNa => '𞄅',
            NyiakengPuachueHmong::LetterXa => '𞄆',
            NyiakengPuachueHmong::LetterNka => '𞄇',
            NyiakengPuachueHmong::LetterCa => '𞄈',
            NyiakengPuachueHmong::LetterLa => '𞄉',
            NyiakengPuachueHmong::LetterSa => '𞄊',
            NyiakengPuachueHmong::LetterZa => '𞄋',
            NyiakengPuachueHmong::LetterNca => '𞄌',
            NyiakengPuachueHmong::LetterNtsa => '𞄍',
            NyiakengPuachueHmong::LetterKa => '𞄎',
            NyiakengPuachueHmong::LetterDa => '𞄏',
            NyiakengPuachueHmong::LetterNya => '𞄐',
            NyiakengPuachueHmong::LetterNra => '𞄑',
            NyiakengPuachueHmong::LetterVa => '𞄒',
            NyiakengPuachueHmong::LetterNtxa => '𞄓',
            NyiakengPuachueHmong::LetterTxa => '𞄔',
            NyiakengPuachueHmong::LetterFa => '𞄕',
            NyiakengPuachueHmong::LetterRa => '𞄖',
            NyiakengPuachueHmong::LetterQa => '𞄗',
            NyiakengPuachueHmong::LetterYa => '𞄘',
            NyiakengPuachueHmong::LetterNqa => '𞄙',
            NyiakengPuachueHmong::LetterPa => '𞄚',
            NyiakengPuachueHmong::LetterXya => '𞄛',
            NyiakengPuachueHmong::LetterNpa => '𞄜',
            NyiakengPuachueHmong::LetterDla => '𞄝',
            NyiakengPuachueHmong::LetterNpla => '𞄞',
            NyiakengPuachueHmong::LetterHah => '𞄟',
            NyiakengPuachueHmong::LetterMla => '𞄠',
            NyiakengPuachueHmong::LetterPla => '𞄡',
            NyiakengPuachueHmong::LetterGa => '𞄢',
            NyiakengPuachueHmong::LetterRra => '𞄣',
            NyiakengPuachueHmong::LetterA => '𞄤',
            NyiakengPuachueHmong::LetterAa => '𞄥',
            NyiakengPuachueHmong::LetterI => '𞄦',
            NyiakengPuachueHmong::LetterU => '𞄧',
            NyiakengPuachueHmong::LetterO => '𞄨',
            NyiakengPuachueHmong::LetterOo => '𞄩',
            NyiakengPuachueHmong::LetterE => '𞄪',
            NyiakengPuachueHmong::LetterEe => '𞄫',
            NyiakengPuachueHmong::LetterW => '𞄬',
            NyiakengPuachueHmong::ToneDashB => '𞄰',
            NyiakengPuachueHmong::ToneDashM => '𞄱',
            NyiakengPuachueHmong::ToneDashJ => '𞄲',
            NyiakengPuachueHmong::ToneDashV => '𞄳',
            NyiakengPuachueHmong::ToneDashS => '𞄴',
            NyiakengPuachueHmong::ToneDashG => '𞄵',
            NyiakengPuachueHmong::ToneDashD => '𞄶',
            NyiakengPuachueHmong::SignForPerson => '𞄷',
            NyiakengPuachueHmong::SignForThing => '𞄸',
            NyiakengPuachueHmong::SignForLocation => '𞄹',
            NyiakengPuachueHmong::SignForAnimal => '𞄺',
            NyiakengPuachueHmong::SignForInvertebrate => '𞄻',
            NyiakengPuachueHmong::SignXwXw => '𞄼',
            NyiakengPuachueHmong::SyllableLengthener => '𞄽',
            NyiakengPuachueHmong::DigitZero => '𞅀',
            NyiakengPuachueHmong::DigitOne => '𞅁',
            NyiakengPuachueHmong::DigitTwo => '𞅂',
            NyiakengPuachueHmong::DigitThree => '𞅃',
            NyiakengPuachueHmong::DigitFour => '𞅄',
            NyiakengPuachueHmong::DigitFive => '𞅅',
            NyiakengPuachueHmong::DigitSix => '𞅆',
            NyiakengPuachueHmong::DigitSeven => '𞅇',
            NyiakengPuachueHmong::DigitEight => '𞅈',
            NyiakengPuachueHmong::DigitNine => '𞅉',
            NyiakengPuachueHmong::LogogramNyaj => '𞅎',
        }
    }
}
// Generated character-to-variant table; fails for any character outside the block.
impl std::convert::TryFrom<char> for NyiakengPuachueHmong {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '𞄀' => Ok(NyiakengPuachueHmong::LetterMa),
            '𞄁' => Ok(NyiakengPuachueHmong::LetterTsa),
            '𞄂' => Ok(NyiakengPuachueHmong::LetterNta),
            '𞄃' => Ok(NyiakengPuachueHmong::LetterTa),
            '𞄄' => Ok(NyiakengPuachueHmong::LetterHa),
            '𞄅' => Ok(NyiakengPuachueHmong::LetterNa),
            '𞄆' => Ok(NyiakengPuachueHmong::LetterXa),
            '𞄇' => Ok(NyiakengPuachueHmong::LetterNka),
            '𞄈' => Ok(NyiakengPuachueHmong::LetterCa),
            '𞄉' => Ok(NyiakengPuachueHmong::LetterLa),
            '𞄊' => Ok(NyiakengPuachueHmong::LetterSa),
            '𞄋' => Ok(NyiakengPuachueHmong::LetterZa),
            '𞄌' => Ok(NyiakengPuachueHmong::LetterNca),
            '𞄍' => Ok(NyiakengPuachueHmong::LetterNtsa),
            '𞄎' => Ok(NyiakengPuachueHmong::LetterKa),
            '𞄏' => Ok(NyiakengPuachueHmong::LetterDa),
            '𞄐' => Ok(NyiakengPuachueHmong::LetterNya),
            '𞄑' => Ok(NyiakengPuachueHmong::LetterNra),
            '𞄒' => Ok(NyiakengPuachueHmong::LetterVa),
            '𞄓' => Ok(NyiakengPuachueHmong::LetterNtxa),
            '𞄔' => Ok(NyiakengPuachueHmong::LetterTxa),
            '𞄕' => Ok(NyiakengPuachueHmong::LetterFa),
            '𞄖' => Ok(NyiakengPuachueHmong::LetterRa),
            '𞄗' => Ok(NyiakengPuachueHmong::LetterQa),
            '𞄘' => Ok(NyiakengPuachueHmong::LetterYa),
            '𞄙' => Ok(NyiakengPuachueHmong::LetterNqa),
            '𞄚' => Ok(NyiakengPuachueHmong::LetterPa),
            '𞄛' => Ok(NyiakengPuachueHmong::LetterXya),
            '𞄜' => Ok(NyiakengPuachueHmong::LetterNpa),
            '𞄝' => Ok(NyiakengPuachueHmong::LetterDla),
            '𞄞' => Ok(NyiakengPuachueHmong::LetterNpla),
            '𞄟' => Ok(NyiakengPuachueHmong::LetterHah),
            '𞄠' => Ok(NyiakengPuachueHmong::LetterMla),
            '𞄡' => Ok(NyiakengPuachueHmong::LetterPla),
            '𞄢' => Ok(NyiakengPuachueHmong::LetterGa),
            '𞄣' => Ok(NyiakengPuachueHmong::LetterRra),
            '𞄤' => Ok(NyiakengPuachueHmong::LetterA),
            '𞄥' => Ok(NyiakengPuachueHmong::LetterAa),
            '𞄦' => Ok(NyiakengPuachueHmong::LetterI),
            '𞄧' => Ok(NyiakengPuachueHmong::LetterU),
            '𞄨' => Ok(NyiakengPuachueHmong::LetterO),
            '𞄩' => Ok(NyiakengPuachueHmong::LetterOo),
            '𞄪' => Ok(NyiakengPuachueHmong::LetterE),
            '𞄫' => Ok(NyiakengPuachueHmong::LetterEe),
            '𞄬' => Ok(NyiakengPuachueHmong::LetterW),
            '𞄰' => Ok(NyiakengPuachueHmong::ToneDashB),
            '𞄱' => Ok(NyiakengPuachueHmong::ToneDashM),
            '𞄲' => Ok(NyiakengPuachueHmong::ToneDashJ),
            '𞄳' => Ok(NyiakengPuachueHmong::ToneDashV),
            '𞄴' => Ok(NyiakengPuachueHmong::ToneDashS),
            '𞄵' => Ok(NyiakengPuachueHmong::ToneDashG),
            '𞄶' => Ok(NyiakengPuachueHmong::ToneDashD),
            '𞄷' => Ok(NyiakengPuachueHmong::SignForPerson),
            '𞄸' => Ok(NyiakengPuachueHmong::SignForThing),
            '𞄹' => Ok(NyiakengPuachueHmong::SignForLocation),
            '𞄺' => Ok(NyiakengPuachueHmong::SignForAnimal),
            '𞄻' => Ok(NyiakengPuachueHmong::SignForInvertebrate),
            '𞄼' => Ok(NyiakengPuachueHmong::SignXwXw),
            '𞄽' => Ok(NyiakengPuachueHmong::SyllableLengthener),
            '𞅀' => Ok(NyiakengPuachueHmong::DigitZero),
            '𞅁' => Ok(NyiakengPuachueHmong::DigitOne),
            '𞅂' => Ok(NyiakengPuachueHmong::DigitTwo),
            '𞅃' => Ok(NyiakengPuachueHmong::DigitThree),
            '𞅄' => Ok(NyiakengPuachueHmong::DigitFour),
            '𞅅' => Ok(NyiakengPuachueHmong::DigitFive),
            '𞅆' => Ok(NyiakengPuachueHmong::DigitSix),
            '𞅇' => Ok(NyiakengPuachueHmong::DigitSeven),
            '𞅈' => Ok(NyiakengPuachueHmong::DigitEight),
            '𞅉' => Ok(NyiakengPuachueHmong::DigitNine),
            '𞅎' => Ok(NyiakengPuachueHmong::LogogramNyaj),
            _ => Err(()),
        }
    }
}
impl Into<u32> for NyiakengPuachueHmong {
    /// Returns the character's Unicode code point.
    fn into(self) -> u32 {
        // A `char` cast to `u32` IS its Unicode scalar value, so cast
        // directly instead of the previous round-trip through
        // `escape_unicode` string formatting and hex re-parsing, which
        // allocated and ended in an `unwrap`.
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for NyiakengPuachueHmong {
    type Error = ();
    /// Fails unless `u` is both a valid code point and a member of this block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        let c = char::try_from(u).map_err(|_| ())?;
        Self::try_from(c)
    }
}
impl Iterator for NyiakengPuachueHmong {
    type Item = Self;
    /// Yields the character at the next code point after `self`.
    /// NOTE(review): the block contains unassigned code points (gaps), and
    /// `try_from(index + 1)` returns `None` at the first gap instead of
    /// skipping over it, so iteration terminates early there — confirm this
    /// is the intended behavior of the generator.
    fn next(&mut self) -> Option<Self> {
        let index: u32 = (*self).into();
        use std::convert::TryFrom;
        Self::try_from(index + 1).ok()
    }
}
impl NyiakengPuachueHmong {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        NyiakengPuachueHmong::LetterMa
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Debug-format the variant name, prefix the block name, then let
        // string_morph split the CamelCase run into sentence case.
        let s = std::format!("NyiakengPuachueHmong{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
enum_impl! {
    /// Object Format: the binary container format of a compiled artifact.
    /// Each variant maps to its lowercase string name via the `enum_impl!` macro.
    ObjFmt {
        /// Unknown format
        Unknown => "unknown",
        /// GOFF (IBM OS/360)
        GOFF => "goff",
        /// COFF Common Object File Format (Unix System V R4, Windows)
        COFF => "coff",
        /// ELF Executable and Linkable Format
        ELF => "elf",
        /// MACHO (NeXT, Apple MacOSX, iOS, ...)
        MachO => "macho",
        /// Wasm
        Wasm => "wasm",
        /// XCOFF (IBM AIX, BeOS, MacOS, ...)
        XCOFF => "xcoff",
    }
}
impl ObjFmt {
pub fn extensions(&self) -> &'static [&'static str] {
match self {
Self::COFF => &["", "o", "obj"],
Self::ELF => &[
"", "axf", "bin", "elf", "o", "prx", "puff", "ko", "mod", "so",
],
Self::MachO => &["", "o", "dylib", "bundle"],
_ => &[],
}
}
pub fn magic(&self) -> &'static [u8] {
match self {
Self::ELF => &[0x7f, b'E', b'L', b'F'],
Self::MachO => &[0xfe, 0xed, 0xfa],
Self::Wasm => &[0x0, 0x61, 0x73, 0x6d],
_ => &[],
}
}
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// GroupWidgetDefinition : The groups widget allows you to keep similar graphs together on your timeboard. Each group has a custom header, can hold one to many graphs, and is collapsible.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GroupWidgetDefinition {
    /// Background color of the group title.
    #[serde(rename = "background_color", skip_serializing_if = "Option::is_none")]
    pub background_color: Option<String>,
    /// URL of image to display as a banner for the group.
    #[serde(rename = "banner_img", skip_serializing_if = "Option::is_none")]
    pub banner_img: Option<String>,
    /// Layout type of the group; required (always serialized, and a parameter of `new`).
    #[serde(rename = "layout_type")]
    pub layout_type: crate::models::WidgetLayoutType,
    /// Whether to show the title or not.
    #[serde(rename = "show_title", skip_serializing_if = "Option::is_none")]
    pub show_title: Option<bool>,
    /// Title of the widget.
    #[serde(rename = "title", skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    /// Alignment of the title text.
    #[serde(rename = "title_align", skip_serializing_if = "Option::is_none")]
    pub title_align: Option<crate::models::WidgetTextAlign>,
    /// Widget type discriminator; named `_type` because `type` is a Rust
    /// keyword, mapped back to `"type"` by the serde rename.
    #[serde(rename = "type")]
    pub _type: crate::models::GroupWidgetDefinitionType,
    /// List of widget groups.
    #[serde(rename = "widgets")]
    pub widgets: Vec<crate::models::Widget>,
}
impl GroupWidgetDefinition {
    /// The groups widget allows you to keep similar graphs together on your timeboard. Each group has a custom header, can hold one to many graphs, and is collapsible.
    pub fn new(
        layout_type: crate::models::WidgetLayoutType,
        _type: crate::models::GroupWidgetDefinitionType,
        widgets: Vec<crate::models::Widget>,
    ) -> GroupWidgetDefinition {
        // Required fields come from the caller; every optional field starts unset.
        Self {
            layout_type,
            _type,
            widgets,
            background_color: None,
            banner_img: None,
            show_title: None,
            title: None,
            title_align: None,
        }
    }
}
|
pub mod managers;
pub mod renderer;
pub use crate::renderer::managers::*;
pub use crate::renderer::renderer::*;
|
//! Auto solvers automatically find which of their child solvers is installed on
//! the user's computer and uses it. The [AllSolvers] solvers tries all the supported solvers.
use crate::lp_format::{LpObjective, LpProblem};
use crate::problem::{Problem, StrExpression, Variable};
#[cfg(feature = "cplex")]
use crate::solvers::cplex::Cplex;
use crate::solvers::{CbcSolver, GlpkSolver, GurobiSolver, Solution};
use super::SolverTrait;
/// A solver that tries multiple solvers.
/// `SOLVER` is tried first; `NEXT` (usually another nested [AutoSolver]) is the fallback.
#[derive(Debug, Clone)]
pub struct AutoSolver<SOLVER, NEXT>(SOLVER, NEXT);
/// The tail of a list of solvers. This one has no children and never finds any solver.
#[derive(Debug, Clone, Default)]
pub struct NoSolver;
// Without the "cplex" feature, the Cplex slot degenerates into the no-op tail
// so that `AllSolvers` has the same shape with or without the feature.
#[cfg(not(feature = "cplex"))]
type Cplex = NoSolver;
/// An [AutoSolver] that tries, in order: Gurobi, Cplex, Cbc and Glpk
pub type AllSolvers = AutoSolver<
    GurobiSolver,
    AutoSolver<Cplex, AutoSolver<CbcSolver, AutoSolver<GlpkSolver, NoSolver>>>,
>;
impl SolverTrait for NoSolver {
    /// Always fails: the tail of a solver chain has nothing left to try.
    fn run<'a, P: LpProblem<'a>>(&self, _problem: &'a P) -> Result<Solution, String> {
        Err(String::from("No solver available"))
    }
}
/// The default AutoSolver contains all supported solvers
impl<A: Default, B: Default> Default for AutoSolver<A, B> {
fn default() -> Self {
AutoSolver(A::default(), B::default())
}
}
impl<SOLVER: Default, NEXT: Default> AutoSolver<SOLVER, NEXT> {
    /// Instantiate an AutoSolver with all supported solvers
    pub fn new() -> Self {
        Default::default()
    }
    /// Instantiate an AutoSolver with the given solvers.
    /// The given solver becomes the new head; the current chain is its fallback.
    pub fn with_solver<NewSolver>(self, solver: NewSolver) -> AutoSolver<NewSolver, Self> {
        AutoSolver(solver, self)
    }
}
impl<S: SolverTrait, T: SolverTrait> SolverTrait for AutoSolver<S, T> {
    /// Runs the head solver if it appears to be installed, otherwise delegates
    /// to the fallback chain.
    fn run<'a, P: LpProblem<'a>>(&self, problem: &'a P) -> Result<Solution, String> {
        // Probe the head solver with a tiny one-variable problem first, to
        // avoid writing a large problem to disk when the solver isn't there.
        let probe = Problem {
            name: "dummy".to_string(),
            sense: LpObjective::Minimize,
            objective: StrExpression("x".to_string()),
            variables: vec![Variable {
                name: "x".to_string(),
                is_integer: false,
                lower_bound: 0.0,
                upper_bound: 1.0,
            }],
            constraints: vec![],
        };
        if self.0.run(&probe).is_ok() {
            self.0.run(problem)
        } else {
            self.1.run(problem)
        }
    }
}
|
use rand::Rng;
use::minifb::{
Key,
Window
};
use crate::ram::Ram;
use crate::PROGRAM_START_ADDR;
use crate::NUM_GPR;
use crate::HEIGHT;
use crate::WIDTH;
use crate::PX_OFF;
use crate::PX_ON;
pub struct Cpu {
    // 16 8 bit general purpose registers (V0..VF; VF doubles as the carry /
    // borrow / collision flag in the arithmetic and draw opcodes)
    reg_gpr: [u8; NUM_GPR],
    // 1 16 bit register, i (memory address register used by draw/load/store)
    reg_i: u16,
    // 2 special purpose 8 bit registers
    // delay and sound timers
    reg_dt: u8,
    reg_st: u8,
    // 16 bit program counter
    reg_pc: u16,
    // 8 bit stack pointer (index of the next free stack slot)
    sp: u8,
    // stack
    // NOTE(review): CHIP-8 return addresses are 16-bit, but these slots are u8,
    // so pushes in run_instruction (0x2NNN) truncate the address. This should
    // be `[u16; 16]`.
    stack: [u8; 16],
    // 8 bit graphics (gfx) array
    // (one u32 per pixel holding PX_ON/PX_OFF — presumably sized for direct
    // blitting to the minifb window; confirm against the render loop)
    gfx: [u32; 64 * 32],
    // keyboard handling
    keys: [u8; 16]
}
impl Cpu {
/// Creates a CPU in its power-on state: all registers and timers zeroed,
/// the screen cleared, and execution set to start at the ROM load address.
pub fn new() -> Cpu {
    Cpu {
        // Registers, timers, stack and keypad all start zeroed.
        reg_gpr: [0; 16],
        reg_i: 0,
        reg_dt: 0,
        reg_st: 0,
        // Execution begins where the program is loaded.
        reg_pc: PROGRAM_START_ADDR,
        sp: 0,
        stack: [0; 16],
        // Every pixel starts off.
        gfx: [PX_OFF; 64 * 32],
        keys: [0; 16],
    }
}
/// Current value of the delay timer (DT).
pub fn reg_dt(&self) -> u8 {
    self.reg_dt
}
/// Current value of the sound timer (ST).
pub fn reg_st(&self) -> u8 {
    self.reg_st
}
/// Current program counter.
pub fn reg_pc(&self) -> u16 {
    self.reg_pc
}
/// Copy of the 64x32 framebuffer (one u32 per pixel).
/// NOTE(review): returns the whole 8 KiB array by value on every call;
/// returning a borrow would avoid the copy.
pub fn gfx(&self) -> [u32; 2048] {
    self.gfx
}
/// Sets the delay timer.
pub fn set_reg_dt(&mut self, reg_dt: u8) {
    self.reg_dt = reg_dt;
}
/// Sets the sound timer.
pub fn set_reg_st(&mut self, reg_st: u8) {
    self.reg_st = reg_st;
}
/// Reads register VX, where X is the second nibble of `opcode` (bits 8-11).
pub fn get_reg_vx(&self, opcode: u16) -> u8 {
    // Idiom: tail expression instead of an explicit `return`.
    self.reg_gpr[((opcode & 0x0F00) >> 8) as usize]
}
/// Writes `value` into register VX (X = bits 8-11 of `opcode`).
pub fn set_reg_vx(&mut self, opcode: u16, value: u8) {
    self.reg_gpr[((opcode & 0x0F00) >> 8) as usize] = value;
}
/// Reads register VY, where Y is the third nibble of `opcode` (bits 4-7).
pub fn get_reg_vy(&self, opcode: u16) -> u8 {
    self.reg_gpr[((opcode & 0x00F0) >> 4) as usize]
}
/// Writes `value` into register VY (Y = bits 4-7 of `opcode`).
pub fn set_reg_vy(&mut self, opcode: u16, value: u8) {
    self.reg_gpr[((opcode & 0x00F0) >> 4) as usize] = value;
}
/// Samples the host keyboard and returns the CHIP-8 keypad state
/// (1 = pressed) for the 16 hex keys, mapped as:
///   1 2 3 4        1 2 3 C
///   Q W E R   ->   4 5 6 D
///   A S D F        7 8 9 E
///   Z X C V        A 0 B F
pub fn handle_keypress(&mut self, window: &Window) -> [u8; 16] {
    // Clear last frame's state, then latch every key currently held.
    self.keys = [0; 16];
    // Idiom fix: use `if let` instead of `.map()` on an Option evaluated
    // purely for side effects (clippy: option_map_unit_fn).
    // NOTE(review): assumes this minifb version's get_keys() returns
    // Option<Vec<Key>> (as the original `.map` implies) — confirm crate version.
    if let Some(keys_received) = window.get_keys() {
        for k in keys_received {
            match k {
                Key::Key1 => self.keys[0x1] = 1,
                Key::Key2 => self.keys[0x2] = 1,
                Key::Key3 => self.keys[0x3] = 1,
                Key::Key4 => self.keys[0xC] = 1,
                Key::Q => self.keys[0x4] = 1,
                Key::W => self.keys[0x5] = 1,
                Key::E => self.keys[0x6] = 1,
                Key::R => self.keys[0xD] = 1,
                Key::A => self.keys[0x7] = 1,
                Key::S => self.keys[0x8] = 1,
                Key::D => self.keys[0x9] = 1,
                Key::F => self.keys[0xE] = 1,
                Key::Z => self.keys[0xA] = 1,
                Key::X => self.keys[0x0] = 1,
                Key::C => self.keys[0xB] = 1,
                Key::V => self.keys[0xF] = 1,
                _ => () // unmapped keys are ignored
            }
        }
    }
    self.keys
}
/// Fetches, decodes and executes one instruction at the current PC.
/// Most opcodes advance PC by 2; jumps/calls set it directly, and FX0A
/// leaves it unchanged while waiting for a key.
pub fn run_instruction(&mut self, ram: &mut Ram) {
    // Fetch the two-byte opcode (big-endian: high byte first).
    let hi = ram.read_byte(self.reg_pc) as u16;
    let lo = ram.read_byte(self.reg_pc + 1) as u16;
    let instruction: u16 = (hi << 8) | lo;
    // Pre-decode the operand fields shared by most opcodes.
    let reg_vx = self.get_reg_vx(instruction); // VX, X = bits 8-11
    let reg_vy = self.get_reg_vy(instruction); // VY, Y = bits 4-7
    let nnn = instruction & 0x0FFF; // 12-bit address
    let nn: u8 = (instruction & 0x00FF) as u8; // 8-bit immediate
    let reg_v0 = self.reg_gpr[0] as u16;
    match instruction & 0xF000 {
        0x0000 => match instruction & 0x000F {
            0x0000 => {
                // 0x00E0: clear screen
                for px in self.gfx.iter_mut() {
                    *px = PX_OFF;
                }
                self.reg_pc += 2;
            },
            0x000E => {
                // 0x00EE: return from subroutine — restore PC, pop the stack.
                // NOTE(review): `stack` holds u8, so only the low byte of the
                // return address survived the push in 0x2NNN; return addresses
                // above 0x00FF are corrupted. Fixing this requires changing the
                // Cpu struct's stack to [u16; 16].
                self.sp -= 1;
                self.reg_pc = self.stack[self.sp as usize] as u16;
                self.reg_pc += 2;
            },
            _ => println!("Invalid opcode! {:#X}", instruction)
        },
        0x1000 => {
            // 0x1NNN: jumps to address NNN
            self.reg_pc = nnn;
        },
        0x2000 => {
            // 0x2NNN: calls subroutine at NNN
            // NOTE(review): `as u8` truncates the 16-bit return address — see
            // the matching note in 0x00EE.
            self.stack[self.sp as usize] = self.reg_pc as u8;
            self.sp += 1;
            self.reg_pc = nnn;
        },
        0x3000 => {
            // 0x3XNN: skips the next instruction if VX == NN
            if reg_vx == nn {
                self.reg_pc += 2;
            }
            self.reg_pc += 2;
        },
        0x4000 => {
            // 0x4XNN: skips the next instruction if VX != NN
            if reg_vx != nn {
                self.reg_pc += 2;
            }
            self.reg_pc += 2;
        },
        0x5000 => {
            // 0x5XY0: skips the next instruction if VX == VY
            if reg_vx == reg_vy {
                self.reg_pc += 2;
            }
            self.reg_pc += 2;
        },
        0x6000 => {
            // 0x6XNN: sets VX to NN
            self.set_reg_vx(instruction, nn);
            self.reg_pc += 2;
        },
        0x7000 => {
            // 0x7XNN: adds NN to VX (carry flag is not changed)
            self.set_reg_vx(instruction, reg_vx.wrapping_add(nn));
            self.reg_pc += 2;
        },
        0x8000 => {
            match instruction & 0x000F {
                0x0000 => {
                    // 0x8XY0: sets VX = VY
                    self.set_reg_vx(instruction, reg_vy);
                    self.reg_pc += 2;
                },
                0x0001 => {
                    // 0x8XY1: VX = VX | VY
                    self.set_reg_vx(instruction, reg_vx | reg_vy);
                    self.reg_pc += 2;
                },
                0x0002 => {
                    // 0x8XY2: VX = VX & VY
                    self.set_reg_vx(instruction, reg_vx & reg_vy);
                    self.reg_pc += 2;
                },
                0x0003 => {
                    // 0x8XY3: VX = VX ^ VY
                    self.set_reg_vx(instruction, reg_vx ^ reg_vy);
                    self.reg_pc += 2;
                },
                0x0004 => {
                    // 0x8XY4: VX += VY; VF = 1 on carry, 0 otherwise.
                    let (sum, carry) = reg_vx.overflowing_add(reg_vy);
                    self.reg_gpr[0xF] = carry as u8;
                    self.set_reg_vx(instruction, sum);
                    self.reg_pc += 2;
                },
                0x0005 => {
                    // 0x8XY5: VX -= VY; VF = 1 when there is NO borrow (VX >= VY).
                    // Fixes: VF was wrongly 0 for VX == VY, and the plain `-`
                    // panicked on underflow in debug builds.
                    let (diff, borrow) = reg_vx.overflowing_sub(reg_vy);
                    self.reg_gpr[0xF] = (!borrow) as u8;
                    self.set_reg_vx(instruction, diff);
                    self.reg_pc += 2;
                },
                0x0006 => {
                    // 0x8XY6: stores the LSB of VX in VF, then shifts VX right by 1
                    self.reg_gpr[0xF] = reg_vx & 1;
                    self.set_reg_vx(instruction, reg_vx >> 1);
                    self.reg_pc += 2;
                },
                0x0007 => {
                    // 0x8XY7: VX = VY - VX; VF = 1 when there is NO borrow (VY >= VX).
                    // Fixes: the result was written to VY (set_reg_vy) instead of
                    // VX, the VF check excluded VY == VX, and `-` could panic on
                    // underflow in debug builds.
                    let (diff, borrow) = reg_vy.overflowing_sub(reg_vx);
                    self.reg_gpr[0xF] = (!borrow) as u8;
                    self.set_reg_vx(instruction, diff);
                    self.reg_pc += 2;
                },
                0x000E => {
                    // 0x8XYE: stores the MSB of VX in VF, then shifts VX left by 1.
                    // Fix: the MSB of a u8 is bit 7, not bit 3.
                    self.reg_gpr[0xF] = (reg_vx >> 7) & 1;
                    self.set_reg_vx(instruction, reg_vx << 1);
                    self.reg_pc += 2;
                },
                _ => println!("Invalid opcode! {:#X}", instruction)
            }
        },
        0x9000 => {
            // 0x9XY0: skips the next instruction if VX != VY
            if reg_vx != reg_vy {
                self.reg_pc += 2;
            }
            self.reg_pc += 2;
        },
        0xA000 => {
            // 0xANNN: sets I to the address NNN
            self.reg_i = nnn;
            self.reg_pc += 2;
        },
        0xB000 => {
            // 0xBNNN: jumps to the address NNN plus V0
            self.reg_pc = nnn + reg_v0;
        },
        0xC000 => {
            // 0xCXNN: VX = (random byte 0..=255) & NN
            let mut rng = rand::thread_rng();
            let rand_num: u8 = rng.gen();
            self.set_reg_vx(instruction, rand_num & nn);
            self.reg_pc += 2;
        },
        0xD000 => {
            // 0xDXYN: draws an 8-pixel-wide, N-pixel-tall sprite at (VX, VY).
            // Sprite rows are read from memory at I; pixels are XOR-ed onto the
            // screen and VF = 1 if any lit pixel is erased (collision).
            // (Comment fix: the height is N, not N + 1 — the loop runs 0..N.)
            let x = reg_vx;
            let y = reg_vy;
            let height = (instruction & 0x000F) as u8;
            let mut pixel: u8;
            self.reg_gpr[0xF] = 0;
            for y_line in 0..height {
                // One byte of sprite data per row.
                pixel = ram.read_byte(self.reg_i + y_line as u16);
                for x_line in 0..8 {
                    if (pixel & (0x0080 >> x_line)) != 0 {
                        // Wrap coordinates at the screen edges.
                        let pos_x: u32 = (x as u32 + x_line as u32) % WIDTH as u32;
                        let pos_y: u32 = (y as u32 + y_line as u32) % HEIGHT as u32;
                        // Row stride is WIDTH (was a hard-coded 64, consistent
                        // with the `% WIDTH` wrap above and the gfx buffer size).
                        let idx = (pos_x + pos_y * WIDTH as u32) as usize;
                        if self.gfx[idx] == PX_ON {
                            self.reg_gpr[0xF] = 1; // collision
                        }
                        self.gfx[idx] ^= PX_ON;
                    }
                }
            }
            self.reg_pc += 2;
        },
        0xE000 => {
            match instruction & 0x000F {
                0x000E => {
                    // 0xEX9E: skips the next instruction if key VX is pressed
                    if self.keys[reg_vx as usize] != 0 {
                        self.reg_pc += 2;
                    }
                    self.reg_pc += 2;
                },
                0x0001 => {
                    // 0xEXA1: skips the next instruction if key VX is NOT pressed
                    if self.keys[reg_vx as usize] == 0 {
                        self.reg_pc += 2;
                    }
                    self.reg_pc += 2;
                },
                _ => println!("Invalid opcode! {:#X}", instruction)
            }
        },
        0xF000 => {
            match instruction & 0x000F {
                0x0007 => {
                    // 0xFX07: the value of DT is placed in VX
                    self.set_reg_vx(instruction, self.reg_dt);
                    self.reg_pc += 2;
                },
                0x000A => {
                    // 0xFX0A: wait for a key press, store its index in VX
                    let mut key_pressed = false;
                    for i in 0..self.keys.len() {
                        if self.keys[i] != 0 {
                            key_pressed = true;
                            self.set_reg_vx(instruction, i as u8);
                            break;
                        }
                    }
                    // No key yet: leave PC unchanged so this opcode re-runs.
                    if !key_pressed {
                        return;
                    }
                    self.reg_pc += 2;
                },
                0x0005 => {
                    // Low nibble 5 is shared by FX15 / FX55 / FX65;
                    // disambiguate on the full low byte.
                    match instruction & 0x00FF {
                        0x0015 => {
                            // 0xFX15: set DT to VX
                            self.reg_dt = reg_vx;
                            self.reg_pc += 2;
                        },
                        0x0055 => {
                            // 0xFX55: dump V0..=VX to memory starting at I
                            let x = (instruction & 0x0F00) >> 8;
                            for index in 0..=x {
                                ram.write_byte(self.reg_i + index, self.reg_gpr[index as usize]);
                            }
                            // Fix: PC was never advanced, so this opcode looped forever.
                            self.reg_pc += 2;
                        },
                        0x0065 => {
                            // 0xFX65: load V0..=VX from memory starting at I.
                            // Fix: every byte used to be written to VX via
                            // set_reg_vx instead of filling V0..=VX.
                            let x = (instruction & 0x0F00) >> 8;
                            for index in 0..=x {
                                self.reg_gpr[index as usize] = ram.read_byte(self.reg_i + index);
                            }
                            // Fix: PC was never advanced, so this opcode looped forever.
                            self.reg_pc += 2;
                        },
                        _ => println!("Invalid opcode! {:#X}", instruction)
                    }
                },
                0x0008 => {
                    // 0xFX18: set ST to VX
                    self.reg_st = reg_vx;
                    self.reg_pc += 2;
                },
                0x000E => {
                    // 0xFX1E: set I = I + VX
                    self.reg_i += reg_vx as u16;
                    self.reg_pc += 2;
                },
                0x0009 => {
                    // 0xFX29: set I = address of the 5-byte font sprite for digit VX.
                    // Fix: the old code read the framebuffer (self.gfx[VX]), which
                    // is unrelated to font data.
                    // NOTE(review): assumes the fontset is loaded at address 0 with
                    // 5 bytes per digit — confirm against Ram initialization.
                    self.reg_i = reg_vx as u16 * 5;
                    self.reg_pc += 2;
                },
                0x0003 => {
                    // 0xFX33: store the BCD representation of VX at I, I+1, I+2
                    ram.write_byte(self.reg_i, reg_vx / 100);
                    ram.write_byte(self.reg_i + 1, (reg_vx / 10) % 10);
                    ram.write_byte(self.reg_i + 2, reg_vx % 10);
                    self.reg_pc += 2;
                },
                _ => println!("Invalid opcode! {:#X}", instruction)
            }
        }
        _ => println!("Invalid opcode! {:#X}", instruction)
    }
}
} |
fn main() {
    // Fix: these notes were written as `///` doc comments, which only apply to
    // items and trigger the `unused_doc_comments` warning on statements.
    // - Shared borrowing: a piece of data shared by one or more variables; it can't be altered.
    // - Mutable borrowing: a piece of data borrowed by a single variable that can alter its
    //   value (not accessible through other variables while the borrow is live).
    let x = 10;
    let mut y = 13;
    // Shared Borrow
    let a = &x;
    println!("Value of a:{}", a); //IT SHOULD PRINT 10
    println!("Value of x:{}", x); //IT SHOULD PRINT 10
    // Mutable Borrow
    let b = &mut y;
    println!("Value of b:{}", b); //IT SHOULD PRINT 13
    *b = 11; // Dereferencing operator: updates the borrowed value (y) through b, since it's mutably borrowed
    println!("Value of b:{}", b); // IT SHOULD PRINT 11
    println!("Value of y:{}", y); // IT SHOULD PRINT 11
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.