text stringlengths 8 4.13M |
|---|
use diesel::{PgConnection, RunQueryDsl};
use crate::models::Address;
use crate::schema::physical_addresses;
/// Test-data factory for building rows of the `physical_addresses` table.
///
/// Every field maps to a nullable column; `#[derive(Insertable)]` lets diesel
/// insert the struct directly. `Default` gives an all-`None` starting point
/// for the chained builder methods below.
#[derive(Debug, PartialEq, Clone, Default, Insertable)]
#[table_name = "physical_addresses"]
pub struct AddressFactory {
    pub post_office_box: Option<String>,
    pub extension: Option<String>,
    pub street: Option<String>,
    pub locality: Option<String>,
    pub region: Option<String>,
    pub code: Option<String>,
    pub country: Option<String>,
}
impl AddressFactory {
    /// Sets the post-office-box line. All setters take and return `self` by
    /// value so calls can be chained fluently.
    pub fn post_office_box<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.post_office_box = Some(s.into());
        self
    }

    /// Sets the address extension (e.g. apartment/suite line).
    pub fn extension<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.extension = Some(s.into());
        self
    }

    /// Sets the street line.
    pub fn street<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.street = Some(s.into());
        self
    }

    /// Sets the locality (city/town).
    pub fn locality<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.locality = Some(s.into());
        self
    }

    /// Sets the region (state/province).
    pub fn region<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.region = Some(s.into());
        self
    }

    /// Sets the postal code.
    pub fn code<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.code = Some(s.into());
        self
    }

    /// Sets the country.
    pub fn country<S: Into<String>>(mut self, s: S) -> AddressFactory {
        self.country = Some(s.into());
        self
    }

    /// Inserts the accumulated values into `physical_addresses` and returns
    /// the persisted `Address`.
    ///
    /// Panics on database failure — acceptable for a test-data factory.
    pub fn insert(self, conn: &PgConnection) -> Address {
        radmin::diesel::insert_into(physical_addresses::table)
            .values(&self)
            .get_result(conn)
            .expect("Failed to insert new Physical Address")
    }
}
|
#![cfg_attr(feature = "unstable", feature(test))]
// Launch program : cargo run --release < input/input.txt
// Launch benchmark : cargo +nightly bench --features "unstable"
/*
Benchmark results:
running 5 tests
test tests::test_part_1 ... ignored
test tests::test_part_2 ... ignored
test bench::bench_parse_input ... bench: 2,488 ns/iter (+/- 338)
test bench::bench_part_1 ... bench: 141 ns/iter (+/- 7)
test bench::bench_part_2 ... bench: 14,131 ns/iter (+/- 1,273)
*/
use std::error::Error;
use std::io::{self, Read, Write};
/// Shorthand for results whose error is any boxed `Error`.
type Result<T> = ::std::result::Result<T, Box<dyn Error>>;

/// Formats the arguments into a message and early-returns it as a boxed error.
macro_rules! err {
    ($($tt:tt)*) => { return Err(Box::<dyn Error>::from(format!($($tt)*))) }
}
/// Reads the puzzle input from stdin and prints both answers.
fn main() -> Result<()> {
    // Slurp the whole input before parsing.
    let mut input = String::new();
    io::stdin().read_to_string(&mut input)?;

    let (earliest_depart_time, buses) = parse_input(&input)?;

    let mut out = io::stdout();
    let answer_1 = part_1(earliest_depart_time, &buses)?;
    writeln!(out, "Part 1 : {}", answer_1)?;
    writeln!(out, "Part 2 : {}", part_2(&buses))?;
    Ok(())
}
/// Parses the puzzle input: the first line is the earliest departure time,
/// every following line is a comma-separated list of bus ids where the
/// placeholder "x" is stored as bus id 1.
fn parse_input(input: &str) -> Result<(usize, Vec<usize>)> {
    let mut lines = input.lines();

    // Empty input yields (0, []), exactly like the original index-based loop.
    let earliest_depart_time = match lines.next() {
        Some(first) => first.parse::<usize>()?,
        None => 0,
    };

    let mut buses = Vec::new();
    for line in lines {
        for bus in line.split(',') {
            let id = if bus == "x" { 1 } else { bus.parse::<usize>()? };
            buses.push(id);
        }
    }
    Ok((earliest_depart_time, buses))
}
/// Finds the real bus (placeholder "x" buses are stored as id 1) with the
/// shortest wait after `earliest_depart_time` and returns id × wait.
fn part_1(earliest_depart_time: usize, buses: &[usize]) -> Result<usize> {
    let best = buses
        .iter()
        .filter(|&&id| id != 1)
        .map(|id| (id, id - (earliest_depart_time % id)))
        .min_by_key(|&(_, wait)| wait);

    match best {
        Some((id, wait)) => Ok(id * wait),
        None => err!("Could not find a minimum, is the input empty?"),
    }
}
/// Solves part 2 with the Chinese remainder theorem.
///
/// Placeholder "x" entries are stored as bus id 1 and never constrain the
/// answer. This only works because all real bus ids are prime (hence pairwise
/// coprime), as the input guarantees.
fn part_2(buses: &[usize]) -> usize {
    let modulus: usize = buses.iter().product();

    // For the bus at reversed index i we need a term that is ≡ i (mod bus)
    // and ≡ 0 modulo every other bus id; `modulus / bus` ensures the latter,
    // and the brute-force search for `j` fixes the former.
    let sum: usize = buses
        .iter()
        .rev()
        .enumerate()
        .map(|(i, &bus)| {
            let factor = modulus / bus;
            let j = (1..)
                .find(|&j| (factor * j) % bus == i % bus)
                .expect("unbounded search only ends on a match");
            factor * j
        })
        .sum();

    // The CRT solution t satisfies t ≡ len-1-idx (mod bus) for each original
    // index idx, so shift by len-1 to obtain the timestamp itself.
    sum % modulus - buses.len() + 1
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;

    /// Loads the sample puzzle input shipped with the repository.
    fn read_test_file() -> Result<String> {
        let mut input = String::new();
        File::open("input/test.txt")?.read_to_string(&mut input)?;
        Ok(input)
    }

    #[test]
    fn test_part_1() -> Result<()> {
        // 295 is the documented answer for the sample input.
        let (earliest_depart_time, buses) = parse_input(&read_test_file()?)?;
        assert_eq!(part_1(earliest_depart_time, &buses)?, 295);
        Ok(())
    }

    #[test]
    fn test_part_2() -> Result<()> {
        // 1068781 is the documented answer for the sample input.
        let (_, buses) = parse_input(&read_test_file()?)?;
        assert_eq!(part_2(&buses), 1068781);
        Ok(())
    }
}
// Benchmarks; only compiled with `--features "unstable"` on nightly.
#[cfg(all(feature = "unstable", test))]
mod bench {
    extern crate test;
    use super::*;
    use std::fs::File;
    use test::Bencher;

    /// Loads the real puzzle input used for benchmarking.
    fn read_input_file() -> Result<String> {
        let mut input = String::new();
        File::open("input/input.txt")?.read_to_string(&mut input)?;
        Ok(input)
    }

    #[bench]
    fn bench_parse_input(b: &mut Bencher) -> Result<()> {
        let input = read_input_file()?;
        // black_box keeps the optimizer from eliding the measured work.
        b.iter(|| test::black_box(parse_input(&input)));
        Ok(())
    }

    #[bench]
    fn bench_part_1(b: &mut Bencher) -> Result<()> {
        let (earliest_depart_time, buses) = parse_input(&read_input_file()?)?;
        b.iter(|| test::black_box(part_1(earliest_depart_time, &buses)));
        Ok(())
    }

    #[bench]
    fn bench_part_2(b: &mut Bencher) -> Result<()> {
        let (_, buses) = parse_input(&read_input_file()?)?;
        b.iter(|| test::black_box(part_2(&buses)));
        Ok(())
    }
}
|
use std::io::{BufReader, Read};
use thiserror::Error;
use crate::{
filler::{decode_fillers, Filler, FillersDecodeError},
internal::{CelesteIo, Lookup, LookupRef, Node, NodeReadError, NonRleString, StringReadError},
screen::{decode_screens, ScreensDecodeError},
Screen,
};
/// In-memory representation of a Celeste map file.
#[derive(Debug)]
pub struct CelesteMap {
    /// Map name read from the file header.
    name: String,
    /// Raw "Map" node tree; holds whatever has not been decoded into
    /// `fillers` / `screens`.
    pub(crate) unread: Node,
    fillers: Vec<Filler>,
    screens: Vec<Screen>,
}
impl CelesteMap {
    /// Creates an empty map with the given name and an empty "Map" node.
    pub fn new(name: String) -> Self {
        CelesteMap {
            name,
            unread: Node::new("Map".into()),
            fillers: Vec::new(),
            screens: Vec::new(),
        }
    }

    /// Reads a complete map from the stream.
    ///
    /// Convenience wrapper over the [`CelesteIo`] implementation with no
    /// caller-supplied lookup table.
    pub fn read<R: Read>(mut reader: BufReader<R>) -> Result<Self, CelesteMapReadError> {
        <CelesteMap as CelesteIo>::read(&mut reader, None)
    }

    /// Read-only view of the map's fillers.
    pub fn fillers(&self) -> &[Filler] {
        &self.fillers
    }

    /// Mutable access to the map's fillers.
    pub fn fillers_mut(&mut self) -> &mut Vec<Filler> {
        &mut self.fillers
    }

    /// Read-only view of the map's screens.
    pub fn screens(&self) -> &[Screen] {
        &self.screens
    }

    /// Mutable access to the map's screens.
    pub fn screens_mut(&mut self) -> &mut Vec<Screen> {
        &mut self.screens
    }
}
/// Errors that can occur while reading a [`CelesteMap`] from a stream.
#[derive(Error, Debug)]
pub enum CelesteMapReadError {
    #[error("io error")]
    Io(#[from] std::io::Error),
    /// The map builds its own lookup table, so passing one in is an error.
    #[error("not expecting lookup")]
    GivenLookup,
    /// The magic header string could not be read at all.
    #[error("map header malformed")]
    MalformedHeader(StringReadError),
    /// The header was readable but was not "CELESTE MAP".
    #[error("map header is incorrect")]
    IncorrectHeader,
    #[error("map name malformed")]
    MapNameError(#[from] StringReadError),
    #[error("root node read error")]
    RootNodeError(#[from] NodeReadError),
    #[error("failed decoding fillers")]
    FillersDecodeError(#[from] FillersDecodeError),
    #[error("failed decoding screens")]
    ScreensDecodeError(#[from] ScreensDecodeError),
}
impl CelesteIo for CelesteMap {
    type Error = CelesteMapReadError;

    /// Reads a whole map in file order: magic header, map name, string lookup
    /// table, root node — then decodes fillers and screens from the node tree.
    fn read<R: Read>(
        reader: &mut BufReader<R>,
        lookup: Option<LookupRef<'_>>,
    ) -> Result<Self, Self::Error> {
        // The map is the outermost structure and builds its own lookup table,
        // so a caller-supplied one is rejected.
        if lookup.is_some() {
            return Err(CelesteMapReadError::GivenLookup);
        }
        // Magic string that must start every Celeste map file.
        if NonRleString::read(reader, None)
            .map_err(CelesteMapReadError::MalformedHeader)?
            .0
            != "CELESTE MAP"
        {
            return Err(CelesteMapReadError::IncorrectHeader);
        }
        let mut map = CelesteMap::new(NonRleString::read(reader, None)?.0);
        // String lookup table: a u16 count followed by that many raw strings.
        let lookup = Lookup::new({
            let count = u16::read(reader, lookup)? as usize;
            let mut lookup = Vec::with_capacity(count);
            for _ in 0..count {
                lookup.push(NonRleString::read(reader, None)?.0)
            }
            lookup
        });
        // The root node needs the lookup table to resolve interned strings.
        map.unread = Node::read(reader, Some(lookup.as_ref()))?;
        // Lift typed data out of the raw node tree.
        decode_fillers(&mut map)?;
        decode_screens(&mut map)?;
        Ok(map)
    }
}
|
use super::Twzobj;
use std::sync::Arc;
/// 128-bit object identifier.
pub type ObjID = u128;

/// Combines two 64-bit halves into a single 128-bit object ID.
pub fn objid_from_parts(upper: u64, lower: u64) -> ObjID {
    ((upper as ObjID) << 64) | lower as ObjID
}

/// Parses an object ID from hexadecimal text, either as a single number or as
/// `upper:lower` halves. Leading/trailing whitespace and `0x` prefixes are
/// accepted. Returns `None` on malformed input.
pub fn objid_parse(s: &str) -> Option<ObjID> {
    let s = s.trim();
    let radix = 16;
    if s.contains(':') {
        // Parse upper and lower halves separately.
        let mut split = s.split(':');
        let upper = split.next()?.trim_start_matches("0x");
        let lower = split.next()?.trim_start_matches("0x");
        let upper_num = u64::from_str_radix(upper, radix).ok()?;
        let lower_num = u64::from_str_radix(lower, radix).ok()?;
        Some(objid_from_parts(upper_num, lower_num))
    } else {
        // trim_start_matches is a no-op when there is no prefix, so the old
        // `contains("0x")` pre-check was redundant.
        ObjID::from_str_radix(s.trim_start_matches("0x"), radix).ok()
    }
}
impl<T> Twzobj<T> {
    /// Returns the object's ID.
    ///
    /// An ID of 0 is treated as "not yet assigned"; handling that case is
    /// still unimplemented, so `id` currently panics (`todo!`) for such
    /// objects.
    pub fn id(&self) -> ObjID {
        if self.internal.id == 0 {
            todo!();
        }
        self.internal.id
    }

    /// Sets the object's ID.
    ///
    /// Panics if `self.internal` is shared (`Arc::get_mut` returns `None`),
    /// i.e. this must happen before the internal handle is cloned.
    pub(crate) fn set_id(&mut self, id: ObjID) {
        Arc::get_mut(&mut self.internal).unwrap().id = id;
    }
}
|
#[cfg(target_os = "windows")]
pub use windows::*;
#[cfg(target_os = "linux")]
pub use linux::*;
use ash::vk::ExtensionProperties;
use std::ffi::CStr;
/// Checks that every `required` extension appears in `supported`.
///
/// Returns `Ok(())` when all are present, otherwise `Err` with the names of
/// the missing extensions.
///
/// # Panics
/// Panics if a required extension name is not valid UTF-8; names coming from
/// `ash::extensions::{khr, ext, ...}::*::name()` are expected to always be.
pub fn check_extensions_support(
    required: &[&'static CStr],
    supported: &[ExtensionProperties],
) -> Result<(), Vec<&'static str>> {
    // SAFETY: extension_name outlives this function's scope, so borrowing it
    // as a CStr for the duration of the check is sound.
    let supported: Vec<_> = supported
        .iter()
        .map(|props| unsafe { CStr::from_ptr(props.extension_name.as_ptr()) })
        .collect();

    let missing: Vec<_> = required
        .iter()
        .filter(|required| !supported.iter().any(|s| s == *required))
        .map(|missing| missing.to_str().unwrap())
        .collect();

    if missing.is_empty() {
        Ok(())
    } else {
        Err(missing)
    }
}
#[cfg(target_os = "windows")]
mod windows {
    use ash::extensions::khr;
    use std::ffi::CStr;

    /// Instance extensions needed to present to a window on Windows.
    pub fn extensions_for_presentation() -> Vec<&'static CStr> {
        vec![khr::Surface::name(), khr::Win32Surface::name()]
    }
}

#[cfg(target_os = "linux")]
mod linux {
    use ash::extensions::khr;
    use std::ffi::CStr;

    /// Instance extensions needed to present on Linux (Xlib and XCB surfaces).
    pub fn extensions_for_presentation() -> Vec<&'static CStr> {
        vec![
            khr::Surface::name(),
            khr::XlibSurface::name(),
            khr::XcbSurface::name(),
        ]
    }
}
|
use crate::block::Block;
use crate::transaction::{CellOutput, OutPoint, Transaction};
use crate::Capacity;
use fnv::{FnvHashMap, FnvHashSet};
use numext_fixed_hash::H256;
use serde_derive::{Deserialize, Serialize};
/// Cached metadata about a cell (a transaction output).
#[derive(Clone, Eq, PartialEq, Debug, Default, Deserialize, Serialize)]
pub struct CellMeta {
    /// Full output, skipped during (de)serialization; may be absent and
    /// loaded lazily elsewhere.
    #[serde(skip)]
    pub cell_output: Option<CellOutput>,
    pub out_point: OutPoint,
    /// Block number the cell was committed in, when known.
    pub block_number: Option<u64>,
    /// Whether the cell was created by a cellbase transaction.
    pub cellbase: bool,
    pub capacity: Capacity,
    pub data_hash: Option<H256>,
}

impl From<&CellOutput> for CellMeta {
    /// Builds a meta carrying only the output and its capacity; every other
    /// field takes its `Default` value.
    fn from(output: &CellOutput) -> Self {
        CellMeta {
            cell_output: Some(output.clone()),
            capacity: output.capacity,
            ..Default::default()
        }
    }
}

impl CellMeta {
    /// Whether this cell was created by a cellbase transaction.
    pub fn is_cellbase(&self) -> bool {
        self.cellbase
    }

    /// The cell's capacity.
    pub fn capacity(&self) -> Capacity {
        self.capacity
    }

    /// Hash of the cell data, if recorded.
    pub fn data_hash(&self) -> Option<&H256> {
        self.data_hash.as_ref()
    }
}
/// State of a cell as reported by a [`CellProvider`].
#[derive(PartialEq, Debug)]
pub enum CellStatus {
    /// Cell exists and has not been spent.
    Live(Box<CellMeta>),
    /// Cell exists and has been spent.
    Dead,
    /// Cell does not exist.
    Unknown,
}

impl CellStatus {
    /// Wraps metadata into a `Live` status (boxed so the enum stays small).
    pub fn live_cell(cell_meta: CellMeta) -> CellStatus {
        CellStatus::Live(Box::new(cell_meta))
    }

    /// True if the cell exists and is unspent.
    pub fn is_live(&self) -> bool {
        match *self {
            CellStatus::Live(_) => true,
            _ => false,
        }
    }

    /// True if the cell exists but has been spent.
    pub fn is_dead(&self) -> bool {
        self == &CellStatus::Dead
    }

    /// True if the cell is not known to exist.
    pub fn is_unknown(&self) -> bool {
        self == &CellStatus::Unknown
    }
}
/// Transaction with resolved input cells.
#[derive(Debug)]
pub struct ResolvedTransaction<'a> {
    pub transaction: &'a Transaction,
    /// Metadata for each dependency out-point, in `dep_pts` order.
    pub dep_cells: Vec<CellMeta>,
    /// Metadata for each input out-point, in `input_pts` order.
    pub input_cells: Vec<CellMeta>,
}
/// Source of cell-state lookups keyed by out-point.
pub trait CellProvider {
    fn cell(&self, out_point: &OutPoint) -> CellStatus;
}
pub struct OverlayCellProvider<'a> {
overlay: &'a CellProvider,
cell_provider: &'a CellProvider,
}
impl<'a> OverlayCellProvider<'a> {
pub fn new(overlay: &'a CellProvider, cell_provider: &'a CellProvider) -> Self {
Self {
overlay,
cell_provider,
}
}
}
impl<'a> CellProvider for OverlayCellProvider<'a> {
    /// Overlay answers win; only `Unknown` falls through to the backing provider.
    fn cell(&self, out_point: &OutPoint) -> CellStatus {
        match self.overlay.cell(out_point) {
            CellStatus::Live(cell_meta) => CellStatus::Live(cell_meta),
            CellStatus::Dead => CellStatus::Dead,
            CellStatus::Unknown => self.cell_provider.cell(out_point),
        }
    }
}
/// Resolves cells against the outputs of a single block's own transactions.
pub struct BlockCellProvider<'a> {
    /// Maps each transaction hash to its index within the block.
    output_indices: FnvHashMap<H256, usize>,
    block: &'a Block,
}

impl<'a> BlockCellProvider<'a> {
    /// Indexes the block's transactions by hash for O(1) lookups.
    pub fn new(block: &'a Block) -> Self {
        let output_indices = block
            .transactions()
            .iter()
            .enumerate()
            .map(|(idx, tx)| (tx.hash(), idx))
            .collect();
        Self {
            output_indices,
            block,
        }
    }
}
impl<'a> CellProvider for BlockCellProvider<'a> {
    /// Looks the out-point up among this block's own transaction outputs.
    /// Anything not created inside the block is reported as `Unknown`.
    fn cell(&self, out_point: &OutPoint) -> CellStatus {
        self.output_indices
            .get(&out_point.tx_hash)
            .and_then(|i| {
                self.block.transactions()[*i]
                    .outputs()
                    .get(out_point.index as usize)
                    .map(|output| {
                        CellStatus::live_cell(CellMeta {
                            cell_output: Some(output.clone()),
                            out_point: out_point.to_owned(),
                            data_hash: None,
                            capacity: output.capacity,
                            block_number: Some(self.block.header().number()),
                            // Transaction index 0 in a block is the cellbase.
                            cellbase: *i == 0,
                        })
                    })
            })
            .unwrap_or_else(|| CellStatus::Unknown)
    }
}
/// Why a transaction's inputs/deps could not be resolved.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum UnresolvableError {
    /// An out-point referenced a spent (or already-seen-in-batch) cell.
    Dead(OutPoint),
    /// Out-points that could not be found at all.
    Unknown(Vec<OutPoint>),
}
/// Resolves a transaction's inputs and deps against `cell_provider`.
///
/// `seen_inputs` accumulates input out-points across calls so that a batch of
/// transactions cannot double-spend within itself: an input already seen is
/// treated as `Dead`, and a dep that was spent by an earlier input in the
/// batch is treated as `Dead` too.
///
/// Fails fast with `UnresolvableError::Dead` on the first dead cell; unknown
/// out-points are collected and reported together at the end.
pub fn resolve_transaction<'a, CP: CellProvider>(
    transaction: &'a Transaction,
    seen_inputs: &mut FnvHashSet<OutPoint>,
    cell_provider: &CP,
) -> Result<ResolvedTransaction<'a>, UnresolvableError> {
    let (mut unknown_out_points, mut input_cells, mut dep_cells) = (
        Vec::new(),
        Vec::with_capacity(transaction.inputs().len()),
        Vec::with_capacity(transaction.deps().len()),
    );
    // skip resolve input of cellbase
    if !transaction.is_cellbase() {
        for out_point in transaction.input_pts() {
            // insert() returning false means this out-point was already used
            // as an input in the batch — a double spend, so mark it Dead.
            let cell_status = if seen_inputs.insert(out_point.clone()) {
                cell_provider.cell(&out_point)
            } else {
                CellStatus::Dead
            };
            match cell_status {
                CellStatus::Dead => {
                    return Err(UnresolvableError::Dead(out_point.clone()));
                }
                CellStatus::Unknown => {
                    unknown_out_points.push(out_point.clone());
                }
                CellStatus::Live(cell_meta) => {
                    input_cells.push(*cell_meta);
                }
            }
        }
    }
    for out_point in transaction.dep_pts() {
        // A dep consumed as an input earlier in the batch is no longer live.
        let cell_status = if seen_inputs.contains(&out_point) {
            CellStatus::Dead
        } else {
            cell_provider.cell(&out_point)
        };
        match cell_status {
            CellStatus::Dead => {
                return Err(UnresolvableError::Dead(out_point.clone()));
            }
            CellStatus::Unknown => {
                unknown_out_points.push(out_point.clone());
            }
            CellStatus::Live(cell_meta) => {
                dep_cells.push(*cell_meta);
            }
        }
    }
    if !unknown_out_points.is_empty() {
        Err(UnresolvableError::Unknown(unknown_out_points))
    } else {
        Ok(ResolvedTransaction {
            transaction,
            input_cells,
            dep_cells,
        })
    }
}
impl<'a> ResolvedTransaction<'a> {
    /// Transaction fee: total input capacity minus total output capacity,
    /// clamped to zero when outputs exceed inputs.
    pub fn fee(&self) -> ::occupied_capacity::Result<Capacity> {
        let inputs = self.inputs_capacity()?;
        let outputs = self.transaction.outputs_capacity()?;
        if inputs > outputs {
            inputs.safe_sub(outputs)
        } else {
            Ok(Capacity::zero())
        }
    }

    /// Sum of the capacities of all resolved input cells, with overflow
    /// checking; stops at the first overflow.
    pub fn inputs_capacity(&self) -> ::occupied_capacity::Result<Capacity> {
        let mut total = Capacity::zero();
        for cell in &self.input_cells {
            total = total.safe_add(cell.capacity())?;
        }
        Ok(total)
    }
}
#[cfg(test)]
mod tests {
    use super::super::script::Script;
    use super::*;
    use crate::{capacity_bytes, Bytes, Capacity};
    use numext_fixed_hash::H256;
    use std::collections::HashMap;

    /// Minimal in-memory `CellProvider`: `Some(Some(_))` = live,
    /// `Some(None)` = dead, absent key = unknown.
    struct CellMemoryDb {
        cells: HashMap<OutPoint, Option<CellMeta>>,
    }
    impl CellProvider for CellMemoryDb {
        fn cell(&self, o: &OutPoint) -> CellStatus {
            match self.cells.get(o) {
                Some(&Some(ref cell_meta)) => CellStatus::live_cell(cell_meta.clone()),
                Some(&None) => CellStatus::Dead,
                None => CellStatus::Unknown,
            }
        }
    }

    #[test]
    fn cell_provider_trait_works() {
        let mut db = CellMemoryDb {
            cells: HashMap::new(),
        };
        // Three distinct out-points: live, dead, and never inserted.
        let p1 = OutPoint {
            tx_hash: H256::zero(),
            index: 1,
        };
        let p2 = OutPoint {
            tx_hash: H256::zero(),
            index: 2,
        };
        let p3 = OutPoint {
            tx_hash: H256::zero(),
            index: 3,
        };
        let o = {
            let cell_output = CellOutput {
                capacity: capacity_bytes!(2),
                data: Bytes::default(),
                lock: Script::default(),
                type_: None,
            };
            CellMeta {
                block_number: Some(1),
                capacity: cell_output.capacity,
                data_hash: Some(cell_output.data_hash()),
                cell_output: Some(cell_output),
                out_point: OutPoint {
                    tx_hash: Default::default(),
                    index: 0,
                },
                cellbase: false,
            }
        };
        db.cells.insert(p1.clone(), Some(o.clone()));
        db.cells.insert(p2.clone(), None);
        assert_eq!(CellStatus::Live(Box::new(o)), db.cell(&p1));
        assert_eq!(CellStatus::Dead, db.cell(&p2));
        assert_eq!(CellStatus::Unknown, db.cell(&p3));
    }
}
|
/// This file defines data transfer objects.
use serde::{Deserialize, Serialize};
/// The information required to display a game in an overview table.
#[derive(Serialize, Deserialize)]
pub struct GameHeader {
    pub id: i64,
    pub description: String,
    pub members: Vec<Member>,
}

/// The information required to create a new game. The creator of the game is
/// determined on the server using login information and not transferred in the
/// DTO.
#[derive(Clone, Serialize, Deserialize)]
pub struct GameCreate {
    pub description: String,
}

/// Publicly available information about a user.
#[derive(Clone, Serialize, Deserialize)]
pub struct UserInfo {
    pub id: i64,
    pub username: String,
}
/// A game can have several members with different roles.
/// TODO: Rename to GameMember
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Member {
    pub id: i64,
    pub username: String,
    pub role: MemberRole,
    // Whether the member has accepted the game invitation.
    pub accepted: bool,
}

/// Role of a member within a game. The integer values should stay server
/// only; the tags should be sent to the client.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum MemberRole {
    WhitePlayer = 1,
    BlackPlayer = 2,
    Watcher = 3,
}
/// This implementation is important for database mapping.
impl rusqlite::types::FromSql for MemberRole {
    /// Decodes the role from its integer column value (1..=3).
    fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
        use rusqlite::types::FromSqlError::{InvalidType, OutOfRange};
        use rusqlite::types::ValueRef::Integer;
        use MemberRole::{BlackPlayer, Watcher, WhitePlayer};
        match value {
            Integer(1) => Ok(WhitePlayer),
            Integer(2) => Ok(BlackPlayer),
            Integer(3) => Ok(Watcher),
            // Other integers are out of range; non-integers are a type error.
            Integer(n) => Err(OutOfRange(n)),
            _ => Err(InvalidType),
        }
    }
}

impl rusqlite::types::ToSql for MemberRole {
    /// Stores the role as its discriminant integer.
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput> {
        use rusqlite::types::ToSqlOutput::Owned;
        use rusqlite::types::Value::Integer;
        Ok(Owned(Integer(*self as i64)))
    }
}
/// This type can be sent to a game; the server will try to update the game
/// accordingly. A Setup message can already be sent before the game is running.
/// TODO: Consider only having a single message type for client -> server requests.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum SetupMessage {
    SetDescription(String),
    UpdateMember(Member),
}

/// Invitation/readiness state of a member. The integer values should stay
/// server only; the tags should be sent to the client.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum ReadyState {
    /// The user has been invited to the game but has not accepted the request.
    Invited = 1,
    /// The user has accepted the invite, but is not ready to start playing.
    Accepted = 2,
    /// The user is ready to start playing the game. When all players declare
    /// themselves ready, the game should start.
    Ready = 3,
}
/// This implementation is important for database mapping.
impl rusqlite::types::FromSql for ReadyState {
    /// Decodes the state from its integer column value (1..=3).
    fn column_result(value: rusqlite::types::ValueRef) -> rusqlite::types::FromSqlResult<Self> {
        use rusqlite::types::FromSqlError::{InvalidType, OutOfRange};
        use rusqlite::types::ValueRef::Integer;
        use ReadyState::*;
        match value {
            Integer(1) => Ok(Invited),
            Integer(2) => Ok(Accepted),
            Integer(3) => Ok(Ready),
            // Other integers are out of range; non-integers are a type error.
            Integer(n) => Err(OutOfRange(n)),
            _ => Err(InvalidType),
        }
    }
}

impl rusqlite::types::ToSql for ReadyState {
    /// Stores the state as its discriminant integer.
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput> {
        use rusqlite::types::ToSqlOutput::Owned;
        use rusqlite::types::Value::Integer;
        Ok(Owned(Integer(*self as i64)))
    }
}
|
#![no_std]
#![feature(start)]
#![no_main]
use ferr_os_librust::syscall;
extern crate alloc;
use alloc::string::String;
use alloc::vec::Vec;
/// Userspace entry point: initializes the heap, then launches the clock and
/// the shell.
#[no_mangle]
pub extern "C" fn _start(heap_address: u64, heap_size: u64, _args: u64) {
    unsafe {
        // The allocator must be set up before any String/Vec allocation below.
        ferr_os_librust::allocator::init(heap_address, heap_size);
        // fork: the child (id == 0) becomes the clock program, the parent
        // execs the shell. NOTE(review): assumes exec does not return on
        // success — confirm against the syscall implementation.
        let id = syscall::fork();
        if id == 0 {
            syscall::exec(&String::from("/usr/clock"), &Vec::from([String::from("clock")]));
        }
        syscall::exec(&String::from("/usr/ferr_shell"), &Vec::from([String::from("ferr_shell")]));
    }
}
|
//! A buffer is a memory location accessible to the video card.
//!
//! The purpose of buffers is to serve as a space where the GPU can read from or write data to.
//! It can contain a list of vertices, indices, uniform data, etc.
//!
//! # Buffers management in glium
//!
//! There are three levels of abstraction in glium:
//!
//! - A `Buffer` corresponds to an OpenGL buffer object. This type is not public.
//! - A `BufferView` corresponds to a part of a `Buffer`. One buffer can contain one or multiple
//! subbuffers.
//! - The `VertexBuffer`, `IndexBuffer`, `UniformBuffer`, `PixelBuffer`, ... types are
//! abstractions over a subbuffer indicating their specific purpose. They implement `Deref`
//! for the subbuffer. These types are in the `vertex`, `index`, ... modules.
//!
pub use self::view::{BufferView, BufferViewAny, BufferViewMutSlice};
pub use self::view::{BufferViewSlice, BufferViewAnySlice};
pub use self::alloc::{Mapping, WriteMapping, ReadMapping};
use gl;
mod alloc;
mod view;
/// Error that can happen when creating a buffer.
//
// The variants carry no payload, so the cheap comparison/copy derives are
// added to make the error easy to match and test against.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BufferCreationError {
    /// Not enough memory to create the buffer.
    OutOfMemory,
    /// This type of buffer is not supported.
    BufferTypeNotSupported,
}
/// Type of a buffer.
#[doc(hidden)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BufferType {
    ArrayBuffer,
    PixelPackBuffer,
    PixelUnpackBuffer,
    UniformBuffer,
    CopyReadBuffer,
    CopyWriteBuffer,
    AtomicCounterBuffer,
    DispatchIndirectBuffer,
    DrawIndirectBuffer,
    QueryBuffer,
    ShaderStorageBuffer,
    TextureBuffer,
    TransformFeedbackBuffer,
    ElementArrayBuffer,
}

impl BufferType {
    /// Maps the variant to the matching OpenGL buffer-binding enum value.
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            BufferType::ArrayBuffer => gl::ARRAY_BUFFER,
            BufferType::PixelPackBuffer => gl::PIXEL_PACK_BUFFER,
            BufferType::PixelUnpackBuffer => gl::PIXEL_UNPACK_BUFFER,
            BufferType::UniformBuffer => gl::UNIFORM_BUFFER,
            BufferType::CopyReadBuffer => gl::COPY_READ_BUFFER,
            BufferType::CopyWriteBuffer => gl::COPY_WRITE_BUFFER,
            BufferType::AtomicCounterBuffer => gl::ATOMIC_COUNTER_BUFFER,
            BufferType::DispatchIndirectBuffer => gl::DISPATCH_INDIRECT_BUFFER,
            BufferType::DrawIndirectBuffer => gl::DRAW_INDIRECT_BUFFER,
            BufferType::QueryBuffer => gl::QUERY_BUFFER,
            BufferType::ShaderStorageBuffer => gl::SHADER_STORAGE_BUFFER,
            BufferType::TextureBuffer => gl::TEXTURE_BUFFER,
            BufferType::TransformFeedbackBuffer => gl::TRANSFORM_FEEDBACK_BUFFER,
            BufferType::ElementArrayBuffer => gl::ELEMENT_ARRAY_BUFFER,
        }
    }
}
|
use crate::packets::{
open::OpenMessage,
keepalive::KeepaliveMessage,
update::UpdateMessage,
};
/// Events consumed by the BGP session state machine.
// NOTE(review): variant names follow the RFC 4271 FSM event vocabulary —
// confirm exact semantics against the state-machine module.
#[derive(PartialEq, Eq, Debug, Clone, Hash)]
pub enum Event {
    /// Operator requested that the session be started.
    ManualStart,
    /// The underlying TCP connection has been established.
    TcpConnectionConfirmed,
    /// An OPEN message arrived from the peer.
    BgpOpen(OpenMessage),
    /// A KEEPALIVE message arrived from the peer.
    KeepAliveMsg(KeepaliveMessage),
    /// An UPDATE message arrived from the peer.
    UpdateMsg(UpdateMessage),
    /// The session reached the Established state.
    Established,
    /// The local RIB changed.
    LocRibChanged,
    /// The outbound Adj-RIB changed.
    AdjRibOutChanged,
    /// The inbound Adj-RIB changed.
    AdjRibInChanged,
}
//! Internal attributes of the form `#[auto_impl(name(...))]` that can be
//! attached to trait items.
use proc_macro2::{Delimiter, TokenTree};
use proc_macro_error::{abort, emit_error};
use syn::{
spanned::Spanned,
visit_mut::{visit_item_trait_mut, VisitMut},
Attribute, Meta, TraitItem,
};
use crate::proxy::{parse_types, ProxyType};
/// Removes all `#[auto_impl]` attributes that are attached to methods of the
/// given trait.
pub(crate) fn remove_our_attrs(trait_def: &mut syn::ItemTrait) {
    struct AttrRemover;
    impl VisitMut for AttrRemover {
        fn visit_trait_item_mut(&mut self, item: &mut TraitItem) {
            let item_span = item.span();
            let (attrs, is_method) = match item {
                TraitItem::Fn(m) => (&mut m.attrs, true),
                TraitItem::Const(c) => (&mut c.attrs, false),
                TraitItem::Type(t) => (&mut t.attrs, false),
                TraitItem::Macro(m) => (&mut m.attrs, false),
                // Abort compilation (proc-macro-error) on item kinds syn may
                // add in the future.
                _ => abort!(
                    item.span(),
                    "encountered unexpected `TraitItem`, cannot handle that, sorry!";
                    note = "auto-impl supports only methods, consts, types and macros currently";
                ),
            };
            // Make sure non-methods do not have our attributes.
            if !is_method && attrs.iter().any(is_our_attr) {
                emit_error!(
                    item_span,
                    "`#[auto_impl]` attributes are only allowed on methods",
                );
            }
            // Strip our attributes in all cases so they never reach rustc.
            attrs.retain(|a| !is_our_attr(a));
        }
    }
    visit_item_trait_mut(&mut AttrRemover, trait_def);
}
/// Checks if the given attribute is "our" attribute. That means that its path
/// is `auto_impl`.
pub(crate) fn is_our_attr(attr: &Attribute) -> bool {
    attr.path().is_ident("auto_impl")
}
/// Tries to parse the given attribute as one of our own `auto_impl`
/// attributes. If it's invalid, an error is emitted and `Err(())` is returned.
/// You have to make sure that `attr` is one of our attrs with `is_our_attr`
/// before calling this function!
pub(crate) fn parse_our_attr(attr: &Attribute) -> Result<OurAttr, ()> {
    assert!(is_our_attr(attr));
    // Get the body of the attribute (which has to be a group, because we
    // required the syntax `auto_impl(...)` and forbid stuff like
    // `auto_impl = ...`).
    let body = match &attr.meta {
        Meta::List(list) => list.tokens.clone(),
        _ => {
            emit_error!(attr.span(), "expected single group delimited by `()`");
            return Err(());
        }
    };
    let mut it = body.clone().into_iter();
    // Try to extract the name (we require the body to be `name(...)`).
    let name = match it.next() {
        Some(TokenTree::Ident(x)) => x,
        Some(other) => {
            emit_error!(other.span(), "expected ident, found '{}'", other);
            return Err(());
        }
        None => {
            emit_error!(attr.span(), "expected ident, found nothing");
            return Err(());
        }
    };
    // Extract the parameters (which again, have to be a group delimited by
    // `()`)
    let params = match it.next() {
        Some(TokenTree::Group(ref g)) if g.delimiter() == Delimiter::Parenthesis => g.stream(),
        Some(other) => {
            emit_error!(
                other.span(),
                "expected arguments for '{}' in parenthesis `()`, found `{}`",
                name,
                other,
            );
            return Err(());
        }
        None => {
            emit_error!(
                body.span(),
                "expected arguments for '{}' in parenthesis `()`, found nothing",
                name,
            );
            return Err(());
        }
    };
    // Finally match over the name of the attribute.
    let out = if name == "keep_default_for" {
        let proxy_types = parse_types(params.into());
        OurAttr::KeepDefaultFor(proxy_types)
    } else {
        emit_error!(
            name.span(), "invalid attribute '{}'", name;
            note = "only `keep_default_for` is supported";
        );
        return Err(());
    };
    Ok(out)
}
/// Attributes of the form `#[auto_impl(...)]` that can be attached to items of
/// the trait.
#[derive(Clone, PartialEq, Debug)]
pub(crate) enum OurAttr {
    /// `keep_default_for(...)`: keep the trait's default body for the listed
    /// proxy types instead of generating a forwarding implementation.
    KeepDefaultFor(Vec<ProxyType>),
}
|
use std::ops::{Add, RangeBounds, Sub};
use crate::{
grid::config::Entity,
grid::records::{ExactRecords, Records},
settings::object::{cell::EntityOnce, Object},
};
use super::util::bounds_to_usize;
/// Row denotes a set of cells on given rows on a [`Table`].
///
/// [`Table`]: crate::Table
#[derive(Debug)]
pub struct Rows<R> {
    range: R,
}

impl<R> Rows<R> {
    /// Returns a new instance of [`Rows`] for a range of rows.
    ///
    /// If the boundaries are exceeded it may panic.
    pub fn new(range: R) -> Self
    where
        R: RangeBounds<usize>,
    {
        Self { range }
    }

    /// Borrows the underlying range (crate-internal).
    pub(crate) const fn get_range(&self) -> &R {
        &self.range
    }
}
impl Rows<()> {
    /// Returns a new instance of [`Rows`] with a single row.
    ///
    /// If the boundaries are exceeded it may panic.
    pub const fn single(index: usize) -> Row {
        Row { index }
    }

    /// Returns a first row [`Object`].
    ///
    /// If the table has 0 rows returns an empty set of cells.
    pub const fn first() -> FirstRow {
        FirstRow
    }

    /// Returns a last row [`Object`].
    ///
    /// If the table has 0 rows returns an empty set of cells.
    pub const fn last() -> LastRow {
        LastRow
    }
}

impl<I, R> Object<I> for Rows<R>
where
    R: RangeBounds<usize>,
    I: ExactRecords,
{
    type Iter = RowsIter;

    /// Resolves the range bounds against the table's row count and iterates
    /// the resulting concrete row indices.
    fn cells(&self, records: &I) -> Self::Iter {
        let start = self.range.start_bound();
        let end = self.range.end_bound();
        let max = records.count_rows();
        let (x, y) = bounds_to_usize(start, end, max);
        RowsIter::new(x, y)
    }
}
/// A row which is located by an offset from the first row.
#[derive(Debug, Clone, Copy)]
pub struct Row {
    index: usize,
}

impl<I> Object<I> for Row {
    type Iter = EntityOnce;

    /// Yields exactly this row; the records are not consulted.
    fn cells(&self, _: &I) -> Self::Iter {
        EntityOnce::new(Some(Entity::Row(self.index)))
    }
}

impl From<Row> for usize {
    /// Unwraps the row back into its zero-based index.
    fn from(val: Row) -> Self {
        val.index
    }
}
/// This structure represents the first row of a [`Table`].
/// It often contains header data.
///
/// [`Table`]: crate::Table
#[derive(Debug)]
pub struct FirstRow;

impl<I> Object<I> for FirstRow
where
    I: Records + ExactRecords,
{
    type Iter = EntityOnce;

    /// Yields row 0, or nothing for an empty table.
    fn cells(&self, records: &I) -> Self::Iter {
        if records.count_columns() == 0 || records.count_rows() == 0 {
            return EntityOnce::new(None);
        }
        EntityOnce::new(Some(Entity::Row(0)))
    }
}

impl Add<usize> for FirstRow {
    type Output = Row;

    /// `FirstRow + n` addresses the row `n` places below the top.
    fn add(self, rhs: usize) -> Self::Output {
        Row { index: rhs }
    }
}
/// This structure represents the last row of a [`Table`].
///
/// [`Table`]: crate::Table
#[derive(Debug)]
pub struct LastRow;

impl<I> Object<I> for LastRow
where
    I: Records + ExactRecords,
{
    type Iter = EntityOnce;

    /// Yields the last row, or nothing for an empty table.
    fn cells(&self, records: &I) -> Self::Iter {
        let count_rows = records.count_rows();
        if records.count_columns() == 0 || count_rows == 0 {
            return EntityOnce::new(None);
        }
        // The zero-row case returned above, so the last index is simply
        // `count_rows - 1` (the previous `if count_rows == 0` fallback here
        // was dead code).
        EntityOnce::new(Some(Entity::Row(count_rows - 1)))
    }
}
impl Sub<usize> for LastRow {
    type Output = LastRowOffset;

    /// `LastRow - n` addresses the row `n` places above the bottom.
    fn sub(self, rhs: usize) -> Self::Output {
        LastRowOffset { offset: rhs }
    }
}

/// A row which is located by an offset from the last row.
#[derive(Debug)]
pub struct LastRowOffset {
    offset: usize,
}
impl<I> Object<I> for LastRowOffset
where
    I: Records + ExactRecords,
{
    type Iter = EntityOnce;

    /// Yields the row `offset` rows above the last one, or nothing when the
    /// table is empty or the offset runs past the first row.
    fn cells(&self, records: &I) -> Self::Iter {
        let count_rows = records.count_rows();
        if records.count_columns() == 0 || count_rows == 0 {
            return EntityOnce::new(None);
        }
        // The zero-row case returned above, so the last index is simply
        // `count_rows - 1` (the previous `if count_rows == 0` fallback here
        // was dead code).
        let last_row = count_rows - 1;
        if self.offset > last_row {
            return EntityOnce::new(None);
        }
        EntityOnce::new(Some(Entity::Row(last_row - self.offset)))
    }
}
/// An [`Iterator`] which goes over all rows of a [`Table`].
///
/// [`Table`]: crate::Table
#[derive(Debug)]
pub struct RowsIter {
    start: usize,
    end: usize,
}

impl RowsIter {
    /// Creates an iterator over the half-open row range `start..end`.
    const fn new(start: usize, end: usize) -> Self {
        Self { start, end }
    }
}
impl Iterator for RowsIter {
    type Item = Entity;

    /// Yields each row index in `start..end` once.
    fn next(&mut self) -> Option<Self::Item> {
        if self.start >= self.end {
            return None;
        }
        // Renamed from `col`: this iterator walks rows, not columns.
        let row = self.start;
        self.start += 1;
        Some(Entity::Row(row))
    }

    /// The remaining count is known exactly, so report it to let `collect`
    /// and friends pre-allocate.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end.saturating_sub(self.start);
        (len, Some(len))
    }
}
|
extern crate capnp;
extern crate capnp_rpc;
extern crate futures;
extern crate tokio;
extern crate rpc_fun;
use std::net::SocketAddr;
// use capnp::capability::Promise;
// use capnp::Error;
use capnp_rpc::twoparty::VatNetwork;
use capnp_rpc::{rpc_twoparty_capnp, RpcSystem};
use futures::{Future, Stream};
use tokio::executor::current_thread;
use tokio::net::{TcpListener, TcpStream};
use tokio::prelude::*;
use rpc_fun::proto_capnp::connect;
use rpc_fun::Service;
/// Dispatches to the server or client depending on the first CLI argument.
fn main() {
    let args: Vec<String> = ::std::env::args().collect();
    if args.len() < 2 {
        println!("need to set server or client");
        return;
    }
    match args[1].as_ref() {
        "server" => run_server(),
        "client" => {
            // Guard the name argument instead of panicking on `args[2]`.
            if args.len() < 3 {
                println!("client requires a name argument");
                return;
            }
            run_client(args[2].to_string())
        }
        _ => println!("unknown"),
    }
}
/// Binds 127.0.0.1:4389 and serves the capnp `connect` service to every
/// incoming TCP connection on the current-thread executor.
fn run_server() {
    println!("server");
    let addr: SocketAddr = String::from("127.0.0.1:4389").parse().unwrap();
    let listener = TcpListener::bind(&addr).unwrap();
    let server = listener
        .incoming()
        .for_each(|conn| {
            let (reader, writer) = conn.split();
            // One two-party RPC network per accepted connection.
            let network = VatNetwork::new(
                reader,
                writer,
                rpc_twoparty_capnp::Side::Server,
                Default::default(),
            );
            let serv = connect::ToClient::new(Service).from_server::<capnp_rpc::Server>();
            let rpc_sys = RpcSystem::new(Box::new(network), Some(serv.clone().client));
            // Drive the RPC system in the background; its errors are dropped.
            current_thread::spawn(rpc_sys.map_err(|_| ()));
            Ok(())
        })
        .map_err(|e| println!("Failed Accept: {:?}", e));
    // Blocks until the accept loop ends.
    let res = current_thread::block_on_all(server);
    match res {
        Ok(x) => println!("Server: {:?}", x),
        Err(err) => println!("Server Err: {:?}", err),
    }
}
/// Connects to the local server, registers under name `n`, then queries the
/// returned session for its number and name, printing each result.
fn run_client(n: String) {
    println!("client: {}", n);
    let addr: SocketAddr = String::from("127.0.0.1:4389").parse().unwrap();
    let stream = TcpStream::connect(&addr)
        .and_then(move |conn| {
            let (reader, writer) = conn.split();
            let network = Box::new(VatNetwork::new(
                reader,
                writer,
                rpc_twoparty_capnp::Side::Client,
                Default::default(),
            ));
            let mut rpc_sys = RpcSystem::new(network, None);
            // Bootstrap the server's exported `connect` capability.
            let serv: connect::Client = rpc_sys.bootstrap(rpc_twoparty_capnp::Side::Server);
            current_thread::spawn(rpc_sys.map_err(|e| println!("Client Err: {:?}", e)));
            // register(name) -> session capability.
            let mut reg = serv.register_request();
            reg.get().set_name(&n);
            let reg = reg.send()
                .promise
                .and_then(|res| {
                    let sess = res.get().unwrap().get_session().unwrap();
                    // Both session calls run concurrently as spawned futures.
                    let num = sess.get_number_request().send().promise.and_then(|res| {
                        println!("Got Number: {}", res.get().unwrap().get_val());
                        Ok(())
                    });
                    current_thread::spawn(num.map_err(|e| println!("NumErr: {:?}", e)));
                    let nam = sess.get_name_request().send().promise.and_then(|res| {
                        println!("Got Name: {}", res.get().unwrap().get_name().unwrap());
                        Ok(())
                    });
                    current_thread::spawn(nam.map_err(|e| println!("NameErr: {:?}", e)));
                    Ok(())
                })
                .map_err(|e| println!("Reg Err: {:?}", e));
            current_thread::spawn(reg.map_err(|e| println!("ERR: {:?}", e)));
            Ok(())
        })
        .map_err(|e| println!("Err: {:?}", e));
    // Blocks until every spawned future has completed.
    let res = current_thread::block_on_all(stream);
    match res {
        Ok(x) => println!("Client: {:?}", x),
        Err(err) => println!("Client Err: {:?}", err),
    }
}
|
use std::convert::TryFrom;
use std::convert::TryInto;
use std::f64;
use std::u32;
use std::u8;
use geo::polygon;
use geo_types::{Geometry, Polygon};
use catalog::AsFeatureCollection;
use rocket::http::Status;
use serde_json::{to_string};
use rocket::{State, response::content::Json};
use rocket::response::status::BadRequest;
use rocket::serde;
use wkt::Wkt;
use crate::catalog::ImageContainsPolygon;
use crate::catalog::ImageIntersectsGeom;
use crate::catalog::ImageryFile;
use crate::transform;
use crate::catalog;
/// Direction used when sorting query results by a field.
enum SortOrder {
    // Ascending — the default, also selected by a `+` prefix on `sortby`.
    Asc,
    // Descending — selected by a `-` prefix on `sortby`.
    Desc
}
/// Converts a `minx,miny,maxx,maxy` bbox into a rectangular `Geometry`.
/// Returns a 400 `BadRequest` if the bbox is malformed: wrong arity,
/// non-finite coordinates (NaN/inf — previously accepted silently), or
/// min >= max on either axis.
fn bbox_to_bounds(bbox: Vec<f64>) -> Result<Geometry<f64>, BadRequest<String>> {
    let well_formed = bbox.len() == 4
        && bbox.iter().all(|v| v.is_finite())
        && bbox[0] < bbox[2]
        && bbox[1] < bbox[3];
    if !well_formed {
        return Err(BadRequest(Some("Invalid bbox. bbox must contain 4 numbers in the following format: bbox=minx,miny,maxx,maxy".into())));
    }
    // Build the axis-aligned rectangle from the four corner combinations.
    let p: Polygon<f64> = polygon![
        (x: bbox[0], y: bbox[1]),
        (x: bbox[2], y: bbox[1]),
        (x: bbox[2], y: bbox[3]),
        (x: bbox[0], y: bbox[3]),
    ];
    let g: Geometry<f64> = p.into();
    Ok(g)
}
/// Parses WKT supplied in a query param into a `Geometry`.
/// Any parse or conversion failure is reported as a 400 with a usage hint.
fn query_to_bounds(query_str: &str) -> Result<Geometry<f64>, BadRequest<String>> {
    // Single source for the 400 response used by both failure paths.
    let bad_request = || {
        BadRequest(Some(
            "Invalid WKT in `contains` query param. Example of a valid query: \
            ?contains=POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))".into(),
        ))
    };
    // WKT format is expected; parse, then convert into a geo Geometry.
    let wkt_geom = Wkt::from_str(query_str).map_err(|_| bad_request())?;
    let bounds: Geometry<f64> = wkt_geom.try_into().map_err(|_| bad_request())?;
    Ok(bounds)
}
/// STAC API Item endpoint
/// returns a GeoJSON Feature representing the item.
/// https://github.com/radiantearth/stac-api-spec/blob/master/stac-spec/item-spec/README.md
#[get("/collections/<collection_id>/<item_id>")]
pub fn get_collection_item(
    collection_id: String,
    item_id: String,
    coverage: &State<catalog::Service>
) -> Option<Json<String>> {
    // Unknown collection id -> None -> Rocket responds 404.
    let collection = match coverage.collections.get(&collection_id) {
        Some(c) => c,
        None => return None, // becomes a 404
    };
    let item = match collection.get_item(item_id) {
        Some(i) => i,
        None => return None, // 404
    };
    // Serializing our own STAC feature type is not expected to fail.
    Some(Json(to_string(&item.to_stac_feature()).unwrap()))
}
/// Details for a single collection. The collection that matches `collection_id`
/// will be represented as a filtered FeatureCollection if an `intersects` or `contains` filter
/// is supplied; or if no filter supplied, a STAC Collection will be returned.
/// example: /collections/imagery?intersects=POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))
#[get("/collections/<collection_id>?<intersects>&<contains>&<sortby>&<limit>")]
pub fn get_collection(
    collection_id: String,
    intersects: Option<&str>,
    contains: Option<&str>,
    sortby: Option<&str>,
    limit: Option<usize>,
    coverage: &State<catalog::Service>,
) -> Result<Option<Json<String>>, BadRequest<String>> {
    // Unknown collection id -> Ok(None) -> 404.
    let collection = match coverage.collections.get(&collection_id) {
        Some(c) => c,
        None => return Ok(None), // 404
    };
    // No spatial filter supplied: return the STAC Collection metadata.
    if intersects.is_none() && contains.is_none() {
        let stac_collection = &collection.stac_collection(&coverage.base_url);
        return Ok(Some(Json(to_string(stac_collection).unwrap())));
    }
    // The two spatial filters are mutually exclusive.
    if intersects.is_some() && contains.is_some() {
        return Err(BadRequest(Some("Use either intersects or contains, not both".into())));
    }
    let mut filtered_images: Vec<ImageryFile> = Vec::new();
    // Filter on possible intersects value.
    if let Some(wkt) = intersects {
        let bounds = query_to_bounds(wkt)?;
        filtered_images = collection.intersects(&bounds);
    }
    // Filter on possible contains value (the geometry must be a polygon).
    if let Some(wkt) = contains {
        let bounds = Polygon::try_from(query_to_bounds(wkt)?).unwrap();
        filtered_images = collection.contains(&bounds);
    }
    // Handle sorting. Currently only "spatial_resolution" is supported.
    if let Some(s) = sortby {
        let mut sort_key = s.trim();
        let mut ordering = SortOrder::Asc;
        // Strip a leading +/- direction marker from the *trimmed* key.
        // (Previously the prefix was stripped from the raw string, so a
        // value with leading whitespace never matched.)
        // Note: Rocket parses `+` as whitespace; ascending usually arrives
        // via the default, and `%2B` reaches the `+` branch explicitly.
        if let Some(key) = sort_key.strip_prefix('+') {
            sort_key = key;
        } else if let Some(key) = sort_key.strip_prefix('-') {
            ordering = SortOrder::Desc;
            sort_key = key;
        }
        // Hopefully a temporary measure: a single hard-coded sort field.
        // Ideally we could sort by any field of a Serde Map dynamically.
        if sort_key == "spatial_resolution" {
            // unwrap on partial_cmp assumes resolution averages are never NaN.
            let cmp = match ordering {
                SortOrder::Asc => |a: &ImageryFile, b: &ImageryFile| a.properties.resolution.avg().partial_cmp(&b.properties.resolution.avg()).unwrap(),
                SortOrder::Desc => |a: &ImageryFile, b: &ImageryFile| b.properties.resolution.avg().partial_cmp(&a.properties.resolution.avg()).unwrap(),
            };
            filtered_images.sort_by(cmp)
        } else {
            return Err(BadRequest(Some(
                "sortby currently only supports `sortby=spatial_resolution`. \
                Please file an issue to request sorting by more fields.".into()
            )));
        }
    }
    // Apply the optional result-count limit in place.
    if let Some(lim) = limit {
        filtered_images.truncate(lim);
    }
    Ok(Some(Json(to_string(&filtered_images.as_feature_collection()).unwrap())))
}
/// Preflight (CORS `OPTIONS`) handler for the `search_all_collections`
/// POST endpoint; simply acknowledges with 200 OK.
#[options("/stac/search")]
pub fn search_all_preflight() -> Status {
    Status::Ok
}
/// SearchRequest represents the request body schema expected by the search_all_collections endpoint
#[derive(serde::Deserialize)]
pub struct SearchRequest {
    // Optional [minx, miny, maxx, maxy] bounding-box filter.
    bbox: Option<Vec<f64>>,
    // Optional WKT geometry; selects images intersecting it.
    intersects: Option<String>,
    // Optional WKT polygon; selects images fully containing it.
    contains: Option<String>,
    // Sort field, optionally prefixed with `+`/`-` for direction.
    sortby: Option<String>,
    // Max result count; accepted as a JSON number or a numeric string
    // (sat-api-browser sends a string) — see the handler.
    limit: Option<serde::json::Value>,
}
/// search_all_collections allows searching through every collection in the catalog at once.
/// note: much of this code is the same/similar to the collections search, and could be factored out into a
/// more modular function.
/// this endpoint works with https://github.com/sat-utils/sat-api-browser
#[post("/stac/search", data="<params>")]
pub fn search_all_collections(
params: serde::json::Json<SearchRequest>,
coverage: &State<catalog::Service>,
) -> Result<Option<Json<String>>, BadRequest<String>> {
let mut images: Vec<ImageryFile> = Vec::new();
// combine all the collections
// depending on the performance we could possibly create an index over all the collections on startup.
for (_, c) in coverage.collections.iter() {
images.extend(c.all().to_owned())
}
// We only want to do one spatial operation. To enforce this,
// make a vec of bools representing all the possible spatial query params.
// true becomes 1 when cast to an int, so we can add up all the `trues` to make
// sure that only one (or none) was provided.
let spatial_params_mask = vec![
params.intersects.is_some() as u8,
params.contains.is_some() as u8,
params.bbox.is_some() as u8
];
if spatial_params_mask.iter().sum::<u8>() > 1 {
return Err(BadRequest(Some("Use only one of: bbox, intersects or contains".into())))
}
// filter on a bbox.
// if bbox provided, we'll always do an intersects query (instead of contains)
match ¶ms.bbox {
Some(b) => {
let bounds: Polygon<f64> = bbox_to_bounds(b.to_vec())?.try_into().unwrap();
images = images.intersects(&bounds);
}
None => (),
}
// filter on possible intersects value
match ¶ms.intersects {
Some(wkt) => {
let bounds: Geometry<f64> = query_to_bounds(&wkt)?.try_into().unwrap();
images = images.intersects(&bounds);
},
None => (),
};
// filter on possible contains value
match ¶ms.contains {
Some(wkt) => {
let bounds = query_to_bounds(&wkt)?.try_into().unwrap();
images = images.contains_polygon(&bounds);
},
None => (),
};
// handle sorting.
// currently only "spatial_resolution" is supported.
match ¶ms.sortby {
Some(s) => {
let mut sort_key = s.trim();
let mut ordering = SortOrder::Asc;
// sort by ascending
match s.strip_prefix("+") {
Some(v) => {
sort_key = v;
},
None => (),
}
// sort by descending
match s.strip_prefix("-") {
Some(v) => {
ordering = SortOrder::Desc;
sort_key = v;
},
None => (),
}
if sort_key == "spatial_resolution" {
let cmp = match ordering {
SortOrder::Asc => |a: &ImageryFile, b: &ImageryFile| a.properties.resolution.avg().partial_cmp(&b.properties.resolution.avg()).unwrap(),
SortOrder::Desc => |a: &ImageryFile, b: &ImageryFile| b.properties.resolution.avg().partial_cmp(&a.properties.resolution.avg()).unwrap(),
} ;
images.sort_by(cmp)
}
else {
return Err(BadRequest(Some(
"sortby currently only supports `sortby=spatial_resolution`. \
Please file an issue to request sorting by more fields.".into()
)))
}
},
None => (),
}
// try to convert `limit` from a serde::json::Value into an integer (via a string, if necessary).
// this is here so that we can accept limit as an integer or a string (see the comments below).
// github.com/sat-utils/sat-api-browser provides the limit as a string.
match ¶ms.limit {
Some(v) => {
match v {
// limit supplied as a JSON number. e.g. `limit: 20`
serde_json::Value::Number(n) => {
match n.as_u64() {
Some(lim) => {
images = images.into_iter().take(lim as usize).collect::<Vec<_>>();
},
None => (),
}
},
// limit supplied as a JSON string. e.g. `limit: "20"`
serde_json::Value::String(s) => {
match s.parse::<u64>() {
Ok(lim) => {
images = images.into_iter().take(lim as usize).collect::<Vec<_>>();
},
Err(_) => (),
}
},
_ => ()
};
},
None => (),
}
Ok(Some(Json(to_string(&images.as_feature_collection()).unwrap())))
}
/// returns a tile from a collection item covering the tile defined by its x/y/z address.
/// work in progress, will probably be removed.
/// NOTE(review): the `unwrap` on the collection lookup means an unknown
/// `collection_id` panics (500) instead of returning 404 — acceptable only
/// while this endpoint is experimental.
#[get("/tiles/<collection_id>/<z>/<x>/<y>")]
pub fn get_tiles(collection_id: String, z: u8, x:u32, y:u32, coverage: &State<catalog::Service>) -> String {
    // Tile address -> geographic bounds used for the spatial query.
    let bounds: Geometry<f64> = transform::to_bounds(x, y, z).try_into().unwrap();
    let collection = coverage.collections.get(&collection_id).unwrap();
    // currently this just returns files that could provide coverage for the tile.
    let files_for_tile = collection.all().intersects(&bounds);
    // stand-in for an actual tile
    format!("{} {} {} :\n {:?} :\n {:?}", z, x, y, bounds, files_for_tile)
}
/// STAC API landing page
/// based on https://github.com/radiantearth/stac-api-spec/blob/master/overview.md#example-landing-page
#[get("/")]
pub fn landing(coverage: &State<catalog::Service>) -> Json<String> {
    // Serializing our own landing document is not expected to fail.
    Json(to_string(&coverage.stac_landing()).unwrap())
}
|
extern crate seahash;
extern crate jump_consistent_hash;
use jump_consistent_hash::slot;
#[test]
#[ignore]
fn simulate_rebalance() {
    // Compare how many keys change buckets when the cluster grows by one
    // node: naive modulo hashing vs jump consistent hashing (`slot`).
    let mut n = 1;
    for i in 1..10 {
        simulate(" mod", n, n + 1, bymod);
        simulate("slot", n, n + 1, slot);
        // Grow the cluster size triangularly (1, 2, 4, 7, 11, ...).
        n += i;
    }
}
/// Naive bucket assignment: plain modulo over the current bucket count.
fn bymod(key: u64, len: usize) -> u32 {
    let bucket = key % len as u64;
    bucket as u32
}
/// Counts how many of 65536 keys land in a different bucket when the
/// bucket count changes from `before` to `after` under `func`, and prints
/// a formatted summary line.
fn simulate<F>(name: &'static str, before: usize, after: usize, func: F)
where
    F: Fn(u64, usize) -> u32,
{
    let moves = (0u64..65536)
        .filter(|&key| func(key, before) != func(key, after))
        .count();
    println!("{:>5} {:>2}->{:<3} {:>6}", name, before, after, moves);
}
|
use std::collections::HashMap;
use std::ffi::{CStr, CString};
use std::path::Path;
use std::{mem, ptr};
use worker::{ffi, ComponentId, Connection, EntityId};
/// Safe wrapper around a SpatialOS `Worker_SnapshotOutputStream` handle.
pub struct SnapshotOutputStream {
    // Raw stream handle owned by this wrapper; destroyed in `Drop`.
    pointer: *mut ffi::Worker_SnapshotOutputStream,
    // Keeps the default component vtable alive as long as the stream —
    // the C side presumably retains the pointer passed at creation
    // (TODO confirm against the Worker SDK).
    _vtable: Box<ffi::Worker_ComponentVtable>,
}
impl Drop for SnapshotOutputStream {
    fn drop(&mut self) {
        // SAFETY: `pointer` was produced by
        // `Worker_SnapshotOutputStream_Create` in `new`, is never exposed,
        // and is destroyed exactly once here.
        unsafe {
            ffi::Worker_SnapshotOutputStream_Destroy(self.pointer);
        }
    }
}
impl SnapshotOutputStream {
    /// Opens a snapshot output stream at `filename`, using the default
    /// component vtable for serialization.
    ///
    /// Panics if the path is not valid UTF-8 or contains a NUL byte.
    pub fn new<P: AsRef<Path>>(filename: P) -> SnapshotOutputStream {
        let filename = CString::new(filename.as_ref().to_str().unwrap()).unwrap();
        let vtable = Connection::default_vtable();
        // Leak the vtable box temporarily so the C API gets a stable pointer.
        let vtable_ptr = Box::into_raw(vtable);
        let params = ffi::Worker_SnapshotParameters {
            component_vtable_count: 0,
            component_vtables: ptr::null(),
            default_component_vtable: vtable_ptr,
        };
        unsafe {
            let pointer = ffi::Worker_SnapshotOutputStream_Create(filename.as_ptr(), &params);
            // Reclaim the vtable Box so it is freed when this wrapper drops;
            // storing it in `_vtable` keeps it alive for the stream's
            // lifetime.
            SnapshotOutputStream {
                pointer,
                _vtable: Box::from_raw(vtable_ptr),
            }
        }
    }
    /// Writes one entity (id plus its component data) to the snapshot.
    ///
    /// Ownership of each `Schema_ComponentData` box is released to the C
    /// call and reclaimed (and dropped) immediately afterwards.
    ///
    /// NOTE(review): the `&str` error borrows a buffer owned by the C
    /// stream; it is presumably only valid until the next call on `self` —
    /// consider returning an owned `String` in a future API revision.
    pub fn write_entity(
        &mut self,
        components: HashMap<ComponentId, Box<ffi::Schema_ComponentData>>,
        entity_id: EntityId,
    ) -> Result<(), &str> {
        unsafe {
            // Convert the owned boxes into raw pointers inside FFI structs.
            let mut components: Vec<ffi::Worker_ComponentData> = components
                .into_iter()
                .map(|(component_id, data)| {
                    let mut component_data: ffi::Worker_ComponentData = mem::zeroed();
                    component_data.component_id = component_id;
                    component_data.schema_type = Box::into_raw(data);
                    component_data
                })
                .collect();
            let components_ptr = components.as_mut_ptr();
            let entity = ffi::Worker_Entity {
                entity_id: entity_id,
                component_count: components.len() as u32,
                components: components_ptr,
            };
            let result = ffi::Worker_SnapshotOutputStream_WriteEntity(self.pointer, &entity);
            // Reclaim and drop every schema box handed out above.
            for component_data in components.iter() {
                Box::from_raw(component_data.schema_type);
            }
            // A zero return from the C API signals failure.
            if result == 0 {
                let error_ptr = ffi::Worker_SnapshotOutputStream_GetError(self.pointer);
                let error = CStr::from_ptr(error_ptr);
                Result::Err(error.to_str().unwrap())
            } else {
                Result::Ok(())
            }
        }
    }
}
|
#![allow(clippy::unreadable_literal)]
//! Gruvbox
//! <https://github.com/morhetz/gruvbox>
use iced::color;
use crate::gui::styles::types::custom_palette::{CustomPalette, PaletteExtension};
use crate::gui::styles::types::palette::Palette;
/// Gruvbox (night style)
/// Hex values come from the upstream gruvbox palette; the trailing
/// comments give the upstream color names (bg, fg, bg4, ...).
pub(in crate::gui::styles) fn gruvbox_dark() -> CustomPalette {
    CustomPalette {
        palette: Palette {
            primary: color!(0x282828),      // bg
            secondary: color!(0xfe8019),    // orange
            outgoing: color!(0x8ec07c),     // aqua
            buttons: color!(0x7c6f64),      // bg4
            text_headers: color!(0x1d2021), // bg0_h
            text_body: color!(0xebdbb2),    // fg
        },
        extension: PaletteExtension {
            // yellow, slightly transparent
            starred: color!(0xd79921, 0.8),
            chart_badge_alpha: 0.15,
            round_borders_alpha: 0.12,
            round_containers_alpha: 0.05,
        },
    }
}
/// Gruvbox (day style)
/// Hex values come from the upstream gruvbox light palette; the trailing
/// comments give the upstream color names.
pub(in crate::gui::styles) fn gruvbox_light() -> CustomPalette {
    CustomPalette {
        palette: Palette {
            primary: color!(0xfbf1c7),      // bg
            secondary: color!(0xd65d0e),    // orange
            outgoing: color!(0x689d6a),     // aqua
            buttons: color!(0xd5c4a1),      // bg2
            text_headers: color!(0xf9f5d7), // bg0_h
            text_body: color!(0x282828),    // fg
        },
        extension: PaletteExtension {
            starred: color!(0xd79921, 0.8), // yellow
            chart_badge_alpha: 0.75,
            round_borders_alpha: 0.45,
            round_containers_alpha: 0.2,
        },
    }
}
|
/// A primitive runtime value: either a boolean or an f64 number.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Primitive {
    Boolean(bool),
    Number(f64),
}
impl From<bool> for Primitive {
fn from(boolean: bool) -> Self {
Self::Boolean(boolean)
}
}
impl From<f64> for Primitive {
fn from(number: f64) -> Self {
Self::Number(number)
}
}
|
// Generated register accessor types (svd2rust style) for `HDP1R_PRG`;
// hand edits are likely to be overwritten on regeneration.
#[doc = "Register `HDP1R_PRG` reader"]
pub type R = crate::R<HDP1R_PRG_SPEC>;
#[doc = "Register `HDP1R_PRG` writer"]
pub type W = crate::W<HDP1R_PRG_SPEC>;
#[doc = "Field `HDP1_STRT` reader - HDPL barrier start set in number of 8-Kbyte sectors"]
pub type HDP1_STRT_R = crate::FieldReader;
#[doc = "Field `HDP1_STRT` writer - HDPL barrier start set in number of 8-Kbyte sectors"]
pub type HDP1_STRT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `HDP1_END` reader - HDPL barrier end set in number of 8-Kbyte sectors"]
pub type HDP1_END_R = crate::FieldReader;
#[doc = "Field `HDP1_END` writer - HDPL barrier end set in number of 8-Kbyte sectors"]
pub type HDP1_END_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
// Read accessors; the 0x7f masks match the 7-bit field widths above.
impl R {
    #[doc = "Bits 0:6 - HDPL barrier start set in number of 8-Kbyte sectors"]
    #[inline(always)]
    pub fn hdp1_strt(&self) -> HDP1_STRT_R {
        HDP1_STRT_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 16:22 - HDPL barrier end set in number of 8-Kbyte sectors"]
    #[inline(always)]
    pub fn hdp1_end(&self) -> HDP1_END_R {
        HDP1_END_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
}
// Write accessors; field writers are positioned at bit offsets 0 and 16.
impl W {
    #[doc = "Bits 0:6 - HDPL barrier start set in number of 8-Kbyte sectors"]
    #[inline(always)]
    #[must_use]
    pub fn hdp1_strt(&mut self) -> HDP1_STRT_W<HDP1R_PRG_SPEC, 0> {
        HDP1_STRT_W::new(self)
    }
    #[doc = "Bits 16:22 - HDPL barrier end set in number of 8-Kbyte sectors"]
    #[inline(always)]
    #[must_use]
    pub fn hdp1_end(&mut self) -> HDP1_END_W<HDP1R_PRG_SPEC, 16> {
        HDP1_END_W::new(self)
    }
    // Whole-register raw write; unsafe because it bypasses the field-level
    // accessors and their implied bit layout.
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Marker type tying the generic R/W plumbing to this register's layout.
#[doc = "FLASH HDP Bank 1 configuration\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hdp1r_prg::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hdp1r_prg::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HDP1R_PRG_SPEC;
impl crate::RegisterSpec for HDP1R_PRG_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`hdp1r_prg::R`](R) reader structure"]
impl crate::Readable for HDP1R_PRG_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hdp1r_prg::W`](W) writer structure"]
impl crate::Writable for HDP1R_PRG_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HDP1R_PRG to value 0"]
impl crate::Resettable for HDP1R_PRG_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use bevy::{app::AppExit, prelude::*};
use big_brain::{pickers, prelude::*};
#[test]
fn steps() {
    // Integration test: builds a minimal Bevy app with the BigBrain plugin
    // and runs it until `exit_action` sends `AppExit`.
    println!("steps test");
    App::new()
        .add_plugins(MinimalPlugins)
        .add_plugin(BigBrainPlugin)
        .init_resource::<GlobalState>()
        .add_startup_system(setup)
        // The scorer must run before the thinker picks an action each frame.
        .add_system_to_stage(CoreStage::First, no_failure_score)
        .add_system(action1)
        .add_system(action2)
        .add_system(exit_action)
        .add_system(failure_action)
        .add_system_to_stage(CoreStage::Last, last)
        .run();
    println!("end");
}
/// Spawns the test thinker: while `NoFailureScore` scores above 0.5 it runs
/// the failing step; once a failure is recorded the score drops to 0 and
/// the fallback chain Action1 -> Action2 -> ExitAction runs instead.
fn setup(mut cmds: Commands) {
    cmds.spawn().insert(
        Thinker::build()
            .picker(pickers::FirstToScore::new(0.5))
            .when(NoFailureScore, Steps::build().step(FailureAction))
            .otherwise(Steps::build().step(Action1).step(Action2).step(ExitAction)),
    );
}
/// Marker for the first step of the fallback chain (driven by `action1`).
#[derive(Default, Debug, Clone)]
struct Action1;
impl ActionBuilder for Action1 {
    // Attaches the marker component and requests execution.
    fn build(&self, cmd: &mut Commands, action: Entity, _actor: Entity) {
        cmd.entity(action)
            .insert(self.clone())
            .insert(ActionState::Requested);
    }
}
/// Drives `Action1`: Requested -> Executing -> Success. The two `if`s
/// deliberately fall through so the full transition happens in one tick.
fn action1(mut query: Query<(&Actor, &mut ActionState), With<Action1>>) {
    for (Actor(_actor), mut state) in query.iter_mut() {
        println!("action1 {:?}", state);
        if *state == ActionState::Requested {
            *state = ActionState::Executing;
        }
        if *state == ActionState::Executing {
            *state = ActionState::Success;
        }
    }
}
/// Marker for the second step of the fallback chain (driven by `action2`).
#[derive(Default, Debug, Clone)]
struct Action2;
impl ActionBuilder for Action2 {
    // Attaches the marker component and requests execution.
    fn build(&self, cmd: &mut Commands, action: Entity, _actor: Entity) {
        cmd.entity(action)
            .insert(self.clone())
            .insert(ActionState::Requested);
    }
}
/// Drives `Action2`: Requested -> Executing -> Success in a single tick
/// (the `if`s intentionally fall through, mirroring `action1`).
fn action2(mut query: Query<(&Actor, &mut ActionState), With<Action2>>) {
    for (Actor(_actor), mut state) in query.iter_mut() {
        println!("action2 {:?}", state);
        if *state == ActionState::Requested {
            *state = ActionState::Executing;
        }
        if *state == ActionState::Executing {
            *state = ActionState::Success;
        }
    }
}
/// Marker for the terminating step that shuts the app down (see
/// `exit_action`).
#[derive(Default, Debug, Clone)]
struct ExitAction;
impl ActionBuilder for ExitAction {
    // Attaches the marker component and requests execution.
    fn build(&self, cmd: &mut Commands, action: Entity, _actor: Entity) {
        cmd.entity(action)
            .insert(self.clone())
            .insert(ActionState::Requested);
    }
}
/// Drives `ExitAction`: once executing, sends `AppExit` every tick until
/// the app shuts down (the state is intentionally left at Executing).
fn exit_action(
    mut query: Query<(&Actor, &mut ActionState), With<ExitAction>>,
    mut app_exit_events: EventWriter<AppExit>,
) {
    for (Actor(_actor), mut state) in query.iter_mut() {
        println!("exit_action {:?}", state);
        if *state == ActionState::Requested {
            *state = ActionState::Executing;
        }
        if *state == ActionState::Executing {
            app_exit_events.send(AppExit);
        }
    }
}
/// Runs at the end of each frame; prints a blank separator line between
/// frames of test output.
fn last() {
    println!();
}
/// Marker for the action that fails on purpose (see `failure_action`).
#[derive(Default, Debug, Clone)]
struct FailureAction;
impl ActionBuilder for FailureAction {
    // Attaches the marker component and requests execution.
    fn build(&self, cmd: &mut Commands, action: Entity, _actor: Entity) {
        cmd.entity(action)
            .insert(self.clone())
            .insert(ActionState::Requested);
    }
}
/// Drives `FailureAction`: records the failure in `GlobalState` and enters
/// `Failure`, which zeroes `NoFailureScore` and routes the thinker to the
/// fallback chain. The two `if`s fall through, so both transitions happen
/// in one tick.
fn failure_action(
    mut query: Query<(&Actor, &mut ActionState), With<FailureAction>>,
    mut global_state: ResMut<GlobalState>,
) {
    for (Actor(_actor), mut state) in query.iter_mut() {
        println!("failure_action {:?}", state);
        if *state == ActionState::Requested {
            *state = ActionState::Executing;
        }
        if *state == ActionState::Executing {
            global_state.failure = true;
            *state = ActionState::Failure;
        }
    }
}
/// Shared test resource: flipped to true once `FailureAction` has run.
#[derive(Default)]
struct GlobalState {
    failure: bool,
}
/// Scorer marker: scores 1.0 until a failure has been recorded (see
/// `no_failure_score`).
#[derive(Debug, Clone)]
struct NoFailureScore;
impl ScorerBuilder for NoFailureScore {
    // Attaches the scorer marker to the scorer entity.
    fn build(&self, cmd: &mut Commands, action: Entity, _actor: Entity) {
        cmd.entity(action).insert(self.clone());
    }
}
/// Scorer system: full score (1.0) until a failure has been recorded, then
/// zero — which flips the thinker over to the fallback step chain.
fn no_failure_score(
    mut query: Query<(&NoFailureScore, &mut Score)>,
    global_state: Res<GlobalState>,
) {
    // The score depends only on the shared flag, so compute it once.
    let value = if global_state.failure { 0.0 } else { 1.0 };
    for (_, mut score) in query.iter_mut() {
        score.set(value);
    }
}
|
use actix_web::{http::StatusCode, HttpResponse};
use serde::{Deserialize, Serialize};
use std::string::ToString;
/// Row/DTO for a stored file: its display name plus a download link.
#[derive(sqlx::FromRow, Debug, Serialize, Deserialize)]
pub struct FileResponse {
    pub name: String,
    pub link: String,
}
/// Outcome marker included in every API response body.
/// NOTE(review): `Display` is not a std derive — presumably provided by a
/// `#[macro_use]` crate import (e.g. derive_more/strum) elsewhere; confirm.
#[derive(Debug, Serialize, Deserialize, Display)]
pub enum Status {
    SUCCESS,
    UNSUCCESS,
}
/// Authentication token payload returned to clients.
#[derive(Debug, Serialize, Deserialize)]
pub struct TokenResponse {
    pub token: String,
    pub token_type: String,
}
impl TokenResponse {
    /// Builds a bearer-token response wrapping the given token string.
    pub fn new(token: &str) -> Self {
        Self {
            token: token.to_owned(),
            token_type: String::from("bearer"),
        }
    }
}
/// Generic JSON envelope: status + human-readable message + optional payload.
#[derive(Debug, Serialize, Deserialize)]
pub struct ResponseBody<T> {
    pub status: Status,
    pub message: String,
    pub data: Option<T>,
}
impl<T> ResponseBody<T>
where
    T: Serialize,
{
    /// Creates an envelope with the given status and message, no payload.
    pub fn new(status: Status, msg: &str) -> Self {
        Self {
            status,
            message: msg.to_owned(),
            data: None,
        }
    }
    /// Builder-style: attaches a payload and returns the envelope.
    pub fn add_data(mut self, data: T) -> Self {
        self.data = Some(data);
        self
    }
    /// Renders the envelope as the JSON body of an `HttpResponse`.
    pub fn to_response(&self, status: StatusCode) -> HttpResponse {
        HttpResponse::build(status).json(self)
    }
}
|
use crate::drivers::fe310_g002::{AONDriver, CLINTDriver, SPIDriver};
use register::{mmio::*, register_bitfields, register_structs};
// Bitfield layout for the FE310-G002 PRCI (clock generation) registers:
// ring oscillator, crystal oscillator, PLL, and process monitor.
// Offsets/widths presumably follow the SiFive FE310-G002 manual — verify
// against the datasheet before changing.
register_bitfields! {
    u32,
    /// Ring Oscillator Configuration and Status
    HFROSCCFG [
        /// Ring Oscillator Divider Register
        HFROSCDIV OFFSET(0) NUMBITS(6) [],
        /// Ring Oscillator Trim Register
        HFROSCTRIM OFFSET(16) NUMBITS(5) [],
        /// Ring Oscillator Enable
        HFROSCEN OFFSET(30) NUMBITS(1) [
            Disabled = 0,
            Enabled = 1
        ],
        /// Ring Oscillator Ready
        HFROSCRDY OFFSET(31) NUMBITS(1) [
            NotReady = 0,
            Ready = 1
        ]
    ],
    /// Crystal Oscillator Configuration and Status
    HFXOSCCFG [
        /// Crystal Oscillator Enable
        HFXOSCEN OFFSET(30) NUMBITS(1) [
            Disabled = 0,
            Enabled = 1
        ],
        /// Crystal Oscillator Ready
        HFXOSCRDY OFFSET(31) NUMBITS(1) [
            NotReady = 0,
            Ready = 1
        ]
    ],
    /// PLL Configuration and Status
    PLLCFG [
        /// PLL R Value
        PLLR OFFSET(0) NUMBITS(3) [],
        /// PLL F Value
        PLLF OFFSET(4) NUMBITS(6) [],
        /// PLL Q Value
        PLLQ OFFSET(10) NUMBITS(2) [],
        /// PLL Select
        PLLSEL OFFSET(16) NUMBITS(1) [],
        /// PLL Reference Select
        PLLREFSEL OFFSET(17) NUMBITS(1) [],
        /// PLL Bypass
        PLLBYPASS OFFSET(18) NUMBITS(1) [],
        /// PLL Lock
        PLLLOCK OFFSET(31) NUMBITS(1) []
    ],
    /// PLL Final Divide Configuration
    PLLOUTDIV [
        /// PLL Final Divider Value
        PLLOUTDIV OFFSET(0) NUMBITS(6) [],
        /// PLL Final Divide By 1
        PLLOUTDIVBY1 OFFSET(8) NUMBITS(6) []
    ],
    /// Process Monitor Configuration and Status
    PROCMONCFG [
        /// Process Monitor Divide
        PROCMONDIVSEL OFFSET(0) NUMBITS(5) [],
        /// Process Monitor Delay Selector
        PROCMONDELAYSEL OFFSET(8) NUMBITS(5) [],
        /// Process Monitor Enable
        PROCMONEN OFFSET(16) NUMBITS(1) [],
        /// Process Monitor Select
        PROCMONSEL OFFSET(24) NUMBITS(2) []
    ]
}
// MMIO layout of the PRCI block. Note the reserved gap between 0x10 and
// the process-monitor register at 0xF0.
register_structs! {
    #[allow(non_snake_case)]
    pub RegisterBlock {
        (0x00 => HFROSCCFG: ReadWrite<u32, HFROSCCFG::Register>),
        (0x04 => HFXOSCCFG: ReadWrite<u32, HFXOSCCFG::Register>),
        (0x08 => PLLCFG: ReadWrite<u32, PLLCFG::Register>),
        (0x0C => PLLOUTDIV: ReadWrite<u32, PLLOUTDIV::Register>),
        (0x10 => _reserved0),
        (0xF0 => PROCMONCFG: ReadWrite<u32, PROCMONCFG::Register>),
        (0xF4 => @END),
    }
}
/// Driver for the FE310 PRCI (clock configuration) peripheral.
pub struct PRCIDriver {
    // Physical base address of the memory-mapped register block.
    base_address: usize,
}
// Deref lets register accesses be written as `self.PLLCFG`, etc.
impl core::ops::Deref for PRCIDriver {
    type Target = RegisterBlock;
    fn deref(&self) -> &Self::Target {
        // SAFETY: assumes `base_address` points at the PRCI register block
        // and stays valid for the driver's lifetime — guaranteed by the
        // caller of `new` (TODO confirm at the construction site).
        unsafe { &*self.ptr() }
    }
}
impl PRCIDriver {
    /// Creates a driver over the PRCI register block at `base_address`.
    /// The address is not validated here.
    pub fn new(base_address: usize) -> Self {
        PRCIDriver { base_address }
    }
    // Typed pointer to the MMIO register block (consumed by Deref).
    fn ptr(&self) -> *const RegisterBlock {
        self.base_address as *const _
    }
    /// No-op initialization hook, kept for driver-interface uniformity.
    pub fn init(&self) -> crate::interface::driver::Result {
        Ok(())
    }
    /// Enables the internal ring oscillator with the given divider/trim,
    /// busy-waits until it reports ready, then deselects the PLL so the
    /// core clock comes from the HFROSC.
    pub fn configure_hfrosc(&mut self, div: u32, trim: u32) {
        self.HFROSCCFG.modify(
            HFROSCCFG::HFROSCDIV.val(div)
                + HFROSCCFG::HFROSCTRIM.val(trim)
                + HFROSCCFG::HFROSCEN::Enabled,
        );
        // Spin until the oscillator reports ready.
        loop {
            if self.HFROSCCFG.is_set(HFROSCCFG::HFROSCRDY) {
                break;
            }
        }
        self.PLLCFG.modify(PLLCFG::PLLSEL::CLEAR);
    }
    // https://github.com/sifive/freedom-e-sdk/blob/v1_0/bsp/env/freedom-e300-hifive1/init.c#L55
    /// Configures and selects the PLL as the core clock source, following
    /// the freedom-e-sdk init sequence linked above. `refsel` picks the
    /// PLL reference, `bypass` skips the PLL, and `r`/`f`/`q` are the raw
    /// encoded divider fields (the caller encodes them, e.g. div-by-2 = 1).
    #[inline(never)]
    pub fn setup_pll_for_use(
        &mut self,
        refsel: u32,
        bypass: bool,
        r: u32,
        f: u32,
        q: u32,
        spi: &mut SPIDriver,
        aon: &AONDriver,
    ) {
        // Ensure that we aren't running off the PLL before we mess with it.
        if self.PLLCFG.is_set(PLLCFG::PLLSEL) {
            // Make sure the HFROSC is running at its default setting
            self.configure_hfrosc(4, 16);
        }
        // Set PLL Source to be HFXOSC if available.
        let mut config_value = 0;
        config_value |= PLLCFG::PLLREFSEL.val(refsel).value;
        if bypass {
            // Bypass
            config_value |= PLLCFG::PLLBYPASS::SET.value;
            self.PLLCFG.set(config_value);
            // If we don't have an HFXTAL, this doesn't really matter.
            // Set our Final output divide to divide-by-1:
            self.PLLOUTDIV
                .write(PLLOUTDIV::PLLOUTDIVBY1.val(1) + PLLOUTDIV::PLLOUTDIV.val(0));
        } else {
            // In case we are executing from QSPI,
            // (which is quite likely) we need to
            // set the QSPI clock divider appropriately
            // before boosting the clock frequency.
            // Div = f_sck/2
            spi.set_clk_div(8);
            // Set DIV Settings for PLL
            // Both HFROSC and HFXOSC are modeled as ideal
            // 16MHz sources (assuming dividers are set properly for
            // HFROSC).
            // (Legal values of f_REF are 6-48MHz)
            // Set DIVR to divide-by-2 to get 8MHz frequency
            // (legal values of f_R are 6-12 MHz)
            // NOTE(review): BYPASS is asserted while the dividers are
            // programmed and cleared afterwards — appears intentional per
            // the SDK sequence above; confirm against the manual.
            config_value |= PLLCFG::PLLBYPASS::SET.value;
            config_value |= PLLCFG::PLLR.val(r).value;
            // Set DIVF to get 512Mhz frequncy
            // There is an implied multiply-by-2, 16Mhz.
            // So need to write 32-1
            // (legal values of f_F are 384-768 MHz)
            config_value |= PLLCFG::PLLF.val(f).value;
            // Set DIVQ to divide-by-2 to get 256 MHz frequency
            // (legal values of f_Q are 50-400Mhz)
            config_value |= PLLCFG::PLLQ.val(q).value;
            // Set our Final output divide to divide-by-1:
            self.PLLOUTDIV
                .write(PLLOUTDIV::PLLOUTDIVBY1.val(1) + PLLOUTDIV::PLLOUTDIV.val(0));
            self.PLLCFG.set(config_value);
            self.PLLCFG.modify(PLLCFG::PLLBYPASS::CLEAR);
            // Problem is above this
            // let now = clint.get_mtime_lo();
            // Wait a few RTC ticks for the PLL to start up before polling
            // the lock bit.
            let now = aon.get_rtc_lo();
            loop {
                // if clint.get_mtime_lo() - now >= 4 {
                //     break;
                // }
                if aon.get_rtc_lo() - now >= 4 {
                    break;
                }
            }
            // Spin until the PLL reports lock.
            loop {
                if self.PLLCFG.is_set(PLLCFG::PLLLOCK) {
                    break;
                }
            }
        }
        // Switch over to PLL Clock source
        self.PLLCFG.modify(PLLCFG::PLLSEL::SET);
    }
    // #[inline(never)]
    // pub fn setup_pll_for_use(&mut self, refsel: u32, bypass: bool, r: u32, f: u32, q:u32, spi: &mut SPIDriver, aon:&AONDriver) {
    //     self.PLLCFG.write(PLLCFG::PLLREFSEL.val(1) + PLLCFG::PLLR.val(1) + PLLCFG::PLLF.val(80/2-1) + PLLCFG::PLLQ.val(1));
    //     for _ in 0..100 {
    //         unsafe { asm!("nop"); }
    //     }
    //     while !self.PLLCFG.is_set(PLLCFG::PLLLOCK) {}
    //     self.PLLCFG.modify(PLLCFG::PLLSEL.val(1));
    // }
}
|
use std::iter::{Cycle, Peekable};
use tensorflow_protos::types::DataType;
use std::fmt::Display;
use codegen as cg;
use protobuf;
/// Escapes `name` by appending an underscore when it collides with a Rust
/// keyword (strict, reserved, or 2018-edition), so it can be used as an
/// identifier in generated code. Non-keywords are returned unchanged.
pub(crate) fn escape_keyword(name: &str) -> String {
    // Collapses the original 52 identical match arms into a lookup table;
    // keep this list in sync with the keywords of the target Rust edition.
    const KEYWORDS: &[&str] = &[
        "as", "break", "const", "continue", "crate", "else", "enum", "extern",
        "false", "fn", "for", "if", "impl", "in", "let", "loop", "match",
        "mod", "move", "mut", "pub", "ref", "return", "self", "Self",
        "static", "struct", "super", "trait", "true", "type", "unsafe",
        "use", "where", "while", "dyn", "abstract", "become", "box", "do",
        "final", "macro", "override", "priv", "typeof", "unsized", "virtual",
        "yield", "async", "await", "try", "union",
    ];
    if KEYWORDS.contains(&name) {
        format!("{}_", name)
    } else {
        name.to_string()
    }
}
/// Returns a boxed iterator over the 26 lowercase ASCII letters, 'a'..='z'.
fn single_ascii_iter() -> Box<dyn Iterator<Item=char>> {
    Box::new((b'a'..=b'z').map(char::from))
}
/// Iterator over 'a'..='z'. Its `Clone` restarts from 'a' rather than
/// preserving the current position (see the note on the impl below).
struct SingleAsciiIter {
    // Boxed because the concrete map-iterator type is unnameable here.
    iter: Box<dyn Iterator<Item=char>>,
}
impl SingleAsciiIter {
    fn new() -> Self {
        Self {
            iter: single_ascii_iter(),
        }
    }
}
impl Iterator for SingleAsciiIter {
    type Item = char;
    fn next(&mut self) -> Option<char> {
        self.iter.next()
    }
}
impl Clone for SingleAsciiIter {
    // NOTE: deliberately NOT a state-preserving clone — a clone always
    // starts back at 'a', since the boxed inner iterator is not itself
    // `Clone`. This suits its use inside `Cycle` in `AsciiIter`.
    fn clone(&self) -> Self {
        Self::new()
    }
}
/// Infinite iterator that produces unique strings of lower case ascii.
/// The first character position varies fastest, so after "z" it yields
/// "aa", then "ba", "ca", ... (NOTE(review): this differs from the
/// "aa, ab, ac" order one might expect, but every string produced is
/// unique, which is all callers need — confirm before relying on order).
pub(crate) struct AsciiIter {
    // One cycling a..z iterator per output character position.
    iters: Vec<Peekable<Cycle<SingleAsciiIter>>>,
    // Current index (0..26) of each position; used to detect wrap-around.
    pos: Vec<u8>,
}
impl AsciiIter {
    pub(crate) fn new() -> Self {
        Self {
            iters: Vec::new(),
            pos: Vec::new(),
        }
    }
    // Appends one more character position (used on the first call and
    // whenever every existing position has wrapped, e.g. "z" -> "aa").
    fn add_char(&mut self) {
        self.iters.push(SingleAsciiIter::new().cycle().peekable());
        self.pos.push(0);
    }
}
impl Iterator for AsciiIter {
    type Item = String;
    fn next(&mut self) -> Option<String> {
        // Advance odometer-style: bump the first position; on wrap (back
        // to 0) carry into the next position, and so on.
        for (idx, i) in self.pos.iter_mut().enumerate() {
            self.iters[idx].next();
            *i += 1;
            *i %= 26;
            if *i != 0 {
                return Some(self.iters.iter_mut().map(|x| x.peek().unwrap()).collect());
            }
        }
        // Every position wrapped (or this is the first call): grow by one.
        self.add_char();
        Some(self.iters.iter_mut().map(|x| x.peek().unwrap()).collect())
    }
}
/// Maps a TensorFlow `DataType` to the Rust type name used in generated
/// code. Quantized types map to their plain integer carriers; complex and
/// bfloat16 map to wrapper types expected to exist in the generated crate.
/// Unsupported types produce an `Err` with a diagnostic message.
pub(crate) fn tf_data_type_to_rust(data_type: DataType) -> Result<String, String> {
    match data_type {
        DataType::DT_FLOAT => Ok("f32".to_string()),
        DataType::DT_DOUBLE => Ok("f64".to_string()),
        DataType::DT_INT32 => Ok("i32".to_string()),
        DataType::DT_UINT8 => Ok("u8".to_string()),
        DataType::DT_INT16 => Ok("i16".to_string()),
        DataType::DT_INT8 => Ok("i8".to_string()),
        DataType::DT_STRING => Ok("String".to_string()),
        DataType::DT_COMPLEX64 => Ok("OtherComplex<f32>".to_string()),
        DataType::DT_INT64 => Ok("i64".to_string()),
        DataType::DT_BOOL => Ok("bool".to_string()),
        DataType::DT_QINT8 => Ok("i8".to_string()),
        DataType::DT_QUINT8 => Ok("u8".to_string()),
        DataType::DT_QINT32 => Ok("i32".to_string()),
        DataType::DT_BFLOAT16 => Ok("BFloat16".to_string()),
        DataType::DT_QINT16 => Ok("i16".to_string()),
        DataType::DT_QUINT16 => Ok("u16".to_string()),
        DataType::DT_UINT16 => Ok("u16".to_string()),
        DataType::DT_COMPLEX128 => Ok("OtherComplex<f64>".to_string()),
        DataType::DT_UINT32 => Ok("u32".to_string()),
        DataType::DT_UINT64 => Ok("u64".to_string()),
        _ => Err(format!("Unsupported data type {:?}", data_type))
    }
}
/// Builds the generic type `wrapper<inner>` (e.g. `Vec` + `f32` -> `Vec<f32>`).
pub(crate) fn wrap_type<T: Into<cg::Type>, U: Into<cg::Type>>(wrapper: T, inner: U) -> cg::Type {
    let mut wrapped: cg::Type = wrapper.into();
    wrapped.generic(inner);
    wrapped
}
/// Renders a codegen `Type` into its Rust source-text representation.
///
/// Returns `Err("Failed to format")` if the codegen formatter reports an error.
pub(crate) fn type_to_string(ty: &cg::Type) -> Result<String, String> {
    let mut s = String::new();
    let mut formatter = cg::Formatter::new(&mut s);
    // Map the formatter error through `?` instead of the
    // `if let Err(_) { … } else { … }` dance.
    ty.fmt(&mut formatter)
        .map_err(|_| "Failed to format".to_string())?;
    Ok(s)
}
/// Serializes a protobuf message into its binary wire format.
///
/// Panics with "Failed to dump protobuf" if writing fails (writing into a
/// `Vec` should normally be infallible).
/// NOTE(review): `write_to_with_cached_sizes` presumably requires sizes to
/// have been computed beforehand (`compute_size`) — confirm; otherwise
/// `write_to_bytes` would be the safer entry point.
pub(crate) fn dump_protobuf<T: protobuf::Message>(message: &T) -> Vec<u8>{
let mut buffer = Vec::new();
message.write_to_with_cached_sizes(&mut protobuf::CodedOutputStream::vec(&mut buffer)).expect("Failed to dump protobuf");
buffer
}
/// Joins the elements of `v` into one string separated by `delim`.
///
/// Returns an empty string for an empty vector. Elements only need to be
/// displayable by reference (`for<'a> &'a T: Display`).
pub(crate) fn join_vec<T>(v: &Vec<T>, delim: &str) -> String
    where for<'a> &'a T: Display {
    // Stringify each element once, then let `join` place the separators —
    // this replaces the hand-rolled loop with its last-element special case.
    v.iter()
        .map(|elem| elem.to_string())
        .collect::<Vec<String>>()
        .join(delim)
}
use crate::switch::ToCKBCellDataTuple;
use crate::utils::config::{CKB_UNITS, PRE_UNDERCOLLATERAL_RATE, XT_CELL_CAPACITY};
use crate::utils::transaction::{get_price, get_sum_sudt_amount, is_XT_typescript, XChainKind};
use crate::utils::types::{Error, ToCKBCellDataView};
use ckb_std::ckb_constants::Source;
use ckb_std::debug;
use ckb_std::error::SysError;
use ckb_std::high_level::{
load_cell_capacity, load_cell_data, load_cell_lock, load_cell_lock_hash, load_cell_type,
};
use core::result::Result;
use molecule::prelude::*;
/// Entry point of this verifier: derives the lot size for the cell's chain
/// kind (BTC or ETH), then checks the collateral rate and the accompanying
/// XT burn. Presumably this is the pre-undercollateral liquidation path,
/// given the `PRE_UNDERCOLLATERAL_RATE` constant it relies on — confirm.
pub fn verify(toCKB_data_tuple: &ToCKBCellDataTuple) -> Result<(), Error> {
let input_toCKB_data = toCKB_data_tuple
.0
.as_ref()
.expect("inputs contain toCKB cell");
// Lot size (in sUDT units) depends on which chain the cell tracks.
let lot_size = match input_toCKB_data.get_xchain_kind() {
XChainKind::Btc => input_toCKB_data.get_btc_lot_size()?.get_sudt_amount(),
XChainKind::Eth => input_toCKB_data.get_eth_lot_size()?.get_sudt_amount(),
};
verify_collateral_rate(lot_size)?;
verify_burn(lot_size, input_toCKB_data)
}
/// Verifies the XT (sUDT) burn that accompanies this transaction.
///
/// Walks every transaction input and checks two things:
/// 1. at least one input is locked by the signer's lockscript (`is_signer`);
/// 2. input XT amount minus output XT amount equals exactly `lot_size`.
fn verify_burn(lot_size: u128, data: &ToCKBCellDataView) -> Result<(), Error> {
let lock_hash = load_cell_lock_hash(0, Source::GroupInput)?;
let mut is_signer = false;
let mut input_sudt_sum: u128 = 0;
let mut input_index = 0;
// Iterate inputs until the syscall reports IndexOutOfBound (end of inputs).
loop {
let cell_type = load_cell_type(input_index, Source::Input);
match cell_type {
Err(SysError::IndexOutOfBound) => break,
Err(_err) => panic!("iter input return an error"),
Ok(cell_type) => {
let lock = load_cell_lock(input_index, Source::Input)?;
if lock.as_bytes() == data.signer_lockscript {
is_signer = true;
}
// Only XT typescript cells contribute to the sUDT sum.
if !is_XT_typescript(&cell_type, lock_hash.as_ref()) {
input_index += 1;
continue;
}
// sUDT amounts are a little-endian u128 in the first 16 data bytes;
// cells with malformed (non-16-byte) data are silently skipped.
let data = load_cell_data(input_index, Source::Input)?;
let mut buf = [0u8; 16];
if data.len() == 16 {
buf.copy_from_slice(&data);
input_sudt_sum += u128::from_le_bytes(buf)
}
input_index += 1;
}
}
}
if !is_signer {
return Err(Error::InputSignerInvalid);
}
let output_sudt_sum = get_sum_sudt_amount(0, Source::Output, lock_hash.as_ref())?;
// NOTE(review): this subtraction will panic (debug) or wrap (release) if
// outputs exceed inputs; in a contract an abort presumably just rejects the
// transaction, but confirm that is the intended failure mode.
if input_sudt_sum - output_sudt_sum != lot_size {
return Err(Error::XTBurnInvalid);
}
Ok(())
}
/// Checks that the toCKB cell really is undercollateralized.
///
/// Errors with `UndercollateralInvalid` when the priced collateral
/// (capacity minus the reserved XT cell capacity) still meets or exceeds
/// `PRE_UNDERCOLLATERAL_RATE * lot_size` — i.e. the cell is NOT eligible
/// for this path.
/// NOTE(review): `input_capacity - XT_CELL_CAPACITY` underflows if capacity
/// is below `XT_CELL_CAPACITY` — presumably guaranteed elsewhere; confirm.
fn verify_collateral_rate(lot_size: u128) -> Result<(), Error> {
let price = get_price()?;
let input_capacity = load_cell_capacity(0, Source::GroupInput)?;
debug!(
"input_capacity {}, price {}, lot_size {} ",
input_capacity, price, lot_size
);
if (100 * (input_capacity - XT_CELL_CAPACITY) as u128 * price) / (CKB_UNITS as u128)
>= PRE_UNDERCOLLATERAL_RATE as u128 * lot_size
{
return Err(Error::UndercollateralInvalid);
}
Ok(())
}
|
// implements the IO display submodule
// NOTE(review): all three functions below are unimplemented stubs — the
// comments describe intended behavior only; nothing runs yet.
// Displays a window named `window_name` (image parameter not wired up yet).
pub fn show(/*image: &Image*/ window_name: &str) {
}
// Redraws any open image windows without blocking.
pub fn refresh() {
// refresh image windows
// don't sleep any threads. Not equivalent to wait(0)
}
// Refreshes windows, then sleeps for `time` (unit unspecified — confirm),
// or blocks until a keypress when `time == 0`.
pub fn wait(time: u128) {
// refresh image windows
if time > 0 {
// sleep current thread for 'time'
}
else {
// wait until keypress
}
}
use menu::giphy::{GiphyPagination, GiphyResponse};
use serenity::framework::standard::{macros::command, Args, CommandError, CommandResult};
use serenity::model::channel::Message;
use serenity::prelude::*;
// Discord command: searches Giphy for the given keyword and opens a
// paginated gif picker. An empty keyword falls through to Giphy's
// "trending" endpoint inside `query` — presumably intended; confirm.
// (Plain `//` comments on purpose: serenity's #[command] macro treats doc
// comments as the user-facing command description.)
#[command]
#[aliases("gif")]
#[usage = "[keyword]"]
async fn giphy(context: &Context, message: &Message, args: Args) -> CommandResult {
let keyword = args.message().to_string();
let gifs = query(keyword.clone()).await.data;
if gifs.is_empty() {
return Err(CommandError::from(format!(
"No gif was found for `{}`.",
keyword
)));
}
GiphyPagination::init(&context, &message, gifs).await
}
// Calls the Giphy v1 API: `search?q=...` when `query` is non-empty,
// otherwise the `trending` endpoint. Panics (`expect`) if the API key env
// var is missing, the request fails, or the JSON does not deserialize.
// NOTE(review): the URL uses plain `http://`, which transmits the API key in
// cleartext — switch to `https://` if the deployment allows; confirm.
// NOTE(review): `query` is interpolated without URL-encoding; special
// characters/spaces may break the request — verify against real inputs.
pub async fn query(query: String) -> GiphyResponse {
let giphy_key = kankyo::key("GIPHY_API_KEY").expect("giphy_api_token");
let client = reqwest::Client::new();
let endpoint = if !query.is_empty() {
format!("search?q={}&", query)
} else {
"trending?".to_owned()
};
let request = format!(
"http://api.giphy.com/v1/gifs/{}api_key={}&fmt=json",
endpoint, giphy_key
);
let res = client.get(request.as_str()).send().await.expect("response");
res.json().await.expect("json")
}
|
use byteorder::{ByteOrder, LittleEndian};
use std::ops::{Deref, DerefMut};
/// Default, 64kb memory bus
pub struct MemoryBus {
// Flat 64 KiB backing store, addressed directly by `u16` addresses.
ram: [u8; 1024 * 64],
}
impl MemoryBus {
    /// Creates a bus with all 64 KiB of RAM zeroed.
    pub fn new() -> MemoryBus {
        MemoryBus { ram: [0; 1024 * 64] }
    }

    /// Writes a single byte at `addr`.
    pub fn write_byte(&mut self, addr: u16, byte: u8) {
        self.ram[addr as usize] = byte;
    }

    /// Reads a single byte from `addr`.
    pub fn read_byte(&self, addr: u16) -> u8 {
        self.ram[addr as usize]
    }

    /// Reads a little-endian `u16` starting at `addr`.
    ///
    /// The high byte is fetched from `addr + 1` with wrap-around, so a read
    /// at 0xFFFF combines ram[0xFFFF] (low) and ram[0x0000] (high) instead of
    /// panicking on a one-byte slice as the previous slice-based read did.
    /// NOTE(review): address wrap-around is the usual 16-bit bus convention —
    /// confirm it matches the emulated hardware.
    pub fn read_u16(&self, addr: u16) -> u16 {
        let lo = self.read_byte(addr);
        let hi = self.read_byte(addr.wrapping_add(1));
        u16::from_le_bytes([lo, hi])
    }
}
// Used in tests to verify specific memory states
// (exposes the raw RAM array for inspection; not intended as a general API)
impl Deref for MemoryBus {
type Target = [u8; 1024 * 64];
fn deref(&self) -> &Self::Target {
&self.ram
}
}
impl DerefMut for MemoryBus {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.ram
}
}
use pyo3::prelude::*;
use pyo3::{PyObjectProtocol, exceptions};
// use pyo3::class::gc::{PyGCProtocol, PyVisit, PyTraverseError};
use std::fmt;
#[pyclass]
#[derive(Clone)]
pub struct SeqMatrix {
// One String per row; every row holds the same number of CHARS,
// validated in `new_seqmatrix`.
pub data: Vec<String>,
// Cached row count (== data.len()).
rows: usize,
// Cached column count, measured in chars of the first row.
cols: usize,
}
/// Builds a `SeqMatrix` from a list of sequences.
///
/// Every sequence must have the same number of characters as the first one;
/// otherwise an `Err` describing the mismatch is returned. An empty list
/// yields a 0x0 matrix.
pub fn new_seqmatrix(sequences: Vec<String>) -> Result<SeqMatrix, String> {
    let rows = sequences.len();
    let cols = if rows > 0 { sequences[0].chars().count() } else { 0 };
    // Check whether each row has the same number of chars as the first row.
    // (Iterating an empty vector is a no-op, so no `rows > 0` guard needed.)
    for row in sequences.iter() {
        let cnt = row.chars().count();
        if cols != cnt {
            return Err(format!("detected different sequences lengths: {} != {}", cols, cnt))
        }
    }
    // Move `sequences` straight into the struct — the previous version
    // cloned the whole vector for no reason.
    Ok(SeqMatrix{ data: sequences, rows, cols })
}
// Rust functions
impl SeqMatrix {
// Getter functions
// #region
// O(1) getters: `rows`/`cols` are cached fields maintained by the mutating
// methods rather than recomputed from `data`.
/// Returns the number of rows in the sequence matrix.
pub fn _nrows(&self) -> usize {
self.rows
}
/// Returns the number of columns in the sequence matrix.
pub fn _ncols(&self) -> usize {
self.cols
}
// #endregion
// Error methods
// #region
/// Guard clause: errors out when the matrix holds no rows at all.
pub fn _is_empty_matrix(&self) -> Result<(), String> {
    match self.rows {
        0 => Err("empty sequence matrix".to_owned()),
        _ => Ok(()),
    }
}
/// Validates a (possibly negative) row index against the matrix bounds.
///
/// Negative indices count from the end and are invalid when they normalize
/// below zero; non-negative indices must be strictly less than `rows`.
pub fn _is_valid_row_index(&self, i: i32) -> Result<(), String> {
    let n = self.rows as i32;
    // A negative id that normalizes below 0, or a positive id past the end.
    let negative_oob = i < 0 && n + i < 0;
    let positive_oob = i >= n;
    if negative_oob || positive_oob {
        return Err(format!("row ID ({}) is out of range [0,{})", i, self.rows));
    }
    Ok(())
}
/// Validates a (possibly negative) column index against the matrix bounds.
///
/// Mirrors `_is_valid_row_index`, but for columns.
pub fn _is_valid_col_index(&self, i: i32) -> Result<(), String> {
    let n = self.cols as i32;
    // A negative id that normalizes below 0, or a positive id past the end.
    let negative_oob = i < 0 && n + i < 0;
    let positive_oob = i >= n;
    if negative_oob || positive_oob {
        return Err(format!("column ID ({}) is out of range [0,{})", i, self.cols));
    }
    Ok(())
}
// #endregion
// Row methods
// #region
/// Returns a string sequence representing a row in the sequence matrix based on a given row index.
///
/// Negative indices count from the end. An out-of-range id now returns an
/// `Err` — consistent with `_get_rows` — where the previous version panicked
/// on the vector index.
pub fn _get_row(&self, id: i32) -> Result<String, String> {
    self._is_empty_matrix()?;
    // Reject out-of-range ids up front instead of panicking below.
    self._is_valid_row_index(id)?;
    // Convert negative index (count from end) to positive (count from start)
    let id: usize = if id < 0 { (self.rows as i32 + id) as usize } else { id as usize };
    Ok(self.data[id].to_string())
}
/// Returns the sequences at the given row indices (negatives count from the end).
pub fn _get_rows(&self, ids: Vec<i32>) -> Result<Vec<String>, String> {
    self._is_empty_matrix()?;
    // Validating the extreme ids covers every id in between.
    if let Some(max_id) = ids.iter().max() {
        self._is_valid_row_index(*max_id)?;
    }
    if let Some(min_id) = ids.iter().min() {
        self._is_valid_row_index(*min_id)?;
    }
    // Normalize to positive indices, then collect the matching rows.
    let mut result = Vec::with_capacity(ids.len());
    for idx in self._norm_rows(ids) {
        result.push(self.data[idx].clone());
    }
    Ok(result)
}
/// Removes rows from the sequence matrix based on a list of row indices.
// Thin delegation: invert=false means the listed rows are the ones removed.
pub fn _remove_rows(&mut self, ids: Vec<i32>) -> Result<(), String> {
self._drop_rows(ids, false)
}
/// Keeps only the rows at `ids`; an empty list removes every row.
pub fn _retain_rows(&mut self, ids: Vec<i32>) -> Result<(), String> {
    if ids.is_empty() {
        // Retaining nothing is the same as dropping every row.
        let all_rows: Vec<i32> = (0..self._nrows() as i32).collect();
        return self._drop_rows(all_rows, false);
    }
    self._drop_rows(ids, true)
}
/// Generalized method used to remove rows from the sequence matrix.
///
/// With `invert == false` the listed rows are removed; with `invert == true`
/// the listed rows are the only ones kept.
fn _drop_rows(&mut self, ids: Vec<i32>, invert: bool) -> Result<(), String> {
    self._is_empty_matrix()?;
    // Validating the extreme ids covers every id in between.
    if let Some(x) = ids.iter().max() {
        self._is_valid_row_index(*x)?;
    }
    if let Some(x) = ids.iter().min() {
        self._is_valid_row_index(*x)?;
    }
    // Normalize row ids to positive ids
    let rows: Vec<usize> = self._norm_rows(ids);
    // Filter in place with `retain` — the previous version cloned the entire
    // data vector just to rebuild it.
    let mut idx = 0;
    self.data.retain(|_| {
        let listed = rows.contains(&idx);
        idx += 1;
        if invert { listed } else { !listed }
    });
    self.rows = self.data.len();
    Ok(())
}
/// Reorders rows so that new row k is old row `ids[k]`.
pub fn _reorder_rows(&mut self, ids: Vec<i32>) -> Result<(), String> {
    self._is_empty_matrix()?;
    // Every row must be accounted for exactly once by length.
    if ids.len() != self.rows {
        return Err(format!("number of ids ({}) is not equal to the number of rows ({})", ids.len(), self.rows));
    }
    // Validating the extreme ids covers every id in between.
    if let Some(max_id) = ids.iter().max() {
        self._is_valid_row_index(*max_id)?;
    }
    if let Some(min_id) = ids.iter().min() {
        self._is_valid_row_index(*min_id)?;
    }
    // Build the reordered data from the normalized (positive) indices.
    let mut reordered = Vec::with_capacity(self.rows);
    for idx in self._norm_rows(ids) {
        reordered.push(self.data[idx].clone());
    }
    self.data = reordered;
    Ok(())
}
// #endregion
// Column methods
// #region
/// Returns a single contiguous n-char column of the sequence matrix as vector of String for a given column index and chunk size.
///
/// The chunk starts at column `id` (negatives count from the end) and spans
/// `chunk_size` characters of every row. A chunk that runs past the last
/// column now returns an `Err` — previously it panicked on the row slice.
pub fn _get_chunk(&self, id: i32, chunk_size: usize) -> Result<Vec<String>, String> {
    self._is_empty_matrix()?;
    self._is_valid_col_index(id)?;
    let col: usize = if id < 0 { (self.cols as i32 + id) as usize } else { id as usize };
    // The whole chunk must fit inside the matrix, not just its first column.
    if col + chunk_size > self.cols {
        return Err(format!("chunk [{},{}) is out of range [0,{})", col, col + chunk_size, self.cols));
    }
    let sequences: Vec<String> = self.data.iter()
        .map(|row| {
            let row: Vec<char> = row.chars().collect();
            let seq: String = row[col..col+chunk_size].iter().collect();
            seq
        })
        .collect();
    Ok(sequences)
}
/// Returns one or more contiguous n-char columns of the sequence matrix as vector of vector of String for a given vector of column indices and a chunk size.
// Validation strategy: sort a copy of the ids and check only the smallest
// and largest — if the extremes are in range, everything in between is too.
// NOTE(review): only each chunk's START column is validated; a chunk whose
// `col + chunk_size` runs past the last column panics on the row slice.
// Presumably callers guarantee chunks fit — confirm (see `_get_chunk`).
pub fn _get_chunks(&self, ids: Vec<i32>, chunk_size: usize) -> Result<Vec<Vec<String>>, String> {
self._is_empty_matrix()?;
let mut sorted_ids: Vec<i32> = ids.clone();
sorted_ids.sort_unstable();
if sorted_ids.len() == 0 {
return Ok(vec![Vec::new()])
} else if sorted_ids.len() == 1 {
self._is_valid_col_index(sorted_ids[0])?;
} else {
self._is_valid_col_index(sorted_ids[0])?;
self._is_valid_col_index(sorted_ids[sorted_ids.len()-1])?;
}
// Pre-split every row into chars once, then slice per requested column.
let seq_vec: Vec<Vec<char>> = self.data.iter()
.map(|row| row.chars().collect())
.collect();
let sequences_vec: Vec<Vec<String>> = self._norm_cols(ids).into_iter()
.map(|col| {
let sequences: Vec<String> = seq_vec.iter()
.map(|row| row[col..col+chunk_size].iter().collect())
.collect();
sequences
})
.collect();
Ok(sequences_vec)
}
/// Returns the single column at `id` as one string (top row first).
pub fn _get_col(&self, id: i32) -> Result<String, String> {
    // A column is just a 1-char chunk with its pieces concatenated.
    self._get_chunk(id, 1).map(|column| column.join(""))
}
/// Returns each requested column as a joined string, in the order given.
pub fn _get_cols(&self, ids: Vec<i32>) -> Result<Vec<String>, String> {
    // Fetch 1-char chunks and concatenate each column's pieces.
    self._get_chunks(ids, 1)
        .map(|columns| columns.into_iter().map(|chars| chars.join("")).collect())
}
/// Removes columns from the sequence matrix based on a list of column indices.
// Thin delegation: invert=false means the listed columns are the ones removed.
pub fn _remove_cols(&mut self, ids: Vec<i32>) -> Result<(), String> {
self._drop_cols(ids, false)
}
/// Keeps only the columns at `ids`; an empty list removes every column.
pub fn _retain_cols(&mut self, ids: Vec<i32>) -> Result<(), String> {
    if ids.is_empty() {
        // Retaining nothing is the same as dropping every column.
        let all_cols: Vec<i32> = (0..self._ncols() as i32).collect();
        return self._drop_cols(all_cols, false);
    }
    self._drop_cols(ids, true)
}
/// Generalized method used to remove columns from the sequence matrix.
///
/// With `invert == false` the listed columns are removed; with
/// `invert == true` the listed columns are the only ones kept.
fn _drop_cols(&mut self, ids: Vec<i32>, invert: bool) -> Result<(), String> {
    self._is_empty_matrix()?;
    let mut sorted_ids: Vec<i32> = ids.clone();
    sorted_ids.sort_unstable();
    if sorted_ids.len() == 0 {
        return Ok(());
    } else if sorted_ids.len() == 1 {
        self._is_valid_col_index(sorted_ids[0])?;
    } else {
        // Validating the extreme ids covers every id in between.
        self._is_valid_col_index(sorted_ids[0])?;
        self._is_valid_col_index(sorted_ids[sorted_ids.len() - 1])?;
    }
    let cols: Vec<usize> = self._norm_cols(ids);
    // Rewrite each row in place. Use `chars().enumerate()` so filtering works
    // on CHARACTER positions — the previous `char_indices()` yielded BYTE
    // offsets, silently removing the wrong columns for multi-byte sequences.
    for row in self.data.iter_mut() {
        let filtered: String = row.chars()
            .enumerate()
            .filter(|(i, _)| {
                if invert {
                    // cols in ids will be retained
                    cols.contains(i)
                } else {
                    // cols in ids will be removed
                    !cols.contains(i)
                }
            })
            .map(|(_, c)| c)
            .collect();
        *row = filtered;
    }
    // Count chars, not bytes, to stay consistent with `new_seqmatrix`.
    self.cols = self.data[0].chars().count();
    Ok(())
}
/// Reorders columns so that new column k is old column `ids[k]`.
pub fn _reorder_cols(&mut self, ids: Vec<i32>) -> Result<(), String> {
    self._is_empty_matrix()?;
    if ids.len() != self.cols {
        // Bug fix: the message previously interpolated `self.rows` here,
        // reporting the wrong dimension.
        return Err(format!("number of ids ({}) is not equal to the number of columns ({})", ids.len(), self.cols))
    }
    // Validating the extreme ids covers every id in between.
    if let Some(x) = ids.iter().max() {
        self._is_valid_col_index(*x)?;
    }
    if let Some(x) = ids.iter().min() {
        self._is_valid_col_index(*x)?;
    }
    // Normalize col ids to positive ids
    let cols: Vec<usize> = self._norm_cols(ids);
    // Rebuild each row in place using the normalized column order — no need
    // to clone the whole data vector first.
    for row in self.data.iter_mut() {
        let seq_vec: Vec<char> = row.chars().collect();
        *row = cols.iter().map(|j| seq_vec[*j]).collect();
    }
    Ok(())
}
// #endregion
// SeqMatrix methods
/// Concatenates sequence matrices across columns, preserving the number of rows.
///
/// Every matrix in `others` must have the same row count as `self`,
/// otherwise an `Err` is returned and nothing is modified.
pub fn _concat(&mut self, others: Vec<&SeqMatrix>) -> Result<(), String> {
    self._is_empty_matrix()?;
    if others.is_empty() {
        return Ok(());
    }
    // Reject the whole batch if any matrix has a mismatched row count.
    if others.iter().any(|m| m.rows != self.rows) {
        return Err(format!("number of rows of other matrices is not equal to {}", self.rows));
    }
    // Append each other matrix row-by-row onto our rows, in order.
    for other in others.iter() {
        for j in 0..self.data.len() {
            self.data[j].push_str(&other.data[j]);
        }
    }
    // Count chars, not bytes, to stay consistent with `new_seqmatrix`
    // (the previous `len()` counted bytes, wrong for multi-byte sequences).
    self.cols = self.data[0].chars().count();
    Ok(())
}
// TODO: implement clone()
pub fn _copy(&self) -> SeqMatrix {
SeqMatrix{
data: self.data.clone(),
rows: self.rows,
cols: self.cols,
}
}
// Utility methods
// #region
/// Converts possibly-negative row indices into absolute from-the-start ones.
pub fn _norm_rows(&self, ids: Vec<i32>) -> Vec<usize> {
    let n = self.rows as i32;
    ids.into_iter()
        .map(|i| if i < 0 { (n + i) as usize } else { i as usize })
        .collect()
}
/// Converts possibly-negative column indices into absolute from-the-start ones.
pub fn _norm_cols(&self, ids: Vec<i32>) -> Vec<usize> {
    let n = self.cols as i32;
    ids.into_iter()
        .map(|i| if i < 0 { (n + i) as usize } else { i as usize })
        .collect()
}
/// Returns every row index NOT present in `ids`, in ascending order.
pub fn _invert_rows(&self, ids: Vec<usize>) -> Vec<usize> {
    let mut kept = Vec::new();
    for i in 0..self.rows {
        if !ids.contains(&i) {
            kept.push(i);
        }
    }
    kept
}
/// Returns every column index NOT present in `ids`, in ascending order.
pub fn _invert_cols(&self, ids: Vec<usize>) -> Vec<usize> {
    let mut kept = Vec::new();
    for i in 0..self.cols {
        if !ids.contains(&i) {
            kept.push(i);
        }
    }
    kept
}
// #endregion
}
// Wrappers for pyo3
#[pymethods]
impl SeqMatrix {
#[new]
/// Creates a new SeqMatrix object from a list of sequences.
// Uses the pre-0.7 PyO3 constructor protocol (`PyRawObject` + `obj.init`).
fn __new__(obj: &PyRawObject, sequences: Vec<String>) -> PyResult<()> {
// Validation failures (unequal sequence lengths) become Python ValueError.
let seq_matrix = match new_seqmatrix(sequences) {
Ok(x) => x,
Err(x) => return Err(exceptions::ValueError::py_err(x)),
};
// Instantiates the struct
obj.init(|_| seq_matrix)
}
// Python-visible read-only properties.
// NOTE(review): the `as i32` casts truncate for matrices with more than
// i32::MAX rows/columns — practically unreachable, but worth knowing.
#[getter]
/// int: Returns the number of rows in the BaseAlignment.
fn nrows(&self) -> PyResult<i32> {
Ok(self._nrows() as i32)
}
#[getter]
/// int: Returns the number of columns in the alignment.
fn ncols(&self) -> PyResult<i32> {
Ok(self._ncols() as i32)
}
#[getter]
/// list of str: Returns the list of sequences.
fn data(&self) -> PyResult<Vec<String>> {
Ok(self.data.clone())
}
// Row methods
// #region
// Thin PyO3 wrappers: each delegates to the underscore-prefixed Rust method
// above and translates its String error into a Python IndexError.
/// get_row(id, /)
/// --
///
/// Returns a string sequence from the sequence matrix based on the given row index.
fn get_row(&self, id: i32) -> PyResult<String> {
match self._get_row(id) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// get_rows(ids, /)
/// --
///
/// Returns a list of string sequences from the sequence matrix based on the given list of row indices.
fn get_rows(&self, ids: Vec<i32>) -> PyResult<Vec<String>> {
match self._get_rows(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// remove_rows(ids, /)
/// --
///
/// Removes rows from the sequence matrix based on a list of row indices.
fn remove_rows(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._remove_rows(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// retain_records(indices, /)
///
/// Keep rows matching the specified row indices, and removes everything else.
fn retain_rows(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._retain_rows(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// reorder_records(ids, /)
/// --
///
/// Reorders the sequences inplace based on a list of current row indices.
pub fn reorder_rows(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._reorder_rows(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
// #endregion
// Column methods
// #region
// Wrappers mirroring the row wrappers: delegate to the Rust implementation
// and surface failures as Python IndexError.
/// get_chunk(id, chunk_size, /)
/// --
///
/// Returns a single contiguous n-char column of the sequence matrix as list of str for a given column index and chunk size.
fn get_chunk(&self, id: i32, chunk_size: usize)
-> PyResult<Vec<String>> {
match self._get_chunk(id, chunk_size) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// get_chunks(ids, chunk_size, /)
/// --
///
/// Returns one or more contiguous n-char columns of the sequence matrix as list of list of str for a given vector of column indices and a chunk size.
fn get_chunks(&self, ids: Vec<i32>, chunk_size: usize)
-> PyResult<Vec<Vec<String>>> {
match self._get_chunks(ids, chunk_size) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// get_col(id, /)
/// --
///
/// Returns a list of sequence representing a column in the sequence matrix based on the given column index.
fn get_col(&self, id: i32) -> PyResult<String> {
match self._get_col(id) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// get_cols(ids, /)
/// --
///
/// Returns a list of list of sequences representing columns in the sequence matrix based on the given list of column indices.
fn get_cols(&self, ids: Vec<i32>) -> PyResult<Vec<String>> {
match self._get_cols(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
// In-place column mutators; errors surface as Python IndexError.
/// remove_cols(indices, /)
/// --
///
/// Removes many alignment columns simultaneously based on a list of column indices.
pub fn remove_cols(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._remove_cols(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// retain_cols(indices, /)
///
/// Keep alignment columns at the specified column indices and removes everything else.
pub fn retain_cols(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._retain_cols(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
/// reorder_cols(ids, /)
/// --
///
/// Reorders the alignment columns inplace based on a list of current column indices.
pub fn reorder_cols(&mut self, ids: Vec<i32>) -> PyResult<()> {
match self._reorder_cols(ids) {
Ok(res) => Ok(res),
Err(x) => Err(exceptions::IndexError::py_err(x)),
}
}
// #endregion
// SeqMatrix methods
// #region
/// concat(others, /)
/// --
///
/// Concatenates a list of other matrices onto this matrix in place, extending its columns.
// (The old docstring claimed a new matrix is returned; the method actually
// mutates `self` and returns None, raising ValueError on row-count mismatch.)
pub fn concat(&mut self, others: Vec<&SeqMatrix>) -> PyResult<()> {
match self._concat(others) {
Ok(res) => Ok(res),
Err(x) => return Err(exceptions::ValueError::py_err(x)),
}
}
/// invert_rows(ids, /)
/// --
///
/// Returns row indices that are not part of the given list of row indices.
fn invert_rows(&self, ids: Vec<usize>) -> PyResult<Vec<usize>> {
Ok(self._invert_rows(ids))
}
/// invert_cols(ids, /)
/// --
///
/// Returns column indices that are not part of the given list of column indices.
fn invert_cols(&self, ids: Vec<usize>) -> PyResult<Vec<usize>> {
Ok(self._invert_cols(ids))
}
/// copy()
/// --
///
/// Returns a deep copy of the current sequence matrix.
fn copy(&self) -> PyResult<SeqMatrix> {
Ok(self._copy())
}
// #endregion
}
// Customizes __repr__ and __str__ of PyObjectProtocol trait
#[pyproto]
impl PyObjectProtocol for SeqMatrix {
    /// Python repr: concise summary with the matrix dimensions.
    fn __repr__(&self) -> PyResult<String> {
        Ok(format!("SeqMatrix(nrows={nrows}, ncols={ncols})",
            nrows=self.nrows()?, ncols=self.ncols()?))
    }

    /// Python str: one sequence per line.
    fn __str__(&self) -> PyResult<String> {
        // `join` only needs a slice — the previous `self.data.clone()`
        // copied every sequence for nothing.
        Ok(self.data.join("\n"))
    }

    // Determines the "truthyness" of the object: falsy iff there are no columns.
    fn __bool__(&self) -> PyResult<bool> {
        Ok(self.ncols()? != 0)
    }
}
// Implements equality comparison between SeqMatrix structs
impl PartialEq for SeqMatrix {
// Equal when the sequence data and both cached dimensions all match.
fn eq(&self, other: &SeqMatrix) -> bool {
self.data == other.data && self.rows == other.rows && self.cols == other.cols
}
}
// Implements Debug in order to use format! and other printout methods
impl fmt::Debug for SeqMatrix {
// Developer-facing dump of all three fields (distinct from Python __repr__).
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "SeqMatrix {{ data: {:?}, rows: {}, cols: {} }}", self.data, self.rows, self.cols)
}
}
// #[pyproto]
// impl PyGCProtocol for BaseAlignment {
// fn __traverse__(&self, visit: PyVisit) -> Result<(), PyTraverseError> {
// if self.records.len() > 0 {
// for obj_ref in self.records.iter() {
// visit.call(obj_ref)?
// }
// }
// Ok(())
// }
// fn __clear__(&mut self) {
// if let Some(obj) = self.obj.take() {
// // Release reference, this decrements ref counter.
// self.py().release(obj);
// }
// }
// }
// #[pyfunction]
// pub fn from_list(sequences: Vec<String>) -> PyResult<BaseAlignment> {
// let data: Vec<String> = sequences.iter().map(
// |seq| seq.clone()
// ).collect();
// Ok(BaseAlignment{ data })
// }
// Register python functions to PyO3
// Module init: exposes `SeqMatrix` as the Python module `alignment`
// (pre-0.7 PyO3 `#[pymodinit]` attribute).
#[pymodinit]
fn alignment(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_class::<SeqMatrix>()?;
// m.add_function(wrap_function!(from_list))?;
Ok(())
}
mod test {
use super::*;
// Test SeqMatrix creation
#[test]
fn test_new_seqmatrix() {
let exp = SeqMatrix{
data: vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
],
rows: 4,
cols: 4,
};
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
assert_eq!(exp, res);
}
// Test SeqMatrix methods
// Test getter methods
#[test]
fn test_nrows() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
assert_eq!(res.rows, 4);
}
#[test]
fn test_ncols() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
assert_eq!(res.cols, 4);
}
// TODO: Place subsequent tests in another file
// Test methods that used for checking
#[test]
fn test_is_empty_matrix() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
res._is_empty_matrix().unwrap();
}
#[test]
#[should_panic(expected = "empty sequence matrix")]
fn test_is_empty_matrix_empty() {
let res = new_seqmatrix(vec![]).unwrap();
res._is_empty_matrix().unwrap();
}
#[test]
fn test_is_valid_row_index() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
res._is_valid_row_index(3).unwrap();
}
#[test]
#[should_panic(expected = "row ID (4) is out of range [0,4)")]
fn test_is_valid_row_index_invalid() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
res._is_valid_row_index(4).unwrap();
}
#[test]
fn test_is_valid_col_index() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
res._is_valid_col_index(3).unwrap();
}
#[test]
#[should_panic(expected = "column ID (4) is out of range [0,4)")]
fn test_is_valid_col_index_invalid() {
let res = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
res._is_valid_col_index(4).unwrap();
}
// Test row methods
// Test _get_row and _get_rows
#[test]
fn test_get_row() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_row(0).unwrap();
assert_eq!(res, "atcg");
}
#[test]
fn test_get_row_negative_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_row(-1).unwrap();
assert_eq!(res, "tagc");
}
#[test]
fn test_get_rows() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_rows(vec![0,2]).unwrap();
assert_eq!(res, vec!["atcg", "atcc"]);
}
#[test]
fn test_get_rows_mixed_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_rows(vec![0,-1]).unwrap();
assert_eq!(res, vec!["atcg", "tagc"]);
}
#[test]
// Test remove rows
fn test_remove_rows() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec!["atgg".to_string(), "tagc".to_string()]).unwrap();
mat._remove_rows(vec![0, 2]).unwrap();
assert_eq!(mat, exp);
}
#[test]
// Test retain rows
fn test_retain_rows() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec!["atcg".to_string(), "atcc".to_string()]).unwrap();
mat._retain_rows(vec![0, 2]).unwrap();
assert_eq!(mat, exp);
}
#[test]
// Test retain rows base - drop rows, true
fn test_drop_rows_true() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec!["atcg".to_string(), "atcc".to_string()]).unwrap();
mat._drop_rows(vec![0, 2], true).unwrap();
assert_eq!(mat, exp);
}
#[test]
// Test remove rows base - drop rows, false
fn test_drop_rows_false() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec!["atgg".to_string(), "tagc".to_string()]).unwrap();
mat._drop_rows(vec![0, 2], false).unwrap();
assert_eq!(mat, exp);
}
#[test]
// Test reorder rows
fn test_reorder_rows() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec![
"atgg".to_string(), // 1
"tagc".to_string(), // 3
"atcg".to_string(), // 0
"atcc".to_string(), // 2
]).unwrap();
mat._reorder_rows(vec![1, 3, 0, 2]).unwrap();
assert_eq!(mat, exp);
}
// Test column methods
// Test _get_chunk and _get_chunks
#[test]
fn test_get_chunk() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_chunk(0, 1).unwrap();
assert_eq!(res, vec!["a","a","a","t"]);
}
#[test]
fn test_get_chunk_negative_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_chunk(-1, 1).unwrap();
assert_eq!(res, vec!["g","g","c","c"]);
}
#[test]
fn test_get_chunk_3() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_chunk(0, 3).unwrap();
assert_eq!(res, vec!["atc","atg","atc","tag"]);
}
#[test]
fn test_get_chunks() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_chunks(vec![0,2], 1).unwrap();
assert_eq!(res, vec![vec!["a","a","a","t"],vec!["c","g","c","g"]]);
}
#[test]
fn test_get_chunks_mixed_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_chunks(vec![0,-1], 1).unwrap();
assert_eq!(res, vec![vec!["a","a","a","t"],vec!["g","g","c","c"]]);
}
#[test]
fn test_get_chunks_3() {
let mat = new_seqmatrix(vec![
"atcgt".to_string(),
"atggt".to_string(),
"atccg".to_string(),
"tagcc".to_string(),
]).unwrap();
let res = mat._get_chunks(vec![0,2], 3).unwrap();
assert_eq!(res, vec![vec!["atc","atg","atc","tag"],vec!["cgt","ggt","ccg","gcc"]]);
}
// Tests _get_col and _get_cols
#[test]
fn test_get_col() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_col(0).unwrap();
assert_eq!(res, "aaat");
}
#[test]
fn test_get_col_negative_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_col(-1).unwrap();
assert_eq!(res, "ggcc");
}
#[test]
fn test_get_cols() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_cols(vec![0,2]).unwrap();
assert_eq!(res, vec!["aaat", "cgcg"]);
}
#[test]
fn test_get_cols_mixed_index() {
let mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let res = mat._get_cols(vec![0,-1]).unwrap();
assert_eq!(res, vec!["aaat", "ggcc"]);
}
// Tests _remove_cols
#[test]
fn test_remove_cols() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec![
"tg".to_string(),
"tg".to_string(),
"tc".to_string(),
"ac".to_string(),
]).unwrap();
mat._remove_cols(vec![0, 2]).unwrap();
assert_eq!(mat, exp);
}
// Tests _retain_cols
#[test]
fn test_retain_cols() {
let mut mat = new_seqmatrix(vec![
"atcg".to_string(),
"atgg".to_string(),
"atcc".to_string(),
"tagc".to_string(),
]).unwrap();
let exp = new_seqmatrix(vec![
"ac".to_string(),
"ag".to_string(),
"ac".to_string(),
"tg".to_string(),
]).unwrap();
mat._retain_cols(vec![0, 2]).unwrap();
assert_eq!(mat, exp);
}
// Tests _retain_cols base - _drop_cols, true
#[test]
fn test_drop_cols_true() {
    // _drop_cols(_, true) behaves like _retain_cols.
    let mut mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    let exp = new_seqmatrix(vec![
        "ac".to_string(),
        "ag".to_string(),
        "ac".to_string(),
        "tg".to_string(),
    ]).unwrap();
    mat._drop_cols(vec![0, 2], true).unwrap();
    assert_eq!(mat, exp);
}
// Test _remove_cols base - _drop_cols, false
#[test]
fn test_drop_cols_false() {
    // _drop_cols(_, false) behaves like _remove_cols.
    let mut mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    let exp = new_seqmatrix(vec![
        "tg".to_string(),
        "tg".to_string(),
        "tc".to_string(),
        "ac".to_string(),
    ]).unwrap();
    mat._drop_cols(vec![0, 2], false).unwrap();
    assert_eq!(mat, exp);
}
// Test _reorder_cols
#[test]
fn test_reorder_cols() {
    // New order [3, 0, 1, 2] rotates the last column to the front.
    let mut mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    let exp = new_seqmatrix(vec![
        "gatc".to_string(),
        "gatg".to_string(),
        "catc".to_string(),
        "ctag".to_string(),
    ]).unwrap();
    mat._reorder_cols(vec![3, 0, 1, 2]).unwrap();
    assert_eq!(mat, exp);
}
// Test normalization of index values
#[test]
fn test_norm_rows() {
    // Negative row indices normalize to their positive equivalents (len 4).
    let mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    assert_eq!(mat._norm_rows(vec![0, -1, 2, -3, -4, 3]), vec![0, 3, 2, 1, 0, 3]);
}
#[test]
fn test_norm_cols() {
    // Same normalization rule applied to column indices.
    let mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    assert_eq!(mat._norm_cols(vec![0, -1, 2, -3, -4, 3]), vec![0, 3, 2, 1, 0, 3]);
}
// Test index inversion
#[test]
fn test_invert_rows() {
    // Inversion returns the complement set of row indices.
    let mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    assert_eq!(mat._invert_rows(vec![3, 0]), vec![1, 2]);
}
#[test]
fn test_invert_cols() {
    // Inversion returns the complement set of column indices.
    let mat = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    assert_eq!(mat._invert_cols(vec![3, 0]), vec![1, 2]);
}
// Tests concatenation
#[test]
fn test_concat() {
    // Concatenation appends mat2's rows column-wise onto mat1 in place.
    let mut mat1 = new_seqmatrix(vec![
        "atcg".to_string(),
        "atgg".to_string(),
        "atcc".to_string(),
        "tagc".to_string(),
    ]).unwrap();
    let mat2 = new_seqmatrix(vec![
        "aaaa".to_string(),
        "tttt".to_string(),
        "cccc".to_string(),
        "gggg".to_string(),
    ]).unwrap();
    let exp = new_seqmatrix(vec![
        "atcgaaaa".to_string(),
        "atggtttt".to_string(),
        "atcccccc".to_string(),
        "tagcgggg".to_string(),
    ]).unwrap();
    mat1._concat(vec![&mat2]).unwrap();
    assert_eq!(mat1, exp);
}
} |
use crate::ui::color::Color;
#[derive(Default)]
/// In-flight cursor movement: interpolates from `start` to `end` between
/// `start_time` and `end_time` (same time base as the frame clock).
pub struct Animation {
    // Start position, (row, col).
    start: (f64, f64),
    // Target position, (row, col).
    end: (f64, f64),
    // Frame timestamp when the animation began.
    start_time: i64,
    // Frame timestamp when the animation completes.
    end_time: i64,
}
#[derive(Default)]
/// Rendered cursor state: position, blink phase, and any in-flight
/// movement animation.
pub struct Cursor {
    /// Position, (row, col). `None` until the first `goto`.
    pub pos: Option<(f64, f64)>,
    /// Flag for disabling the movement animation.
    pub disable_animation: bool,
    /// Current movement animation, if one is running.
    pub animation: Option<Animation>,
    /// Alpha color. Used to make the cursor blink.
    pub alpha: f64,
    /// The duration of the blink.
    pub blink_on: u64,
    /// Width of the cursor.
    pub cell_percentage: f64,
    /// Color of the cursor.
    pub color: Color,
}
impl Cursor {
    /// Requests a move to `(row, col)`. The first call (and any call while
    /// `disable_animation` is set) moves instantly; otherwise a movement
    /// animation is scheduled and advanced by `tick`.
    pub fn goto(&mut self, row: f64, col: f64, frame_time: i64) {
        // When we get our first cursor_goto, set the position directly.
        if self.pos.is_none() {
            self.pos = Some((row, col));
        }
        // If cursor animation is disabled, set the position directly. Otherwise, set the animation
        // so that we can animate cursor position change.
        if self.disable_animation {
            self.pos = Some((row, col));
        } else {
            // NOTE(review): end_time = frame_time + duration * 1000, so
            // frame_time appears to be in 1/1000ths of the duration unit
            // (e.g. ms duration over a µs clock) — TODO confirm time base.
            let duration = 100;
            self.animation = Some(Animation {
                start: self.pos.unwrap(),
                end: (row, col),
                start_time: frame_time,
                end_time: frame_time + 1000 * duration,
            });
        }
    }
    /// Advances blink phase and movement animation for one frame.
    pub fn tick(&mut self, frame_time: i64) {
        self.blink();
        self.animate_position(frame_time);
    }
    // Advances `alpha` for the blink effect; wraps back to 0 past 2.0.
    fn blink(&mut self) {
        // If we dont need to blink, return.
        if self.blink_on == 0 {
            return;
        }
        // Assuming a 60hz framerate
        self.alpha += 100.0 / (6.0 * self.blink_on as f64);
        if self.alpha > 2.0 {
            self.alpha = 0.0;
        }
    }
    // Eased interpolation toward the animation target; clears the animation
    // once the end time has passed or the target is reached.
    fn animate_position(&mut self, frame_time: i64) {
        if let Some(Animation {
            start,
            end,
            start_time,
            end_time,
        }) = self.animation
        {
            let mut pos = self.pos.unwrap_or((0.0, 0.0));
            if frame_time < end_time && pos != end {
                // Normalized progress in [0, 1), then cubic ease-out.
                let mut t = (frame_time - start_time) as f64
                    / (end_time - start_time) as f64;
                t = ease_out_cubic(t);
                pos.0 = start.0 + t * (end.0 - start.0);
                pos.1 = start.1 + t * (end.1 - start.1);
                self.pos = Some(pos);
            } else {
                self.pos = Some(end);
                self.animation = None;
            }
        }
    }
    /// Gets the position of the cursor.
    pub fn get_position(&self) -> Option<(f64, f64)> {
        if let Some(ref a) = self.animation {
            // The end position of our animation is the "real" position where
            // the cursor is.
            Some(a.end)
        } else {
            self.pos
        }
    }
}
/// Cubic ease-out: rapid start, decelerating approach to 1.0 at `t == 1`.
///
/// From clutter-easing.c, based on Robert Penner's
/// infamous easing equations, MIT license.
fn ease_out_cubic(t: f64) -> f64 {
    let s = t - 1f64;
    s.powi(3) + 1f64
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_cursor_blink100() {
        // One blink step adds 100 / (6 * 100) = 1/6 to alpha.
        let mut cursor = Cursor::default();
        cursor.blink_on = 100;
        cursor.alpha = 1.0;
        cursor.blink();
        assert_eq!(cursor.alpha, 1.1666666666666667);
    }
    #[test]
    fn test_cursor_blink0() {
        // blink_on == 0 disables blinking entirely.
        let mut cursor = Cursor::default();
        cursor.blink_on = 0;
        cursor.alpha = 1.0;
        cursor.blink();
        assert_eq!(cursor.alpha, 1.0);
    }
    #[test]
    fn test_first_position() {
        let mut cursor = Cursor::default();
        // When we first set the position, it should be set immediately.
        cursor.goto(15.0, 15.0, 1);
        assert_eq!(cursor.pos, Some((15.0, 15.0)));
        // When we've set the position once already, the subsequent goto positions should be set
        // with some delay by the animation.
        cursor.goto(10.0, 10.0, 1);
        assert_eq!(cursor.pos, Some((15.0, 15.0)));
    }
    #[test]
    fn test_animate_position() {
        let mut cursor = Cursor::default();
        // When we first set the position, it should be set immediately.
        cursor.goto(15.0, 15.0, 1);
        assert_eq!(cursor.pos, Some((15.0, 15.0)));
        cursor.goto(10.0, 10.0, 1);
        // Partway through the animation the position is eased between
        // start (15, 15) and end (10, 10).
        cursor.tick(25000);
        assert_eq!(cursor.pos, Some((12.109459376125006, 12.109459376125006)));
    }
    #[test]
    fn test_animate_position_animation_disabled() {
        let mut cursor = Cursor::default();
        cursor.disable_animation = true;
        // When we first set the position, it should be set immediately.
        cursor.goto(15.0, 15.0, 1);
        assert_eq!(cursor.pos, Some((15.0, 15.0)));
        // Position animation is disabled, goto should change the position directly and tick
        // shouldn't affect the position value at all.
        cursor.goto(10.0, 10.0, 1);
        assert_eq!(cursor.pos, Some((10.0, 10.0)));
        cursor.tick(25000);
        assert_eq!(cursor.pos, Some((10.0, 10.0)));
    }
    #[test]
    fn test_get_position() {
        // With an animation active, get_position reports the animation target.
        let mut cursor = Cursor::default();
        assert_eq!(cursor.get_position(), None);
        cursor.pos = Some((10.0, 10.0));
        assert_eq!(cursor.get_position(), Some((10.0, 10.0)));
        cursor.animation = Some(Animation {
            end: (15.0, 15.0),
            ..Animation::default()
        });
        assert_eq!(cursor.get_position(), Some((15.0, 15.0)));
    }
}
|
extern crate num_bigint;
use std::io::{self, Read};
use num_bigint::{BigUint};
fn main() -> io::Result<()> {
    // Parse a GPG RSA private key read from stdin and print the public
    // modulus `n` and exponent `e`.
    let mut buffer = Vec::new();
    io::stdin().read_to_end(&mut buffer)?;
    // Fixed layout assumed for a 1024-bit key: `n` begins at offset 11 and
    // is 128 bytes; a 2-byte MPI bit-count header follows, then the 3-byte
    // exponent. NOTE(review): offsets are hard-coded for one key format.
    const N_START: usize = 11;
    const N_LEN: usize = 128;
    const E_START: usize = N_START + N_LEN + 2;
    const E_LEN: usize = 3;
    // Guard the slices so malformed/short input yields an error, not a panic.
    if buffer.len() < E_START + E_LEN {
        return Err(io::Error::new(
            io::ErrorKind::UnexpectedEof,
            "input shorter than expected key layout",
        ));
    }
    let ns = &buffer[N_START..N_START + N_LEN];
    let es = &buffer[E_START..E_START + E_LEN];
    // BUG FIX: OpenPGP MPIs are stored big-endian (RFC 4880 section 3.2);
    // decoding them little-endian produced wrong values for n and e.
    let n = BigUint::from_bytes_be(ns);
    let e = BigUint::from_bytes_be(es);
    println!("{} {}", n, e);
    // find private key tag
    // first bit of tag always set
    //          /
    //          | /-\ = length-type = 1, two-octet length
    // 0x95 = || 1 0 0 1 // 0 1 0 1 ||
    //            \tag = 5/
    // next two bytes encode length, 0x01d8 = 472
    // Extract Public Key information
    // Extract Private Key information
    // Find Packet Tag 7
    // 0x97 = || 1 0 0 1 // 1 1 0 1 ||
    // bits 5-2 must encode 7
    Ok(())
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Paged list of managed private endpoints; `next_link` points at the
/// following page when present.
pub struct ManagedPrivateEndpointListResponse {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ManagedPrivateEndpoint>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// A single managed private endpoint resource (id/name/type envelope plus
/// its properties payload).
pub struct ManagedPrivateEndpoint {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ManagedPrivateEndpointProperties>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Properties of a managed private endpoint. All fields are optional and
/// omitted from serialization when absent.
pub struct ManagedPrivateEndpointProperties {
    #[serde(rename = "privateLinkResourceId", default, skip_serializing_if = "Option::is_none")]
    pub private_link_resource_id: Option<String>,
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "connectionState", default, skip_serializing_if = "Option::is_none")]
    pub connection_state: Option<ManagedPrivateEndpointConnectionState>,
    #[serde(rename = "isReserved", default, skip_serializing_if = "Option::is_none")]
    pub is_reserved: Option<bool>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
/// Connection state of a managed private endpoint.
pub struct ManagedPrivateEndpointConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<String>,
}
|
mod common;
#[test]
#[cfg(feature = "devkit-arm-tests")]
// Executes an ARM program that raises SWI #6 and checks that the SWI
// handler ran (r1 == 5), execution resumed after the call (r2 == 6), and
// the handler's saved return address (r4) matches r3 (= return_point).
pub fn test_swi() {
    let (cpu, _mem) = common::execute_arm(
        "swi",
        "
        b main
        b undefined_handler
        b swi_handler
        main:
        mov r0, #4
        ldr r3, =return_point
        swi #6
        return_point:
        mov r2, #6
        b _exit
        undefined_handler:
        b _exit
        swi_handler:
        mov r1, #5
        mov r4, r14
        movs r15, r14
        b _exit
        ",
    );
    assert_eq!(cpu.registers.read(0), 4);
    assert_eq!(cpu.registers.read(1), 5);
    assert_eq!(cpu.registers.read(2), 6);
    assert_eq!(cpu.registers.read(3), cpu.registers.read(4));
}
|
//! Manage the target observer
//!
//! The interogation that lading does of the target sub-process is intentionally
//! limited to in-process concerns, for the most part. The 'inspector' does
//! allow for a sub-process to do out-of-band inspection of the target but
//! cannot incorporate whatever it's doing into the capture data that lading
//! produces. This observer, on Linux, looks up the target process in procfs and
//! writes out key details about memory and CPU consumption into the capture
//! data. On non-Linux systems the observer, if enabled, will emit a warning.
use std::{io, sync::atomic::AtomicU64};
use nix::errno::Errno;
use serde::Deserialize;
use tokio::{self, sync::broadcast::Receiver};
use tracing;
use crate::signals::Shutdown;
#[cfg(target_os = "linux")]
use procfs::process::Process;
/// Expose the process' current RSS consumption, allowing abstractions to be
/// built on top in the Target implementation.
// Written once per sampling tick in `Server::run`; Relaxed ordering suffices
// since the value is a standalone gauge with no ordering dependencies.
pub(crate) static RSS_BYTES: AtomicU64 = AtomicU64::new(0);
#[derive(Debug)]
/// Errors produced by [`Server`]
pub enum Error {
    /// Wrapper for [`nix::errno::Errno`]
    Errno(Errno),
    /// Wrapper for [`std::io::Error`]
    Io(io::Error),
    #[cfg(target_os = "linux")]
    /// Wrapper for [`procfs::ProcError`]
    ProcError(procfs::ProcError),
}
#[derive(Debug, Deserialize, Clone, Copy, Default, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
/// Configuration for [`Server`]
// Intentionally empty: the observer currently takes no options.
pub struct Config {}
#[derive(Debug)]
/// The inspector sub-process server.
///
/// This struct manages a sub-process that can be used to do further examination
/// of the [`crate::target::Server`] by means of operating system facilities. The
/// sub-process is not created until [`Server::run`] is called. It is assumed
/// that only one instance of this struct will ever exist at a time, although
/// there are no protections for that.
pub struct Server {
    #[allow(dead_code)] // config is not actively used, left as a stub
    config: Config,
    #[allow(dead_code)] // this field is unused when target_os is not "linux"
    shutdown: Shutdown,
}
impl Server {
    /// Create a new [`Server`] instance
    ///
    /// The observer `Server` is responsible for investigating the
    /// [`crate::target::Server`] sub-process.
    ///
    /// # Errors
    ///
    /// Function will error if the path to the sub-process is not valid or if
    /// the path is valid but is not to file executable by this program.
    pub fn new(config: Config, shutdown: Shutdown) -> Result<Self, Error> {
        Ok(Self { config, shutdown })
    }
    /// Get all children of the specified process.
    ///
    /// This ignores most errors in favor of creating a best-effort list of
    /// children.
    #[cfg(target_os = "linux")]
    fn get_all_children(process: Process) -> Result<Vec<Process>, Error> {
        // Walk tasks -> children -> recurse; `flat_map`/`flatten` silently
        // drop entries that error (e.g. processes that exited mid-walk).
        // The starting process itself is appended at the end of the list.
        let tree = process
            .tasks()
            .map_err(Error::ProcError)?
            .flatten()
            .flat_map(|t| t.children())
            .flatten()
            .flat_map(TryInto::try_into)
            .flat_map(Process::new)
            .flat_map(Self::get_all_children)
            .flatten()
            .chain(std::iter::once(process))
            .collect();
        Ok(tree)
    }
    /// Get process stats for the given process and all of its children.
    #[cfg(target_os = "linux")]
    fn get_proc_stats(
        process: &Process,
    ) -> Result<Vec<(procfs::process::Stat, procfs::process::MemoryMaps)>, Error> {
        let target_process = Process::new(process.pid()).map_err(Error::ProcError)?;
        let target_and_children = Self::get_all_children(target_process)?;
        // Unlike get_all_children, a stat/smaps failure here aborts the
        // whole collection via the fallible collect.
        let stats = target_and_children
            .into_iter()
            .map(|p| Ok((p.stat()?, p.smaps()?)))
            .collect::<Result<Vec<_>, _>>()
            .map_err(Error::ProcError)?;
        Ok(stats)
    }
    /// Run this [`Server`] to completion
    ///
    /// This function runs the user supplied program to its completion, or until
    /// a shutdown signal is received. Child exit status does not currently
    /// propagate. This is less than ideal.
    ///
    /// Target server will use the `broadcast::Sender` passed here to transmit
    /// its PID. This PID is passed to the sub-process as the first argument.
    ///
    /// # Errors
    ///
    /// Function will return an error if the underlying program cannot be waited
    /// on or will not shutdown when signaled to.
    ///
    /// # Panics
    ///
    /// None are known.
    #[allow(clippy::similar_names)]
    #[cfg(target_os = "linux")]
    pub async fn run(mut self, mut pid_snd: Receiver<u32>) -> Result<(), Error> {
        use std::{sync::atomic::Ordering, time::Duration};
        use metrics::gauge;
        use procfs::Uptime;
        let target_pid = pid_snd
            .recv()
            .await
            .expect("target failed to transmit PID, catastrophic failure");
        drop(pid_snd);
        let process = Process::new(target_pid.try_into().expect("PID coercion failed"))
            .map_err(Error::ProcError)?;
        let ticks_per_second: f64 = procfs::ticks_per_second() as f64;
        let page_size = procfs::page_size();
        gauge!("ticks_per_second", ticks_per_second);
        // Sample procfs once per second until shutdown.
        let mut procfs_delay = tokio::time::interval(Duration::from_secs(1));
        loop {
            tokio::select! {
                _ = procfs_delay.tick() => {
                    if let (Ok(parent_stat), Ok(all_stats)) = (process.stat(), Self::get_proc_stats(&process)) {
                        // Calculate process uptime. We have two pieces of
                        // information from the kernel: computer uptime and
                        // process starttime relative to power-on of the
                        // computer.
                        let process_starttime_ticks: u64 = parent_stat.starttime;
                        let process_starttime_seconds: f64 = process_starttime_ticks as f64 / ticks_per_second;
                        let uptime_seconds: f64 = Uptime::new().expect("could not query uptime").uptime;
                        let process_uptime_seconds = uptime_seconds - process_starttime_seconds;
                        // NOTE(review): cutime/cstime are i64 in procfs;
                        // try_into().unwrap() panics if a value is ever
                        // negative — assumed non-negative here.
                        let cutime: u64 = all_stats.iter().map(|stat| <i64 as std::convert::TryInto<u64>>::try_into(stat.0.cutime).unwrap()).sum();
                        let cstime: u64 = all_stats.iter().map(|stat| <i64 as std::convert::TryInto<u64>>::try_into(stat.0.cstime).unwrap()).sum();
                        let utime: u64 = all_stats.iter().map(|stat| stat.0.utime).sum();
                        let stime: u64 = all_stats.iter().map(|stat| stat.0.stime).sum();
                        let kernel_time_seconds = (cstime + stime) as f64 / ticks_per_second;
                        let user_time_seconds = (cutime + utime) as f64 / ticks_per_second;
                        // The time spent in kernel-space in seconds.
                        gauge!("kernel_time_seconds", kernel_time_seconds);
                        // The time spent in user-space in seconds.
                        gauge!("user_time_seconds", user_time_seconds);
                        // The uptime of the process in fractional seconds.
                        gauge!("uptime_seconds", process_uptime_seconds);
                        let rss: u64 = all_stats.iter().fold(0, |val, stat| val.saturating_add(stat.0.rss));
                        let pss: u64 = all_stats.iter().fold(0, |val, stat| {
                            let one_proc = stat.1.iter().fold(0u64, |one_map, stat| {
                                one_map.saturating_add(stat.extension.map.get("Pss").copied().unwrap_or_default())
                            });
                            val.saturating_add(one_proc)
                        });
                        let rsslim: u64 = all_stats.iter().fold(0, |val, stat| val.saturating_add(stat.0.rsslim));
                        let vsize: u64 = all_stats.iter().fold(0, |val, stat| val.saturating_add(stat.0.vsize));
                        let num_threads: u64 = all_stats.iter().map(|stat| <i64 as std::convert::TryInto<u64>>::try_into(stat.0.num_threads).unwrap()).sum();
                        let rss_bytes: u64 = rss*page_size;
                        RSS_BYTES.store(rss_bytes, Ordering::Relaxed);
                        // Number of pages that the process has in real memory.
                        gauge!("rss_bytes", rss_bytes as f64);
                        // Proportional share of bytes owned by this process and its children.
                        gauge!("pss_bytes", pss as f64);
                        // Soft limit on RSS bytes, see RLIMIT_RSS in getrlimit(2).
                        gauge!("rsslim_bytes", rsslim as f64);
                        // The size in bytes of the process in virtual memory.
                        gauge!("vsize_bytes", vsize as f64);
                        // Number of threads this process has active.
                        gauge!("num_threads", num_threads as f64);
                        // Number of processes this target has active
                        gauge!("num_processes", all_stats.len() as f64);
                    }
                }
                _ = self.shutdown.recv() => {
                    tracing::info!("shutdown signal received");
                    return Ok(());
                }
            }
        }
    }
    /// "Run" this [`Server`] to completion
    ///
    /// On non-Linux systems, this function is a no-op that logs a warning
    /// indicating observer capabilities are unavailable on these systems.
    ///
    /// # Errors
    ///
    /// None are known.
    ///
    /// # Panics
    ///
    /// None are known.
    #[allow(clippy::unused_async)]
    #[cfg(not(target_os = "linux"))]
    pub async fn run(self, _pid_snd: Receiver<u32>) -> Result<(), Error> {
        tracing::warn!("observer unavailable on non-Linux system");
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    #[test]
    #[cfg(target_os = "linux")]
    // Spawns `sh -c "sleep 1"` and verifies that get_proc_stats sees both
    // the shell and its `sleep` child.
    fn observer_observes_process_hierarchy() {
        use super::*;
        use std::{process::Command, time::Duration};
        let mut test_proc = Command::new("/bin/sh")
            .args(["-c", "sleep 1"])
            .spawn()
            .expect("launch child process");
        // wait for `sh` to launch `sleep`
        std::thread::sleep(Duration::from_millis(250));
        let proc =
            Process::new(test_proc.id().try_into().unwrap()).expect("create Process from PID");
        let stats = Server::get_proc_stats(&proc).expect("get proc stat hierarchy");
        test_proc.kill().unwrap();
        let mut bins = stats.iter().map(|s| s.0.comm.clone()).collect::<Vec<_>>();
        bins.sort();
        assert_eq!(&bins, &[String::from("sh"), String::from("sleep")]);
    }
}
|
/*!
```rudra-poc
[target]
crate = "arenavec"
version = "0.1.1"
[report]
issue_url = "https://github.com/ibabushkin/arenavec/issues/1"
issue_date = 2021-01-12
rustsec_url = "https://github.com/RustSec/advisory-db/pull/815"
rustsec_id = "RUSTSEC-2021-0040"
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "PanicSafety"
bug_count = 3
rudra_report_locations = ["src/common.rs:75:5: 89:6", "src/common.rs:418:5: 443:6", "src/common.rs:446:5: 471:6"]
```
!*/
#![forbid(unsafe_code)]
// tested with rustc 1.50.0-nightly (7f9c43cf9 2020-12-23) on Ubuntu 18.04
use arenavec::rc::{Arena, SliceVec};
use arenavec::ArenaBacking;
use std::sync::atomic::{AtomicBool, Ordering::SeqCst};
#[derive(Clone)]
// Payload used to trigger the panic-safety bug: field 0 is an id, field 1
// is unwrapped in Drop (panics when None and the id is 1).
struct Foo(usize, Option<u64>);
impl Drop for Foo {
    // Deliberately panicking Drop: fires exactly once, for the element with
    // id 1 (the flag swap makes it one-shot). This is the PoC's trigger —
    // do not "fix" it. compare_and_swap is fine for the pinned old nightly
    // named in the header comment.
    fn drop(&mut self) {
        println!("Dropping {:?}", self.0);
        if self.0 == 1 && ATOMIC_TRUE.compare_and_swap(true, false, SeqCst) {
            println!("THIS WILL PANIC {:?}", self.1.as_ref().unwrap());
        }
    }
}
// One-shot flag so the panicking Drop fires only on its first invocation.
static ATOMIC_TRUE: AtomicBool = AtomicBool::new(true);
// Arena capacity in bytes (4096 pages of 256 bytes worth; 1 MiB total).
const DEFAULT_CAPACITY: usize = 4096 << 8;
fn main() {
    // PoC: resize(1, _) shrinks the vec from 2 to 1, dropping Foo(1, None),
    // whose Drop panics — exercising the panic-safety bug reported above.
    let arena = Arena::init_capacity(ArenaBacking::SystemAllocation, DEFAULT_CAPACITY).unwrap();
    let mut vec: SliceVec<Foo> = SliceVec::new(arena.inner());
    vec.push(Foo(0, Some(12)));
    vec.push(Foo(1, None));
    assert_eq!(vec.len(), 2);
    vec.resize(1, Foo(99, Some(78)));
}
|
pub mod response;
use crate::config::MailConfig;
use rocket::State;
use rocket::response::content::Xml;
use serde::Serialize;
#[get("/.well-known/autoconfig/mail/config-v1.1.xml?<emailaddress>")]
// Thunderbird-style well-known autoconfig route; delegates to `autoconfig`.
pub fn autoconfig_wellknown(config: &State<MailConfig>, emailaddress: String) -> Xml<String> {
    autoconfig(config, emailaddress)
}
#[get("/mail/config-v1.1.xml?<emailaddress>")]
/// Renders the mail autoconfig XML for the domain part of `emailaddress`,
/// listing each configured server as incoming (imap/pop) or outgoing (smtp).
pub fn autoconfig(config: &State<MailConfig>, emailaddress: String) -> Xml<String> {
    // split('@').last() always yields at least one item, so this unwrap is
    // infallible; an address without '@' uses the whole string as domain.
    let domain = emailaddress.split('@').last().unwrap();
    // NOTE(review): panics (500) for a domain not present in the config —
    // consider a 404 response instead; left as-is to keep the signature.
    let domain = config.domains.get(domain).unwrap();
    let mut incoming_servers = vec![];
    let mut outgoing_servers = vec![];
    for server in &domain.servers {
        let socket_ty = match server.encrypt.as_str() {
            "ssl" => "SSL".to_string(),
            "starttls" => "STARTTLS".to_string(),
            _ => "plain".to_string(),
        };
        // Only one auth scheme is supported today; the original `match` had
        // a single `_` arm, so bind the value directly.
        let auth = "password-cleartext".to_string();
        let item = response::Server {
            ty: server.protocol.clone(),
            hostname: response::Hostname(server.hostname.clone()),
            port: response::Port(server.port),
            socket_type: response::SocketType(socket_ty),
            auth: response::Authentication(auth),
            username: response::Username(emailaddress.clone()),
        };
        if server.protocol == "smtp" {
            outgoing_servers.push(item);
        } else {
            incoming_servers.push(item);
        }
    }
    let response = response::ClientConfig {
        version: "1.1".to_string(),
        providers: vec![
            response::Provider {
                // NOTE(review): provider id is hard-coded; presumably should
                // come from config — confirm before changing.
                id: "synkhronix.com".to_string(),
                domain: response::Domain(domain.domain.clone()),
                display_name: response::DisplayName(domain.name.clone()),
                display_short_name: response::ShortName(domain.short_name.clone()),
                incoming_servers,
                outgoing_servers,
            },
        ],
    };
    // Serialize with a custom root element name; serialization errors are
    // deliberately ignored (best-effort, yields empty body on failure).
    let mut data = Vec::new();
    let writer = quick_xml::Writer::new(&mut data);
    let mut serializer = quick_xml::se::Serializer::with_root(writer, Some("clientConfig"));
    let _ = response.serialize(&mut serializer);
    let response = String::from_utf8(data).unwrap();
    Xml(format!("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>{}", response))
}
|
use ::*;
/// Sets the CSS size of the element matched by `target` to
/// `(width, height)` = `size`.
pub fn set_element_css_size(target: Selector, size: (f64, f64)) -> HtmlResult<()> {
    let result = unsafe {
        emscripten_set_element_css_size(
            selector_as_ptr!(target),
            size.0 as c_double,
            // BUG FIX: height previously passed size.0 (the width) again,
            // so every element was forced square.
            size.1 as c_double,
        )
    };
    match parse_html_result(result) {
        None => Ok(()),
        Some(err) => Err(err),
    }
}
/// Queries the CSS size of the element matched by `target`, returning
/// `(width, height)` on success.
pub fn get_element_css_size(target: Selector) -> HtmlResult<(f64, f64)> {
    // Zero-initialize instead of the deprecated `mem::uninitialized()`:
    // producing an uninitialized primitive is undefined behavior, and the
    // callee overwrites both out-params before we read them on success.
    let mut width: c_double = 0.0;
    let mut height: c_double = 0.0;
    let result = unsafe {
        emscripten_get_element_css_size(selector_as_ptr!(target), &mut width, &mut height)
    };
    match parse_html_result(result) {
        None => Ok((width as f64, height as f64)),
        Some(err) => Err(err),
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// UsageSyntheticsBrowserResponse : Response containing the number of Synthetics Browser tests run for each hour for a given organization.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageSyntheticsBrowserResponse {
    /// Get hourly usage for Synthetics Browser tests.
    // Omitted from JSON entirely when `None`.
    #[serde(rename = "usage", skip_serializing_if = "Option::is_none")]
    pub usage: Option<Vec<crate::models::UsageSyntheticsBrowserHour>>,
}
impl UsageSyntheticsBrowserResponse {
/// Response containing the number of Synthetics Browser tests run for each hour for a given organization.
pub fn new() -> UsageSyntheticsBrowserResponse {
UsageSyntheticsBrowserResponse {
usage: None,
}
}
}
|
extern crate reqwest;
use reqwest::{header, blocking::multipart};
// Uploads `myfile.jpg` as a multipart form to a local endpoint with a bearer
// token, without following redirects, and prints the response body.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut headers = header::HeaderMap::new();
    headers.insert("Authorization", "Bearer ACCESS_TOKEN".parse().unwrap());
    headers.insert("X-Nice", "Header".parse().unwrap());
    // "attributes" carries the upload metadata JSON; "file" streams the file.
    let form = multipart::Form::new()
        .text("attributes", "{\"name\":\"tigers.jpeg\", \"parent\":{\"id\":\"11446498\"}}")
        .file("file", "myfile.jpg")?;
    let client = reqwest::blocking::Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        .build()
        .unwrap();
    let res = client.post("http://localhost:28139/api/2.0/files/content")
        .headers(headers)
        .multipart(form)
        .send()?
        .text()?;
    println!("{}", res);
    Ok(())
}
|
use mcfg::shared::builders::Builder;
use mcfg::shared::packages::builders::{PackageBuilder, PackageSetBuilder};
use mcfg::shared::{Name, PackageSet};
use pretty_assertions::assert_eq;
use std::collections::HashMap;
use std::path::PathBuf;
use std::str::FromStr;
#[test]
// A package set built with only a name gets default/empty values for every
// other field, and round-trips through YAML unchanged.
fn test_minimal_package_set() {
    let package_set = PackageSetBuilder::named(Name::from_str("example").unwrap()).build();
    assert_eq!(package_set.name(), &String::from("example"));
    assert_eq!(package_set.path(), &PathBuf::default());
    assert_eq!(package_set.description(), &None);
    assert_eq!(package_set.is_optional(), false);
    assert_eq!(package_set.run_before(), &None);
    assert_eq!(package_set.has_actions(), false);
    assert_eq!(package_set.env_file(), &None);
    assert_eq!(package_set.link_files(), &HashMap::default());
    assert_eq!(package_set.run_after(), &None);
    let package_set_str = serde_yaml::to_string(&package_set).unwrap();
    println!("{}", package_set_str);
    let new_package_set = serde_yaml::from_str(&package_set_str).unwrap();
    assert_eq!(package_set, new_package_set);
}
#[test]
// A fully-populated set using package actions exposes each builder value
// through its accessor and round-trips through YAML.
fn test_package_set_with_packages() {
    let package_set = PackageSetBuilder::named(Name::from_str("example").unwrap())
        .description("an example package set, with package actions")
        .optional()
        .run_before("{{local-bin}}/ex-pre-install")
        .with_package_actions()
        .add_package_action(PackageBuilder::named(Name::from_str("expackage").unwrap()).build())
        .unwrap()
        .env_file("example.env")
        .run_after("{{local-bin}}/ex-post-install")
        .build();
    assert_eq!(package_set.name(), &String::from("example"));
    assert_eq!(
        package_set.description(),
        &Some("an example package set, with package actions".to_string())
    );
    assert_eq!(package_set.is_optional(), true);
    assert_eq!(
        package_set.run_before(),
        &Some("{{local-bin}}/ex-pre-install".to_string())
    );
    assert_eq!(package_set.has_actions(), true);
    assert_eq!(package_set.packages().unwrap().count(), 1);
    assert_eq!(package_set.env_file(), &Some("example.env".to_string()));
    assert_eq!(package_set.link_files(), &HashMap::default());
    assert_eq!(
        package_set.run_after(),
        &Some("{{local-bin}}/ex-post-install".to_string())
    );
    let package_set_str = serde_yaml::to_string(&package_set).unwrap();
    println!("{}", package_set_str);
    let new_package_set = serde_yaml::from_str(&package_set_str).unwrap();
    assert_eq!(package_set, new_package_set);
}
#[test]
// Same as the packages variant, but with install/uninstall script actions.
fn test_package_set_with_scripts() {
    let package_set = PackageSetBuilder::named(Name::from_str("example").unwrap())
        .description("an example package set, with package actions")
        .optional()
        .run_before("{{local-bin}}/ex-pre-install")
        .with_script_actions()
        .add_install_script_action("{{local-bin}}/ex-installer")
        .unwrap()
        .add_uninstall_script_action("{{local-bin}}/ex-uninstaller")
        .unwrap()
        .env_file("example.env")
        .run_after("{{local-bin}}/ex-post-install")
        .build();
    assert_eq!(package_set.name(), &String::from("example"));
    assert_eq!(
        package_set.description(),
        &Some("an example package set, with package actions".to_string())
    );
    assert_eq!(package_set.is_optional(), true);
    assert_eq!(
        package_set.run_before(),
        &Some("{{local-bin}}/ex-pre-install".to_string())
    );
    assert_eq!(package_set.has_actions(), true);
    assert_eq!(package_set.scripts().unwrap().len(), 2);
    assert_eq!(package_set.env_file(), &Some("example.env".to_string()));
    assert_eq!(package_set.link_files(), &HashMap::default());
    assert_eq!(
        package_set.run_after(),
        &Some("{{local-bin}}/ex-post-install".to_string())
    );
    let package_set_str = serde_yaml::to_string(&package_set).unwrap();
    println!("{}", package_set_str);
    let new_package_set = serde_yaml::from_str(&package_set_str).unwrap();
    assert_eq!(package_set, new_package_set);
}
#[test]
// Combines env vars, platform-conditional packages, and link files; checks
// counts and YAML round-trip.
fn test_package_set_with_a_lot() {
    let package_set = PackageSetBuilder::named(Name::from_str("gpg").unwrap())
        .description("Gnu Privacy Guard")
        .env_var("gpg_home", "{{home}}/.gnupg")
        .package_actions(&[
            PackageBuilder::named(Name::from_str("gpg").unwrap()).build(),
            PackageBuilder::named(Name::from_str("pinentry-gnome3").unwrap())
                .for_linux_only()
                .build(),
            PackageBuilder::named(Name::from_str("pinentry-mac").unwrap())
                .for_macos_only()
                .build(),
        ])
        .add_link_file("gpg.conf", "{{gpg_home}}/gpg.conf")
        .add_link_file(
            "gpg-agent-{{platform_os}}.conf",
            "{{gpg_home}}/gpg-agent.conf",
        )
        .run_after("gpg --list-keys")
        .build();
    assert_eq!(package_set.name(), &String::from("gpg"));
    assert_eq!(
        package_set.description(),
        &Some("Gnu Privacy Guard".to_string())
    );
    assert_eq!(package_set.has_actions(), true);
    assert_eq!(package_set.packages().unwrap().count(), 3);
    assert_eq!(package_set.link_files().len(), 2);
    assert_eq!(
        package_set.run_after(),
        &Some("gpg --list-keys".to_string())
    );
    let package_set_str = serde_yaml::to_string(&package_set).unwrap();
    println!("{}", package_set_str);
    let new_package_set = serde_yaml::from_str(&package_set_str).unwrap();
    assert_eq!(package_set, new_package_set);
}
#[test]
// Deserializes a YAML document whose `actions` holds a package list.
// NOTE(review): the raw string's original indentation was lost in
// extraction; reconstructed with conventional YAML nesting — confirm
// against the repository copy.
fn test_parse_package_set_with_packages() {
    let config_str = r##"
name: lux
env-file: sample.env
actions:
  packages:
    - name: lux
      kind:
        language: python
link-files:
  set-lux: "{{local-bin}}/set-lux"
"##;
    let package_set: PackageSet = serde_yaml::from_str(config_str).unwrap();
    println!("{:?}", package_set);
    assert_eq!(package_set.name(), "lux");
    assert_eq!(package_set.env_file(), &Some("sample.env".to_string()));
    assert_eq!(package_set.packages().unwrap().count(), 1);
    assert!(package_set.scripts().is_none());
    assert_eq!(package_set.link_files().len(), 1)
}
#[test]
// Deserializes a YAML document whose `actions` holds script entries.
// NOTE(review): the raw string's original indentation was lost in
// extraction; reconstructed with conventional YAML nesting — confirm
// against the repository copy.
fn test_parse_package_set_with_scripts() {
    let config_str = r##"
name: lux
env-file: sample.env
actions:
  scripts:
    install: install-lux
    uninstall: uninstall-lux
link-files:
  set-lux: "{{local-bin}}/set-lux"
"##;
    let package_set: PackageSet = serde_yaml::from_str(config_str).unwrap();
    println!("{:?}", package_set);
    assert_eq!(package_set.name(), "lux");
    assert_eq!(package_set.env_file(), &Some("sample.env".to_string()));
    assert!(package_set.packages().is_none());
    assert_eq!(package_set.scripts().unwrap().len(), 2);
    assert_eq!(package_set.link_files().len(), 1)
}
|
/// Counts results flowing through a pipeline: a total count and a count of
/// `Ok` results, each maintained only when its flag was enabled.
pub struct Enumeration {
    // Whether to count every result.
    enumerate_all: bool,
    // Whether to count only Ok results.
    enumerate_oks: bool,
    // Running total (None until first count, or after usize overflow).
    all: Option<usize>,
    // Running Ok total (None until first Ok, or after usize overflow).
    oks: Option<usize>,
}
impl Enumeration {
    /// Creates a counter; each flag enables the corresponding count.
    pub fn new(enumerate_all: bool, enumerate_oks: bool) -> Enumeration {
        Enumeration {
            enumerate_all,
            enumerate_oks,
            all: None,
            oks: None,
        }
    }
    /// Feeds one result through, returning the updated counts (1-based)
    /// together with the unmodified result.
    pub fn enumerate(
        &mut self,
        result: Result<String, String>,
    ) -> (Option<usize>, Option<usize>, Result<String, String>) {
        if self.enumerate_all {
            // checked_add: overflow parks the count at None (same as original).
            self.all = self.all.unwrap_or(0).checked_add(1);
        }
        if self.enumerate_oks && result.is_ok() {
            self.oks = self.oks.unwrap_or(0).checked_add(1);
        }
        (self.all, self.oks, result)
    }
}
|
use clap::{Arg, App};
#[derive(Debug, PartialEq)]
/// Regression algorithm selected on the command line.
pub enum Algo {
    Gradient, Ols
}
#[derive(Debug, PartialEq)]
/// Parsed command-line configuration: dataset path and chosen algorithm.
pub struct Config {
    pub file: String,
    pub algo: Algo,
}
impl Config {
pub fn new() -> Self {
let matches = App::new("learn")
.version("0.1.0")
.author("Simon Galasso <simon.galasso@hotmail.fr>")
.about("Perform a linear regression on a given dataset")
.arg(Arg::with_name("file")
.required(true)
.index(1)
.help("Path to the data file"))
.arg(Arg::with_name("algo")
.required(false)
.short("a")
.long("algo")
.takes_value(true)
.help("Algo selection, choose from 'ols' or 'gradient'"))
.get_matches();
return Config {
file: matches.value_of("file").unwrap_or("").to_string(),
algo: match matches.value_of("algo").unwrap_or("conflict") {
"gradient" => Algo::Gradient,
"ols" => Algo::Ols,
_ => Algo::Gradient
}
};
}
} |
use sequence::{Sequence, MultiCache};
use buffer::Buffer;
mod half;
mod head;
use self::half::{Half, AdvanceError};
use self::head::{Head, SenderHead, SenderHalf, ReceiverHead, ReceiverHalf};
/// Sending half of the queue; `half: None` means this sender is detached/closed.
#[derive(Debug)]
pub struct Sender<S: Sequence, R: Sequence, T> {
    half: Option<SenderHalf<S, R, T>>,
}
/// Receiving half of the queue; `half: None` behaves as an already-closed channel.
#[derive(Debug)]
pub struct Receiver<S: Sequence, R: Sequence, T> {
    half: Option<ReceiverHalf<S, R, T>>,
}
/// Why `try_send` failed; both variants hand the unsent message back to the caller.
#[derive(Debug, PartialEq, Eq)]
pub enum SendError<T> {
    BufferFull(T),
    Closed(T),
}
impl<T> From<AdvanceError<T>> for SendError<T> {
    /// Maps a buffer-advance failure onto the sender-facing error type,
    /// carrying the rejected payload through unchanged.
    fn from(err: AdvanceError<T>) -> Self {
        match err {
            AdvanceError::BufferFull(payload) => SendError::BufferFull(payload),
            AdvanceError::Closed(payload) => SendError::Closed(payload),
        }
    }
}
/// Error returned by `Receiver::try_recv` when no message could be taken even
/// though the channel is not closed.
// NOTE(review): this corresponds to `AdvanceError::BufferFull` on the receive
// half -- presumably "queue momentarily empty"; confirm against Half::try_advance.
#[derive(Debug, PartialEq, Eq)]
pub struct RecvError;
/// Creates a connected sender/receiver pair sharing a ring buffer of
/// `capacity` slots, with sequences built from their `Default` impls.
pub fn queue<S, R, T>(capacity: usize) -> (Sender<S, R, T>, Receiver<S, R, T>) where
    S: Sequence, R: Sequence
{
    // Both heads share the same `Head` handle; the buffer takes its own copy.
    let head = Head::new(S::default(), R::default());
    let sender = SenderHead::new(head.clone(), capacity);
    let receiver = ReceiverHead::new(head.clone());
    let buffer = Buffer::new(head, capacity);
    // unwrap() is ok for newly created half
    let sender_half = Half::new(buffer.clone(), sender).unwrap();
    let receiver_half = Half::new(buffer, receiver).unwrap();
    let sender = Sender {
        half: Some(sender_half),
    };
    let receiver = Receiver {
        half: Some(receiver_half),
    };
    (sender, receiver)
}
impl<S: Sequence, R: Sequence, T> Sender<S, R, T> {
    /// True when this sender has no half (detached) or its half reports closed.
    pub fn is_closed(&self) -> bool {
        match &self.half {
            Some(half) => half.is_closed(),
            None => true,
        }
    }
    /// Closes the channel from the sending side; a no-op if already detached.
    pub fn close(&mut self) {
        if let Some(half) = &mut self.half {
            half.close();
        }
    }
    /// Attempts to enqueue `msg` without blocking; on failure the message is
    /// returned inside the error.
    pub fn try_send(&mut self, msg: T) -> Result<(), SendError<T>> {
        match &mut self.half {
            Some(half) => half.try_advance(msg).map_err(SendError::from),
            None => Err(SendError::Closed(msg)),
        }
    }
}
impl<S: MultiCache, R: Sequence, T> Clone for Sender<S, R, T> {
    /// Clones the send half when the sequence supports multiple producers;
    /// if `try_clone` fails (or this sender is detached) the clone starts
    /// out with no half, i.e. already closed.
    fn clone(&self) -> Self {
        let half = match &self.half {
            Some(existing) => existing.try_clone(),
            None => None,
        };
        Sender { half }
    }
}
impl<S: Sequence, R: Sequence, T> Receiver<S, R, T> {
    /// True when this receiver has no half (detached) or its half reports closed.
    pub fn is_closed(&self) -> bool {
        match &self.half {
            Some(half) => half.is_closed(),
            None => true,
        }
    }
    /// Closes the channel from the receiving side; a no-op if already detached.
    pub fn close(&mut self) {
        if let Some(half) = &mut self.half {
            half.close();
        }
    }
    /// Attempts to take one message: `Ok(Some(msg))` on success, `Ok(None)` if
    /// the channel is closed (or this receiver is detached), `Err(RecvError)`
    /// when nothing could be taken right now.
    pub fn try_recv(&mut self) -> Result<Option<T>, RecvError> {
        let half = match &mut self.half {
            Some(half) => half,
            None => return Ok(None),
        };
        match half.try_advance(()) {
            Ok(msg) => Ok(Some(msg)),
            Err(AdvanceError::BufferFull(())) => Err(RecvError),
            Err(AdvanceError::Closed(())) => Ok(None),
        }
    }
}
impl<S: Sequence, R: MultiCache, T> Clone for Receiver<S, R, T> {
    /// Clones the receive half when the sequence supports multiple consumers;
    /// a failed `try_clone` yields a receiver that is already closed.
    fn clone(&self) -> Self {
        let half = match &self.half {
            Some(existing) => existing.try_clone(),
            None => None,
        };
        Receiver { half }
    }
}
|
// Auto-generated (svd2rust-style) read-only accessors for the DDRPHYC BISTGSR
// (BIST general status) register. Each method extracts the field named in its
// #[doc] attribute by shifting and masking `self.bits`.
// NOTE(review): bit positions/widths are taken from the SVD; verify against
// the reference manual before relying on them.
#[doc = "Reader of register DDRPHYC_BISTGSR"]
pub type R = crate::R<u32, super::DDRPHYC_BISTGSR>;
#[doc = "Reader of field `BDONE`"]
pub type BDONE_R = crate::R<bool, bool>;
#[doc = "Reader of field `BACERR`"]
pub type BACERR_R = crate::R<bool, bool>;
#[doc = "Reader of field `BDXERR`"]
pub type BDXERR_R = crate::R<bool, bool>;
#[doc = "Reader of field `PARBER`"]
pub type PARBER_R = crate::R<u8, u8>;
#[doc = "Reader of field `TPDBER`"]
pub type TPDBER_R = crate::R<u8, u8>;
#[doc = "Reader of field `DMBER`"]
pub type DMBER_R = crate::R<u8, u8>;
#[doc = "Reader of field `RASBER`"]
pub type RASBER_R = crate::R<u8, u8>;
#[doc = "Reader of field `CASBER`"]
pub type CASBER_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bit 0 - BDONE"]
    #[inline(always)]
    pub fn bdone(&self) -> BDONE_R {
        BDONE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - BACERR"]
    #[inline(always)]
    pub fn bacerr(&self) -> BACERR_R {
        BACERR_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - BDXERR"]
    #[inline(always)]
    pub fn bdxerr(&self) -> BDXERR_R {
        BDXERR_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bits 20:21 - PARBER"]
    #[inline(always)]
    pub fn parber(&self) -> PARBER_R {
        PARBER_R::new(((self.bits >> 20) & 0x03) as u8)
    }
    #[doc = "Bits 22:23 - TPDBER"]
    #[inline(always)]
    pub fn tpdber(&self) -> TPDBER_R {
        TPDBER_R::new(((self.bits >> 22) & 0x03) as u8)
    }
    #[doc = "Bits 24:27 - DMBER"]
    #[inline(always)]
    pub fn dmber(&self) -> DMBER_R {
        DMBER_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    #[doc = "Bits 28:29 - RASBER"]
    #[inline(always)]
    pub fn rasber(&self) -> RASBER_R {
        RASBER_R::new(((self.bits >> 28) & 0x03) as u8)
    }
    #[doc = "Bits 30:31 - CASBER"]
    #[inline(always)]
    pub fn casber(&self) -> CASBER_R {
        CASBER_R::new(((self.bits >> 30) & 0x03) as u8)
    }
}
|
mod text;
mod util;
use instant::Instant;
use wasm_bindgen::prelude::*;
use yew::events::IKeyboardEvent;
use yew::{html, Component, ComponentLink, Html, ShouldRender};
// JS interop: imports the `Chart` class from module.mjs via wasm-bindgen.
#[wasm_bindgen(module = "/module.mjs")]
extern "C" {
    // Opaque handle to the JS-side chart object.
    type Chart;
    #[wasm_bindgen(constructor)]
    fn new() -> Chart;
    // Attaches the chart to the DOM; `Model::update` passes the "#chart" selector.
    #[wasm_bindgen(method)]
    fn init(this: &Chart, arg: String);
    // Pushes one (accuracy, typing_speed) data point to the chart.
    #[wasm_bindgen(method)]
    fn update(this: &Chart, accuracy: usize, typing_speed: usize);
}
/// Root application state for the typing-test UI.
pub struct Model {
    // Prompt currently shown to the user.
    text: String,
    // Live contents of the input box.
    input: String,
    // All prompts in order; `text_list_index` points at the current one.
    text_list: Vec<String>,
    text_list_index: usize,
    // Started when a prompt is shown; read again when Enter is pressed.
    timer: Instant,
    elapsed_time: f64,
    // Formatted "<accuracy>% <speed>" summary of the last round.
    result: String,
    chart: Chart,
}
/// UI events.
pub enum Msg {
    // Input box changed; payload is its new text.
    GetInput(String),
    // Enter pressed: score the round and advance to the next prompt.
    Next,
    // Any other keypress: no re-render.
    Nope,
}
impl Component for Model {
    type Message = Msg;
    type Properties = ();
    // Initial state: prompt text, empty input, the parsed prompt list, and a
    // freshly constructed JS chart (not attached to the DOM until first Next).
    fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
        let content = text::texts();
        Model {
            text: "Press Enter to Start".into(),
            input: "".into(),
            text_list: util::manufacture_file(&content),
            text_list_index: 0,
            timer: Instant::now(),
            elapsed_time: 0_f64,
            result: "".into(),
            chart: Chart::new(),
        }
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::GetInput(new_value) => {
                self.input = new_value;
                true
            }
            Msg::Next => {
                //Get elapsed time and start new timer
                self.elapsed_time = self.timer.elapsed().as_secs_f64();
                self.timer = Instant::now();
                //Check
                let accuracy = util::get_accuracy(
                    &self.text_list.get(self.text_list_index).unwrap(),
                    &self.input,
                );
                let typing_speed = util::get_typing_speed(&self.input, self.elapsed_time);
                self.result = format!("{}% {}", accuracy, typing_speed);
                //If first time, init the chart
                if self.text_list_index == 0 {
                    self.chart.init("#chart".into());
                }
                //Change list_index
                self.text_list_index += 1;
                // NOTE(review): wrapping at len - 1 means the final list entry
                // is never shown, and `len() - 1` underflows (panics) on an
                // empty list -- confirm both are intended.
                if self.text_list_index >= self.text_list.len() - 1 {
                    self.text_list_index = 0;
                }
                //init
                self.input = "".into();
                self.text = self.text_list.get(self.text_list_index).unwrap().into();
                //Update chart
                self.chart.update(accuracy, typing_speed);
                true
            }
            Msg::Nope => false,
        }
    }
    // Renders prompt label, input box (Enter submits), result label, and chart canvas.
    fn view(&self) -> Html<Self> {
        html! {
            <div class="container">
                <div>
                    <label for="mainInput">{&self.text}</label>
                </div>
                <div>
                    <input
                        type="text"
                        id="mainInput"
                        value=&self.input
                        oninput=|e| Msg::GetInput(e.value)
                        onkeypress=|e| {
                            if e.key() == "Enter" {Msg::Next} else {Msg::Nope}}/>
                </div>
                <div>
                    <label id="result">{&self.result}</label>
                </div>
                <div>
                    <canvas id="chart"></canvas>
                </div>
            </div>
        }
    }
}
|
extern crate asn1_der;
use ::asn1_der::{ Asn1DerError, DerObject, DerTag, DerValue };
// Large embedded test vector; its DER payload starts at byte offset 5
// (see the final vector in test_ok).
const RANDOM: &[u8] = include_bytes!("rand.dat");
#[test]
fn test_ok() {
    /// Round-trips one vector: deserialize, check length prediction, reserialize.
    fn check(bytes: &[u8], expected: &DerObject) {
        let parsed = DerObject::deserialize(bytes.iter()).unwrap();
        assert_eq!(expected, &parsed);
        assert_eq!(parsed.serialized_len(), bytes.len());
        let mut buf = vec![0u8; bytes.len()];
        parsed.serialize(buf.iter_mut()).unwrap();
        assert_eq!(*bytes, *buf.as_slice());
    }
    // Vectors: empty object, short octet string, and the large random blob.
    let vectors = [
        (
            b"\x05\x00".as_ref(),
            DerObject{ tag: DerTag::from(0x05), value: DerValue::from(Vec::new()) }
        ),
        (
            b"\x04\x02\x37\xe4".as_ref(),
            DerObject {
                tag: DerTag::from(0x04),
                value: DerValue::from(b"\x37\xe4".to_vec())
            }
        ),
        (
            RANDOM,
            DerObject{ tag: DerTag::from(0x04), value: DerValue::from(RANDOM[5..].to_vec()) }
        )
    ];
    for (bytes, expected) in &vectors {
        check(bytes, expected);
    }
}
#[test]
fn test_err() {
    /// Asserts that deserializing `bytes` fails with exactly `error`.
    fn check(bytes: &[u8], error: &Asn1DerError) {
        assert_eq!(DerObject::deserialize(bytes.iter()).unwrap_err(), *error);
    }
    // Invalid length encodings and payload-length mismatches.
    let cases: &[(&[u8], Asn1DerError)] = &[
        // Invalid complex length
        (b"\x00\x80".as_ref(), Asn1DerError::InvalidEncoding),
        // Invalid use of a complex length
        (b"\xaf\x81\x7f".as_ref(), Asn1DerError::InvalidEncoding),
        // Incomplete length
        (b"\xbe\x81".as_ref(), Asn1DerError::LengthMismatch),
        // Incomplete length
        (b"\xd7\x84\x01\x00\x00".as_ref(), Asn1DerError::LengthMismatch),
        // Incomplete value
        (b"\x0c\x09\x54\x65\x73\x74\x6F\x6C\x6F\x70".as_ref(), Asn1DerError::LengthMismatch),
        // Complex length > 2^64 - 1
        (b"\x77\x89\x01\x00\x00\x00\x00\x00\x00\x00\x00".as_ref(), Asn1DerError::Unsupported),
        // Excessive length announcement: LengthMismatch on 64-bit targets,
        // Unsupported on 32-bit targets (the length does not fit in usize).
        #[cfg(target_pointer_width = "64")]
        (b"\x9d\xf7\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x43\x9d\x01\x00\x00\x00\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d".as_ref(), Asn1DerError::LengthMismatch),
        #[cfg(target_pointer_width = "32")]
        (b"\x9d\xf7\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x43\x9d\x01\x00\x00\x00\x9d\x9d\x9d\x9d\x9d\x9d\x9d\x9d".as_ref(), Asn1DerError::Unsupported)
    ];
    for (bytes, error) in cases {
        check(bytes, error);
    }
}
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use cosmwasm_std::{
HumanAddr,
};
// Instantiation message: poll question, duration, and whether tallies may be
// read before the poll ends.
#[derive(Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct InitMsg {
    pub poll: String,
    pub duration: u64,
    pub early_results_allowed: bool,
}
// Handle message: cast a yes/no vote and/or delegate to another address.
#[derive(Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct HandleMsg {
    pub vote: Option<bool>,
    pub delegate: Option<HumanAddr>,
}
#[derive(Serialize, Deserialize, Clone, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum QueryMsg {
    GetPoll {},
    GetTally {},
}
// We define a custom struct for each query response
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct TallyResponse {
    pub count: i32,
}
// success or failure response
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
pub enum ResponseStatus {
    Success,
    Failure,
}
// Responses from handle functions
#[derive(Serialize, Deserialize, Debug, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum HandleAnswer {
    // response from vote attempt
    Ballot {
        /// success or failure
        status: ResponseStatus,
        /// execution description
        message: String,
        // New vote
        #[serde(skip_serializing_if = "Option::is_none")]
        vote: Option<bool>,
        // Address of entity to which vote was delegated, called a delegate
        // NOTE(review): unlike `vote`, this Option has no skip_serializing_if,
        // so it serializes as an explicit null -- confirm the asymmetry is wanted.
        delegate: Option<HumanAddr>,
    },
    // generic status response
    Status {
        /// success or failure
        status: ResponseStatus,
        /// execution description
        message: String,
    },
}
|
#![recursion_limit = "256"]
use assert_json_diff::assert_json_eq;
use chrono::{DateTime, NaiveDateTime, Utc};
use kube_derive::CustomResource;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// See `crd_derive_schema` example for how the schema generated from this struct affects defaulting and validation.
// NOTE: /// doc comments on types/fields in this file flow into the generated
// OpenAPI schema descriptions and are asserted byte-for-byte in
// test_crd_schema_matches_expected -- do not reword them casually.
#[derive(CustomResource, Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[kube(
    group = "clux.dev",
    version = "v1",
    kind = "Foo",
    category = "clux",
    namespaced,
    derive = "PartialEq",
    shortname = "fo",
    shortname = "f"
)]
#[serde(rename_all = "camelCase")]
struct FooSpec {
    non_nullable: String,
    #[serde(default = "default_value")]
    non_nullable_with_default: String,
    // Omitted from serialized output when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    nullable_skipped: Option<String>,
    nullable: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(default = "default_nullable")]
    nullable_skipped_with_default: Option<String>,
    #[serde(default = "default_nullable")]
    nullable_with_default: Option<String>,
    // Using feature `chrono`
    timestamp: DateTime<Utc>,
    /// This is a complex enum with a description
    complex_enum: ComplexEnum,
    /// This is a untagged enum with a description
    untagged_enum_person: UntaggedEnumPerson,
}
/// Serde default for `non_nullable_with_default`.
fn default_value() -> String {
    String::from("default_value")
}
/// Serde default for the nullable-with-default fields.
fn default_nullable() -> Option<String> {
    Some(String::from("default_nullable"))
}
// Spec with a #[serde(flatten)]ed map of arbitrary extra fields; the
// `flattening` test asserts this becomes x-kubernetes-preserve-unknown-fields.
#[derive(CustomResource, Deserialize, Serialize, Clone, Debug, JsonSchema)]
#[kube(group = "clux.dev", version = "v1", kind = "Flattening")]
pub struct FlatteningSpec {
    foo: String,
    #[serde(flatten)]
    arbitrary: HashMap<String, serde_json::Value>,
}
// NOTE: the /// descriptions below (including their grammar quirks) are
// asserted byte-for-byte in test_crd_schema_matches_expected; change them
// there too or not at all.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[serde(rename_all = "camelCase")]
#[allow(clippy::enum_variant_names)]
enum ComplexEnum {
    /// First variant with an int
    VariantOne { int: i32 },
    /// Second variant with an String
    VariantTwo { str: String },
    /// Third variant which doesn't has an attribute
    VariantThree {},
}
// Untagged: serializes as the bare payload of whichever variant matches.
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
enum UntaggedEnumPerson {
    GenderAndAge(GenderAndAge),
    GenderAndDateOfBirth(GenderAndDateOfBirth),
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[serde(rename_all = "camelCase")]
struct GenderAndAge {
    /// Gender of the person
    gender: Gender,
    /// Age of the person in years
    age: i32,
}
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[serde(rename_all = "camelCase")]
struct GenderAndDateOfBirth {
    /// Gender of the person
    gender: Gender,
    /// Date of birth of the person as ISO 8601 date
    date_of_birth: String,
}
// Variants serialize in PascalCase ("Female"/"Male"/"Other").
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, JsonSchema)]
#[serde(rename_all = "PascalCase")]
enum Gender {
    Female,
    Male,
    /// This variant has a comment!
    Other,
}
#[test]
// Derived CRD name must be `<plural>.<group>`.
fn test_crd_name() {
    use kube::core::CustomResourceExt;
    assert_eq!("foos.clux.dev", Foo::crd_name());
}
#[test]
// Both #[kube(shortname)] attributes must be carried into the CRD, in order.
fn test_shortnames() {
    use kube::core::CustomResourceExt;
    assert_eq!(&["fo", "f"], Foo::shortnames());
}
#[test]
// Serialization round-trip: a Foo built with all-None options must match the
// JSON below exactly -- the two skip_serializing_if fields are absent, while
// the plain nullable fields appear as explicit nulls.
fn test_serialized_matches_expected() {
    assert_json_eq!(
        serde_json::to_value(Foo::new("bar", FooSpec {
            non_nullable: "asdf".to_string(),
            non_nullable_with_default: "asdf".to_string(),
            nullable_skipped: None,
            nullable: None,
            nullable_skipped_with_default: None,
            nullable_with_default: None,
            timestamp: DateTime::from_utc(NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), Utc),
            complex_enum: ComplexEnum::VariantOne { int: 23 },
            untagged_enum_person: UntaggedEnumPerson::GenderAndAge(GenderAndAge {
                age: 42,
                gender: Gender::Male,
            })
        }))
        .unwrap(),
        serde_json::json!({
            "apiVersion": "clux.dev/v1",
            "kind": "Foo",
            "metadata": {
                "name": "bar",
            },
            "spec": {
                "nonNullable": "asdf",
                "nonNullableWithDefault": "asdf",
                "nullable": null,
                "nullableWithDefault": null,
                "timestamp": "1970-01-01T00:00:00Z",
                "complexEnum": {
                    "variantOne": {
                        "int": 23
                    }
                },
                "untaggedEnumPerson": {
                    "age": 42,
                    "gender": "Male"
                }
            }
        })
    )
}
#[test]
// Golden test: the full generated CRD -- including schema descriptions pulled
// from /// doc comments elsewhere in this file -- must match this JSON exactly.
// Any doc-comment edit on FooSpec/ComplexEnum/etc. will surface here.
fn test_crd_schema_matches_expected() {
    use kube::core::CustomResourceExt;
    assert_json_eq!(
        Foo::crd(),
        serde_json::json!({
            "apiVersion": "apiextensions.k8s.io/v1",
            "kind": "CustomResourceDefinition",
            "metadata": {
                "name": "foos.clux.dev"
            },
            "spec": {
                "group": "clux.dev",
                "names": {
                    "categories": ["clux"],
                    "kind": "Foo",
                    "plural": "foos",
                    "shortNames": ["fo", "f"],
                    "singular": "foo"
                },
                "scope": "Namespaced",
                "versions": [
                    {
                        "name": "v1",
                        "served": true,
                        "storage": true,
                        "additionalPrinterColumns": [],
                        "schema": {
                            "openAPIV3Schema": {
                                "description": "Auto-generated derived type for FooSpec via `CustomResource`",
                                "properties": {
                                    "spec": {
                                        "properties": {
                                            "nonNullable": {
                                                "type": "string"
                                            },
                                            "nonNullableWithDefault": {
                                                "default": "default_value",
                                                "type": "string"
                                            },
                                            "nullableSkipped": {
                                                "nullable": true,
                                                "type": "string"
                                            },
                                            "nullable": {
                                                "nullable": true,
                                                "type": "string"
                                            },
                                            "nullableSkippedWithDefault": {
                                                "default": "default_nullable",
                                                "nullable": true,
                                                "type": "string"
                                            },
                                            "nullableWithDefault": {
                                                "default": "default_nullable",
                                                "nullable": true,
                                                "type": "string"
                                            },
                                            "timestamp": {
                                                "type": "string",
                                                "format": "date-time"
                                            },
                                            "complexEnum": {
                                                "type": "object",
                                                "properties": {
                                                    "variantOne": {
                                                        "type": "object",
                                                        "properties": {
                                                            "int": {
                                                                "type": "integer",
                                                                "format": "int32"
                                                            }
                                                        },
                                                        "required": ["int"],
                                                        "description": "First variant with an int"
                                                    },
                                                    "variantTwo": {
                                                        "type": "object",
                                                        "properties": {
                                                            "str": {
                                                                "type": "string"
                                                            }
                                                        },
                                                        "required": ["str"],
                                                        "description": "Second variant with an String"
                                                    },
                                                    "variantThree": {
                                                        "type": "object",
                                                        "description": "Third variant which doesn't has an attribute"
                                                    }
                                                },
                                                "oneOf": [
                                                    {
                                                        "required": ["variantOne"]
                                                    },
                                                    {
                                                        "required": ["variantTwo"]
                                                    },
                                                    {
                                                        "required": ["variantThree"]
                                                    }
                                                ],
                                                "description": "This is a complex enum with a description"
                                            },
                                            "untaggedEnumPerson": {
                                                "type": "object",
                                                "properties": {
                                                    "age": {
                                                        "type": "integer",
                                                        "format": "int32",
                                                        "description": "Age of the person in years"
                                                    },
                                                    "dateOfBirth": {
                                                        "type": "string",
                                                        "description": "Date of birth of the person as ISO 8601 date"
                                                    },
                                                    "gender": {
                                                        "type": "string",
                                                        "enum": ["Female", "Male", "Other"],
                                                        "description": "Gender of the person"
                                                    }
                                                },
                                                "anyOf": [
                                                    {
                                                        "required": ["age", "gender"]
                                                    },
                                                    {
                                                        "required": ["dateOfBirth", "gender"]
                                                    }
                                                ],
                                                "description": "This is a untagged enum with a description"
                                            }
                                        },
                                        "required": [
                                            "complexEnum",
                                            "nonNullable",
                                            "timestamp",
                                            "untaggedEnumPerson"
                                        ],
                                        "type": "object"
                                    }
                                },
                                "required": [
                                    "spec"
                                ],
                                "title": "Foo",
                                "type": "object"
                            }
                        },
                        "subresources": {},
                    }
                ]
            }
        })
    );
}
#[test]
fn flattening() {
    use kube::core::CustomResourceExt;
    // Drill into the generated CRD schema for the `spec` property.
    let spec = &Flattening::crd().spec.versions[0]
        .schema
        .clone()
        .unwrap()
        .open_api_v3_schema
        .unwrap()
        .properties
        .unwrap()["spec"];
    // #[serde(flatten)] over a HashMap should surface as
    // x-kubernetes-preserve-unknown-fields, not as additionalProperties.
    assert_eq!(spec.x_kubernetes_preserve_unknown_fields, Some(true));
    assert_eq!(spec.additional_properties, None);
}
|
// #![warn(clippy::all, clippy::pedantic, clippy::nursery, clippy::cargo)]
// #![allow(clippy::missing_const_for_fn)]
// #![allow(clippy::multiple_crate_versions)]
// #![allow(clippy::missing_errors_doc)]
// #![allow(clippy::module_name_repetitions)]
#[macro_use]
extern crate eyre;
#[macro_use]
extern crate clap;
#[macro_use]
extern crate custom_derive;
#[macro_use]
extern crate enum_derive;
mod cli;
mod io;
mod subcommands;
use crate::clap::Clap;
use cli::{
subcommands::{Command, Runner},
Opts,
};
use color_eyre::eyre::Report;
use displaydoc::Display;
use eyre::Context;
use thiserror::Error;
use tracing::instrument;
// Process-level error type. Display text comes from displaydoc: the /// lines
// below are runtime format strings, not ordinary docs -- do not reword casually.
#[derive(Display, Error, Debug)]
#[non_exhaustive]
pub enum BinError {
    /// IO error with `{path}`
    Io {
        #[source]
        err: std::io::Error,
        path: std::path::PathBuf,
    },
    /// encountered multiple errors
    MultipleErrors,
}
#[instrument]
// Entry point: installs tracing and the error-report hook, parses CLI options,
// then dispatches to the selected subcommand, wrapping its error with context.
fn main() -> Result<(), Report> {
    cli::install_tracing();
    cli::setup_error_hook()?;
    let opts: Opts = Opts::parse();
    match opts.command {
        Command::ConvertTree(args) => {
            args.run().wrap_err("failed to convert taxonomy tree")?;
        }
        Command::CombineTrees(args) => {
            args.run().wrap_err("failed to combine taxonomy trees")?;
        }
        Command::ConvertAbundance(args) => {
            args.run()
                .wrap_err("failed to convert taxonomy abundance data")?;
        }
        Command::CombineAbundances(args) => {
            args.run().wrap_err("failed to combine abundance data")?;
        }
    }
    Ok(())
}
|
use super::work::Work;
use super::*;
use enum_map::Enum;
use spin::RwLock;
use std::cmp;
use std::collections::BinaryHeap;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Condvar, Mutex};
/// A unique work-packet id for each instance of work-packet
#[derive(Eq, PartialEq, Clone, Copy)]
struct WorkUID(u64);
impl WorkUID {
pub fn new() -> Self {
static WORK_UID: AtomicU64 = AtomicU64::new(0);
Self(WORK_UID.fetch_add(1, Ordering::Relaxed))
}
}
// A work packet tagged with its scheduling priority and a tie-breaking
// unique id (see the PartialEq impl below).
struct PrioritizedWork<C: Context> {
    priority: usize,
    work_uid: WorkUID,
    work: Box<dyn Work<C>>,
}
impl<C: Context> PrioritizedWork<C> {
    // Wraps `work`, stamping it with a fresh unique id.
    pub fn new(priority: usize, work: Box<dyn Work<C>>) -> Self {
        Self {
            priority,
            work,
            work_uid: WorkUID::new(),
        }
    }
}
// Equality is identity-based: equal only when both priority AND uid match.
// NOTE(review): `cmp` below compares priority alone, so it can return Equal
// for values that are `!=` -- this technically violates the Ord contract,
// though BinaryHeap (the only consumer here) tolerates it.
impl<C: Context> PartialEq for PrioritizedWork<C> {
    fn eq(&self, other: &Self) -> bool {
        self.priority == other.priority && self.work_uid == other.work_uid
    }
}
impl<C: Context> Eq for PrioritizedWork<C> {}
// Order by priority only, so the BinaryHeap pops the highest-priority packet.
impl<C: Context> Ord for PrioritizedWork<C> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.priority.cmp(&other.priority)
    }
}
impl<C: Context> PartialOrd for PrioritizedWork<C> {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
/// A bucket of schedulable work guarded by an activation flag: `poll` yields
/// nothing until the bucket is activated (directly or via its open condition).
pub struct WorkBucket<C: Context> {
    active: AtomicBool,
    /// A priority queue
    queue: RwLock<BinaryHeap<PrioritizedWork<C>>>,
    // Shared mutex+condvar used to wake workers when new work arrives.
    monitor: Arc<(Mutex<()>, Condvar)>,
    // Optional predicate consulted by `update()` to decide when to auto-activate.
    can_open: Option<Box<dyn Fn() -> bool>>,
}
// SAFETY(review): these impls assert the non-Send/Sync parts (notably the
// `can_open` closure and the `C` parameter) are safe to share across threads;
// presumably `can_open` is only set before the bucket is shared -- confirm.
unsafe impl<C: Context> Send for WorkBucket<C> {}
unsafe impl<C: Context> Sync for WorkBucket<C> {}
impl<C: Context> WorkBucket<C> {
    pub fn new(active: bool, monitor: Arc<(Mutex<()>, Condvar)>) -> Self {
        Self {
            active: AtomicBool::new(active),
            queue: Default::default(),
            monitor,
            can_open: None,
        }
    }
    // Wake one worker. The monitor mutex is taken (and immediately dropped) so
    // the notification cannot race a worker that is between checking for work
    // and blocking on the condvar.
    fn notify_one_worker(&self) {
        let _guard = self.monitor.0.lock().unwrap();
        self.monitor.1.notify_one()
    }
    fn notify_all_workers(&self) {
        let _guard = self.monitor.0.lock().unwrap();
        self.monitor.1.notify_all()
    }
    pub fn is_activated(&self) -> bool {
        self.active.load(Ordering::SeqCst)
    }
    /// Enable the bucket
    pub fn activate(&self) {
        self.active.store(true, Ordering::SeqCst);
    }
    /// Test if the bucket is drained
    pub fn is_empty(&self) -> bool {
        self.queue.read().len() == 0
    }
    // Drained = already opened and nothing left to poll.
    pub fn is_drained(&self) -> bool {
        self.is_activated() && self.is_empty()
    }
    /// Disable the bucket
    pub fn deactivate(&self) {
        debug_assert!(
            self.queue.read().is_empty(),
            "Bucket not drained before close"
        );
        self.active.store(false, Ordering::SeqCst);
    }
    /// Add a work packet to this bucket, with a given priority
    pub fn add_with_priority(&self, priority: usize, work: Box<dyn Work<C>>) {
        self.queue
            .write()
            .push(PrioritizedWork::new(priority, work));
        self.notify_one_worker(); // FIXME: Performance
    }
    /// Add a work packet to this bucket, with a default priority (1000)
    pub fn add<W: Work<C>>(&self, work: W) {
        // `box` expression syntax -- this crate builds on nightly.
        self.add_with_priority(1000, box work);
    }
    // Push a whole batch under one write lock, then wake every worker once.
    pub fn bulk_add(&self, priority: usize, work_vec: Vec<Box<dyn Work<C>>>) {
        {
            let mut queue = self.queue.write();
            for w in work_vec {
                queue.push(PrioritizedWork::new(priority, w));
            }
        }
        self.notify_all_workers(); // FIXME: Performance
    }
    /// Get a work packet (with the greatest priority) from this bucket
    pub fn poll(&self) -> Option<Box<dyn Work<C>>> {
        // A deactivated bucket hides its contents from workers.
        if !self.active.load(Ordering::SeqCst) {
            return None;
        }
        self.queue.write().pop().map(|v| v.work)
    }
    pub fn set_open_condition(&mut self, pred: impl Fn() -> bool + 'static) {
        self.can_open = Some(box pred);
    }
    // Activate the bucket once its open condition first holds; returns whether
    // this call performed the activation.
    pub fn update(&self) -> bool {
        if let Some(can_open) = self.can_open.as_ref() {
            if !self.is_activated() && can_open() {
                self.activate();
                return true;
            }
        }
        false
    }
}
// Scheduling phases for work buckets.
// NOTE(review): the variant order suggests buckets open in this sequence
// during a collection cycle -- confirm against the scheduler that consumes it.
#[derive(Debug, Enum, Copy, Clone, Eq, PartialEq)]
pub enum WorkBucketStage {
    Unconstrained,
    Prepare,
    Closure,
    // TODO: We only support final reference at the moment. If we have references of multiple strengths,
    // we may need more than one buckets for each reference strength.
    RefClosure,
    RefForwarding,
    Release,
    Final,
}
|
use std::fs::File;
use std::io::BufWriter;
use std::path::Path;
// RGB: three bytes per pixel throughout this module.
const BYTES_PER_PIXEL: u32 = 3;
/// An RGB pixel buffer. `pitch` is the byte stride between rows, as reported
/// by the PNG decoder in `png_load` (row loops below advance by it).
pub struct Bitmap {
    pub width: u32,
    pub height: u32,
    pub pitch: usize,
    pub pixels: Vec<u8>,
}
/// A fixed-cell bitmap font: `charmap[i]` is the glyph in cell `i`; cells are
/// `character_width` x `character_height` pixels, laid out row-major in `bitmap`.
pub struct Font<'a> {
    pub bitmap: Bitmap,
    pub charmap: Vec<&'a str>,
    pub character_width: u32,
    pub character_height: u32,
}
/// Decodes an embedded PNG (e.g. from `include_bytes!`) into a `Bitmap`.
/// Panics if the data is not a decodable PNG.
pub fn png_load(data: &'static [u8]) -> Bitmap {
    // png 0.15-era API: read_info yields (OutputInfo, Reader).
    let decoder = png::Decoder::new(data);
    let (info, mut reader) = decoder.read_info().unwrap();
    let mut pixels = vec![0; info.buffer_size()];
    reader.next_frame(&mut pixels).unwrap();
    // Tail expression instead of the redundant `return ...;`.
    Bitmap {
        width: info.width,
        height: info.height,
        pitch: info.line_size,
        pixels,
    }
}
/// Encodes `bitmap` as an 8-bit RGB PNG at path `file`.
/// Panics on I/O or encoding failure.
pub fn png_write(file: &str, bitmap: &Bitmap) {
    let path = Path::new(file);
    let out = File::create(path).unwrap();
    // `let mut` + explicit `&mut` borrow instead of the `let ref mut` anti-pattern.
    let mut w = BufWriter::new(out);
    let mut encoder = png::Encoder::new(&mut w, bitmap.width, bitmap.height);
    encoder.set_color(png::ColorType::RGB);
    encoder.set_depth(png::BitDepth::Eight);
    let mut writer = encoder.write_header().unwrap();
    writer.write_image_data(&bitmap.pixels).unwrap();
    // NOTE(review): the BufWriter is flushed on drop and any flush error is
    // silently swallowed -- consider returning io::Result and flushing explicitly.
}
/// Copies a `width` x `height` pixel rectangle from `src` at (src_x, src_y)
/// to `dest` at (dest_x, dest_y). Panics if either rectangle is out of bounds.
pub fn bitmap_copy(src: &Bitmap, src_x: u32, src_y: u32, dest: &mut Bitmap, dest_x: u32, dest_y: u32, width: u32, height: u32) {
    // Row start offsets are computed from each bitmap's pitch (byte stride per
    // row), matching the `+= pitch` row advance below. The previous code used
    // `(x + y * width) * BYTES_PER_PIXEL`, which addresses the wrong row
    // whenever pitch != width * BYTES_PER_PIXEL (padded rows).
    let mut src_row = src_y as usize * src.pitch + (src_x * BYTES_PER_PIXEL) as usize;
    let mut dest_row = dest_y as usize * dest.pitch + (dest_x * BYTES_PER_PIXEL) as usize;
    for _y in 0..height {
        let mut src_ptr = src_row;
        let mut dest_ptr = dest_row;
        for _x in 0..width {
            // Copy one pixel, channel by channel.
            for _ in 0..BYTES_PER_PIXEL {
                dest.pixels[dest_ptr] = src.pixels[src_ptr];
                src_ptr += 1;
                dest_ptr += 1;
            }
        }
        src_row += src.pitch;
        dest_row += dest.pitch;
    }
}
// Draws `text` into `dest` at (dest_x, dest_y) using the fixed-cell font.
// Spaces are skipped (cursor advances only); any other character must exist in
// `font.charmap` -- the position().unwrap() below panics on an unknown glyph.
pub fn bitmap_text(font: &Font, dest: &mut Bitmap, mut dest_x: u32, dest_y: u32, text: &str) {
    let characters_per_row = font.bitmap.width / font.character_width;
    for character in text.chars() {
        if character != ' ' {
            // Locate the glyph cell: linear index -> (column u, row v) in the sheet.
            let index = font.charmap.iter().position(|&c| c == character.to_string()).unwrap() as u32;
            let u = index % characters_per_row;
            let v = index / characters_per_row;
            bitmap_copy(&font.bitmap, u * font.character_width, v * font.character_height, dest, dest_x, dest_y, font.character_width, font.character_height);
        }
        dest_x += font.character_width;
    }
}
/// Fills a `width` x `height` rectangle of `dest` at (dest_x, dest_y) with the
/// solid color (r, g, b). Panics if the rectangle is out of bounds.
pub fn bitmap_fill(dest: &mut Bitmap, dest_x: u32, dest_y: u32, width: u32, height: u32, r: u8, g: u8, b: u8) {
    // Row start offset uses pitch (byte stride per row) to match the
    // `+= pitch` row advance below; the previous width-based computation was
    // wrong whenever pitch != width * BYTES_PER_PIXEL (padded rows).
    let mut dest_row = dest_y as usize * dest.pitch + (dest_x * BYTES_PER_PIXEL) as usize;
    for _y in 0..height {
        let mut dest_ptr = dest_row;
        for _x in 0..width {
            // Write one RGB pixel.
            for &channel in &[r, g, b] {
                dest.pixels[dest_ptr] = channel;
                dest_ptr += 1;
            }
        }
        dest_row += dest.pitch;
    }
}
|
//! Directly plug a `main` symbol instead of using `#[entry]`
#![deny(warnings)]
#![no_main]
#![no_std]
extern crate cortex_m_rt as rt;
extern crate panic_halt;
// Bare `main` symbol plugged in directly (instead of cortex-m-rt's #[entry]).
// It must never return, hence the divergent return type and the idle loop.
#[no_mangle]
pub unsafe extern "C" fn main() -> ! {
    loop {}
}
|
mod map;
mod tag;
mod alt;
#[cfg(test)]
mod alt_tests;
pub use map::map;
pub use tag::tag;
pub use alt::alt;
|
#![cfg(feature = "bench")]
#![feature(test)]
#![allow(non_snake_case)]
#[macro_use]
extern crate jsontests_derive;
extern crate jsontests;
extern crate test;
// Benchmark anchor: the JsonTests derive presumably generates one test and one
// bench per JSON fixture under the given directory, wired to the named runner
// functions -- see the jsontests_derive crate for the exact expansion.
#[derive(JsonTests)]
#[directory = "jsontests/res/files/vmPerformance"]
#[test_with = "jsontests::util::run_test"]
#[bench_with = "jsontests::util::run_bench"]
struct Performance;
|
use std::fs::File;
use std::io::prelude::*;
use yaml_rust::YamlLoader;
/// Runtime options for the mesh server, loaded from a YAML config file.
pub struct MeshServerOptions {
    pub host: String,
    pub port: u16,
    // Filesystem path for metadata (YAML key: meta_location.path).
    pub meta_location: String,
    // URLs listed under the `volumes` key.
    pub volume_urls: Vec<String>,
    // Replication factor -- NOTE(review): semantics inferred from the key
    // name; confirm against the code that consumes it.
    pub replication: u16,
}
impl MeshServerOptions {
    /// Loads mesh-server options from the YAML file at `config_path`.
    ///
    /// Missing keys fall back to defaults: host "localhost", port 3000, meta
    /// path "/tmp/meshfs", replication 2. The `volumes` list is required.
    /// I/O and structural problems are now reported through the declared
    /// `Result` instead of panicking (the fn always promised `Result` but
    /// previously unwrap()ed the file open/read and the `volumes` lookup).
    pub fn new(config_path: &str) -> Result<MeshServerOptions, &'static str> {
        let mut s = String::new();
        let mut f = File::open(config_path).map_err(|_| "Unable to open configuration file.")?;
        f.read_to_string(&mut s)
            .map_err(|_| "Unable to read configuration file.")?;
        let docs = YamlLoader::load_from_str(&s)
            .map_err(|_| "Unable to load yaml configuration file.")?;
        let doc = docs.get(0).ok_or("Configuration file is empty.")?;
        let host = doc["host"].as_str().unwrap_or("localhost").to_owned();
        // NOTE(review): values are read with as_str(), so unquoted numeric
        // YAML scalars (e.g. `port: 3000`) are ignored and the default is used
        // instead -- confirm whether that is intended.
        let port = doc["port"]
            .as_str()
            .unwrap_or("3000")
            .parse::<u16>()
            .map_err(|_| "invalid port")?;
        let meta_location = doc["meta_location"]["path"]
            .as_str()
            .unwrap_or("/tmp/meshfs")
            .to_owned();
        let volumes = doc["volumes"].as_vec().ok_or("missing 'volumes' list")?;
        let mut volume_urls = Vec::with_capacity(volumes.len());
        for v in volumes {
            volume_urls.push(String::from(
                v.as_str().ok_or("volume entries must be strings")?,
            ));
        }
        let replication = doc["replication"]
            .as_str()
            .unwrap_or("2")
            .parse::<u16>()
            .map_err(|_| "invalid replication value")?;
        Ok(MeshServerOptions {
            host,
            port,
            meta_location,
            volume_urls,
            replication,
        })
    }
}
|
#![feature(simd)]
// NOTE(review): relies on the ancient unstable #[simd] attribute (pre-1.0
// era); modern equivalents are #[repr(simd)] on nightly or std::simd.
#[simd]
#[derive(Clone, Copy, Debug)]
#[allow(non_camel_case_types)]
struct f32x3(f32, f32, f32);
// Demonstrates element-wise subtraction on the SIMD vector type.
fn main() {
    let a = f32x3(1.0, 2.0, 3.0);
    let b = f32x3(0.5, 1.5, 2.5);
    let c = a - b;
    println!("{:?}", c);
}
|
use crate::proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
/// Derive expansion: emits an empty `SerializeData` impl for the annotated type.
///
/// Generics and the where-clause are carried over from the input, and the
/// trait is referenced through `crate::root_path(&input)` -- presumably so the
/// path resolves both inside and outside the defining crate; confirm.
pub(crate) fn expand(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let struct_name = &input.ident;
    let trait_root_path = crate::root_path(&input);
    // split generics into parts
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    let serialize_data_impl = quote! {
        impl #impl_generics #trait_root_path::SerializeData for #struct_name #ty_generics #where_clause
        {}
    };
    serialize_data_impl.into()
}
pub mod active_version;
pub mod active_versions;
pub mod constraint_kind;
pub mod constraints;
pub mod id;
pub mod inactive_version;
pub mod repository;
pub mod version_number;
use crate::entity::Entity;
use apllodb_storage_engine_interface::ColumnDataType;
use constraints::VersionConstraints;
use id::VersionId;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
/// Version.
///
/// A version belongs to a [Table](struct.Table.html).
/// A version directly has subset of records in the Table.
///
/// - The version `v_1` is created by apllodb CREATE TABLE command.
/// - Version `v_(current+1)` is created by apllodb ALTER TABLE command.
/// - Some of `v_1` ~ `v_current` are inactivated by apllodb ALTER TABLE command
/// if all the records in `v_i` can be migrated to `v_(current+1)` (auto upgrade).
/// - All of `v_1` ~ `v_current` are inactivated by apllodb DROP TABLE command.
///
/// Each version is purely immutable.
/// See: https://github.com/darwin-education/apllodb/wiki/Immutable-Schema-102:-Immutable-Schema-%E3%81%AB%E9%96%A2%E3%81%99%E3%82%8B%E5%AE%9A%E7%BE%A9%E3%83%BB%E5%AE%9A%E7%90%86
///
/// Version does not have Primary Key definition. It is held by VTable, assuming that Primary Key won't be changed.
///
/// Version does not have useful methods because you should access to version via
/// [ActiveVersion](foobar.html) or [InactiveVersion](foobar.html), both of which have different behavior.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
pub(crate) struct Version {
    id: VersionId,
    // Column definitions the records of this version conform to.
    column_data_types: Vec<ColumnDataType>,
    constraints: VersionConstraints,
}
impl Entity for Version {
    type Id = VersionId;
    fn id(&self) -> &Self::Id {
        &self.id
    }
}
// Versions order by version number alone.
// NOTE(review): the derived PartialEq compares all fields, so `cmp` can return
// Equal for values that are `!=` -- this violates the Ord contract if two
// Versions ever share a number; confirm that cannot happen within a table.
impl Ord for Version {
    fn cmp(&self, other: &Self) -> Ordering {
        self.id.version_number.cmp(&other.id.version_number)
    }
}
impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
|
use sudo_test::{Command, Env, TextFile};
use crate::Result;
#[test]
// Baseline: `su -c CMD` runs CMD and propagates its exit status.
fn it_works() -> Result<()> {
    let env = Env("").build()?;
    Command::new("su")
        .args(["-c", "true"])
        .output(&env)?
        .assert_success()?;
    let output = Command::new("su").args(["-c", "false"]).output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    Ok(())
}
#[test]
// With -s, su invokes the chosen shell and forwards the command as `-c <command>`.
fn pass_to_shell_via_c_flag() -> Result<()> {
    let shell_path = "/root/my-shell";
    // Shell stub that just echoes the arguments it was invoked with.
    let my_shell = "#!/bin/sh
echo $@";
    let env = Env("")
        .file(shell_path, TextFile(my_shell).chmod("100"))
        .build()?;
    let command = "command";
    let output = Command::new("su")
        .args(["-s", shell_path, "-c", command])
        .output(&env)?
        .stdout()?;
    assert_eq!(format!("-c {command}"), output);
    Ok(())
}
#[test]
// With repeated -c flags only the final command runs: `true` produces no
// output at all, proving the earlier `id` was discarded.
fn when_specified_more_than_once_only_last_value_is_used() -> Result<()> {
    let env = Env("").build()?;
    let output = Command::new("su")
        .args(["-c", "id"])
        .args(["-c", "true"])
        .output(&env)?;
    assert!(output.status().success());
    assert!(output.stderr().is_empty());
    assert!(output.stdout()?.is_empty());
    Ok(())
}
|
// vim: shiftwidth=2
use std::fs::OpenOptions;
use std::io::Write;
use std::process::Command;
use crate::keys::Layout;
/// Converts an `io::Error` into a human-readable `String` error, prefixing it
/// with a description of the operation that failed; `Ok` passes through.
fn convert_io_error<T>(whats_happening: &str, res: Result<T, std::io::Error>) -> Result<T, String> {
  // map_err replaces the manual Ok/Err match; the message text is unchanged.
  res.map_err(|e| format!("Error {}: {}", whats_happening, e))
}
/// Converts a `serde_json::Error` into a human-readable `String` error,
/// prefixing it with a description of the operation that failed; `Ok` passes through.
fn convert_json_error<T>(whats_happening: &str, res: Result<T, serde_json::Error>) -> Result<T, String> {
  // map_err replaces the manual Ok/Err match; the message text is unchanged.
  res.map_err(|e| format!("Error {}: {}", whats_happening, e))
}
/// Installs totalmapper as a systemd service: writes the layout to the global
/// config, installs the udev rule and unit file, then reloads udev and systemd.
/// Each step's error message is propagated to the caller (the write steps
/// report a hint when not run as root).
pub fn add_systemd_service(layout: &Layout) -> Result<(), String> {
  write_layout_to_global_config(layout)?;
  write_udev_rule()?;
  write_systemd_service()?;
  refresh_udev()?;
  refresh_systemd()?;
  Ok(())
}
// Persists the chosen layout to /etc/totalmapper.json as pretty-printed JSON,
// truncating any previous contents.
fn write_layout_to_global_config(layout: &Layout) -> Result<(), String> {
  let file_out = convert_io_error(
    "saving layout to /etc/totalmapper.json",
    OpenOptions::new()
      .truncate(true).read(false).create(true).write(true)
      .open("/etc/totalmapper.json")
  )?;
  let buffered_out = std::io::BufWriter::new(file_out);
  convert_json_error(
    "saving layout to /etc/totalmapper.json",
    serde_json::to_writer_pretty(
      buffered_out,
      layout
    )
  )?;
  // NOTE(review): the BufWriter is flushed on drop and any flush error is
  // swallowed -- consider an explicit flush that reports failure.
  Ok(())
}
/// Writes the udev rule that starts a totalmapper systemd unit for every input
/// event device that appears. Gives a root-needed hint on permission errors.
pub fn write_udev_rule() -> Result<(), String> {
  let path = "/etc/udev/rules.d/80-totalmapper.rules";
  let mut out_file = OpenOptions::new()
    .truncate(true).read(false).create(true).write(true)
    .open(path)
    .map_err(|err| match err.kind() {
      std::io::ErrorKind::PermissionDenied =>
        format!("Permission denied writing to {}. You likely must run this sub-command as root.", path),
      _ => format!("{}", err),
    })?;
  // write_all instead of write: a bare `write` may perform a short write and
  // silently drop the tail of the rule.
  out_file.write_all(
    "KERNEL==\"event*\", ACTION==\"add\", TAG+=\"systemd\", ENV{SYSTEMD_WANTS}=\"totalmapper@%N.service\"\n".as_bytes()
  ).map_err(|err| format!("{}", err))?;
  Ok(())
}
/// Write the templated systemd unit `totalmapper@.service`; the `%I` instance
/// specifier carries the device node the udev rule matched.
///
/// Returns a user-facing error string on failure; a permission error gets a
/// hint that this sub-command must run as root.
pub fn write_systemd_service() -> Result<(), String> {
    let path = "/etc/systemd/system/totalmapper@.service";
    let mut out_file = match OpenOptions::new()
        .truncate(true).read(false).create(true).write(true)
        .open(path)
    {
        Err(err) => {
            match err.kind() {
                std::io::ErrorKind::PermissionDenied => {
                    return Err(format!("Permission denied writing to {}. You likely must run this sub-command as root.", path));
                },
                _ => return Err(format!("{}", err))
            }
        },
        Ok(out_file) => out_file
    };
    // Bug fix: `write` may perform a short write and silently truncate the
    // unit file; `write_all` writes everything or reports an error.
    match out_file.write_all(
        "[Unit]\n\
        StopWhenUnneeded=true\n\
        Description=Totalmapper\n\
        \n\
        [Service]\n\
        Type=simple\n\
        User=nobody\n\
        Group=input\n\
        ExecStart=/usr/bin/totalmapper remap --layout-file /etc/totalmapper.json --only-if-keyboard --dev-file /%I\n".as_bytes()
    ) {
        Err(err) => return Err(format!("{}", err)),
        Ok(_) => ()
    };
    Ok(())
}
/// Ask udev to reload its rules and re-trigger device events so the new
/// totalmapper rule applies to devices that are already plugged in.
///
/// Improvement: a udevadm run that exits with a non-zero status is now
/// reported as an error instead of being silently treated as success.
pub fn refresh_udev() -> Result<(), String> {
    let status = Command::new("/usr/bin/udevadm").args(&["control", "--reload"]).status()
        .map_err(|e| format!("Failed to run udevadm: {}", e))?;
    if !status.success() {
        return Err(format!("udevadm control --reload failed with {}", status));
    }
    // `output()` (not `status()`) so trigger's stdout is captured rather than
    // spilled onto the user's terminal.
    let output = Command::new("/usr/bin/udevadm").args(&["trigger"]).output()
        .map_err(|e| format!("Failed to run udevadm: {}", e))?;
    if !output.status.success() {
        return Err(format!("udevadm trigger failed with {}", output.status));
    }
    Ok(())
}
/// Reload systemd unit files so the freshly written template unit is known.
///
/// Improvement: a systemctl run that exits with a non-zero status is now
/// reported as an error instead of being silently treated as success.
pub fn refresh_systemd() -> Result<(), String> {
    let status = Command::new("/usr/bin/systemctl").args(&["daemon-reload"]).status()
        .map_err(|e| format!("Failed to reload systemd: {}", e))?;
    if !status.success() {
        return Err(format!("systemctl daemon-reload failed with {}", status));
    }
    Ok(())
}
|
use std::io::{self, Stdout, stdout, stdin};
use termion::{TermRead, IntoRawMode, RawTerminal};
use super::*;
/// The default for `Context.word_fn`.
///
/// Splits `buf` into words, returning `(start, end)` half-open char-index
/// ranges. Words are separated by unescaped spaces; a backslash escapes the
/// character that follows it, so `foo\ bar` is one word.
pub fn get_buffer_words(buf: &Buffer) -> Vec<(usize, usize)> {
    let mut res = Vec::new();
    // Char index where the current word began, when inside a word.
    let mut word_start = None;
    let mut just_had_backslash = false;
    for (i, &c) in buf.chars().enumerate() {
        if c == '\\' {
            // Mark the following character as escaped.
            // NOTE(review): a backslash never *starts* a word by itself here;
            // `word_start` is only set on the character after it. Confirm
            // that is intended for inputs like a lone `\ `.
            just_had_backslash = true;
            continue;
        }
        if let Some(start) = word_start {
            // An unescaped space terminates the current word.
            if c == ' ' && !just_had_backslash {
                res.push((start, i));
                word_start = None;
            }
        } else {
            if c != ' ' {
                word_start = Some(i);
            }
        }
        just_had_backslash = false;
    }
    // Close a word that runs to the end of the buffer.
    if let Some(start) = word_start {
        res.push((start, buf.num_chars()));
    }
    res
}
/// Shared line-editor state: the input history, an optional completer, and
/// the word-splitting function used for word-wise movement and completion.
pub struct Context {
    pub history: Vec<Buffer>,
    pub completer: Option<Box<Completer>>,
    pub word_fn: Box<Fn(&Buffer) -> Vec<(usize, usize)>>,
}
impl Context {
    pub fn new() -> Self {
        Context {
            history: vec![],
            completer: None,
            word_fn: Box::new(get_buffer_words),
        }
    }
    /// Creates an `Editor` and feeds it keypresses from stdin until the line is entered.
    /// The output is stdout.
    /// The returned line has the newline removed.
    /// Before returning, will revert all changes to the history buffers.
    pub fn read_line<P: Into<String>>(&mut self,
                                      prompt: P,
                                      mut handler: &mut EventHandler<RawTerminal<Stdout>>)
                                      -> io::Result<String> {
        // Inner scope so the raw-mode stdout guard is dropped (terminal
        // restored) before history is reverted and the result returned.
        let res = {
            let stdin = stdin();
            // Raw mode is needed for per-keypress input; restored on drop.
            let stdout = stdout().into_raw_mode().unwrap();
            let mut ed = try!(Editor::new(stdout, prompt.into(), self));
            for c in stdin.keys() {
                // `handle_key` returns true once the line is complete.
                if try!(ed.handle_key(c.unwrap(), handler)) {
                    break;
                }
            }
            Ok(ed.into())
        };
        // Undo in-place edits the user made to history entries while browsing.
        self.revert_all_history();
        res
    }
    /// Discard uncommitted edits made to history entries during editing.
    pub fn revert_all_history(&mut self) {
        for buf in &mut self.history {
            buf.revert();
        }
    }
}
|
use std::fs;
// Advent of Code 2020 day 6: groups are separated by blank lines; each line
// within a group is one person's "yes" answers.
fn main() {
    // Read input file
    let contents = fs::read_to_string("input.txt").expect("Failed reading input file");
    // Challenge1: per group, count questions ANYONE answered yes to.
    let count1: usize = contents
        .split("\n\n")
        .map(|x| calc_group1(x))
        .sum();
    println!("Challenge1: {}", count1);
    // Challenge2: per group, count questions EVERYONE answered yes to.
    let count2: usize = contents
        .split("\n\n")
        .map(|x| calc_group2(x))
        .sum();
    println!("Challenge2: {}", count2);
}
/// Number of distinct questions answered "yes" by anyone in the group:
/// the count of unique non-newline characters in the group's text.
fn calc_group1(group: &str) -> usize {
    group
        .chars()
        .filter(|&c| c != '\n')
        .collect::<std::collections::HashSet<char>>()
        .len()
}
/// Number of distinct questions answered "yes" by EVERYONE in the group.
///
/// Improvements over the original: the pointless `.clone()` of the lines
/// iterator is gone, and only the first person's characters are scanned —
/// any character common to all persons necessarily appears in the first
/// person's line, so iterating every person's characters was redundant work.
fn calc_group2(group: &str) -> usize {
    // An empty group has nobody, hence no common answers.
    let first = match group.lines().next() {
        Some(line) => line,
        None => return 0,
    };
    let mut counted = String::new();
    for c in first.chars() {
        // Dedup via `counted`, then require the char in every person's line.
        if !counted.contains(c) && group.lines().all(|person| person.contains(c)) {
            counted.push(c);
        }
    }
    counted.len()
}
|
use serde::Deserialize;
use common::error::Error;
use common::event::EventPublisher;
use common::result::Result;
use crate::domain::publication::{Image, Page, PublicationId, PublicationRepository};
/// Incoming payload for a single page: the list of its image URLs.
#[derive(Deserialize)]
pub struct PageDto {
    images: Vec<String>,
}
/// Command payload that replaces a publication's pages wholesale.
#[derive(Deserialize)]
pub struct UpdatePagesCommand {
    pages: Vec<PageDto>,
}
impl UpdatePagesCommand {
    /// Structural validation hook; currently there is nothing to check, so it
    /// always succeeds. Kept so `exec` has a stable validation entry point.
    pub fn validate(&self) -> Result<()> {
        Ok(())
    }
}
/// Use case: replace every page of a publication owned by a given author.
pub struct UpdatePages<'a> {
    event_pub: &'a dyn EventPublisher,
    publication_repo: &'a dyn PublicationRepository,
}
impl<'a> UpdatePages<'a> {
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        publication_repo: &'a dyn PublicationRepository,
    ) -> Self {
        UpdatePages {
            event_pub,
            publication_repo,
        }
    }
    /// Validate the command, check ownership, rebuild the page list from the
    /// DTOs (pages are numbered by their 0-based position in the request),
    /// persist the publication, and publish the recorded domain events.
    pub async fn exec(
        &self,
        author_id: String,
        publication_id: String,
        cmd: UpdatePagesCommand,
    ) -> Result<()> {
        cmd.validate()?;
        let publication_id = PublicationId::new(&publication_id)?;
        let mut publication = self.publication_repo.find_by_id(&publication_id).await?;
        // Only the owning author may modify the publication.
        if publication.author_id().value() != author_id {
            return Err(Error::new("publication", "unauthorized"));
        }
        let mut pages = Vec::new();
        for (page_n, page) in cmd.pages.into_iter().enumerate() {
            let mut images = Vec::new();
            for image in page.images.into_iter() {
                images.push(Image::new(image)?);
            }
            // Page number comes from request order, starting at 0.
            let mut page = Page::new(page_n as u32)?;
            page.set_images(images)?;
            pages.push(page);
        }
        publication.set_pages(pages)?;
        self.publication_repo.save(&mut publication).await?;
        // Publish events the aggregate recorded during the update.
        self.event_pub
            .publish_all(publication.base().events()?)
            .await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::mocks;
    // Happy path: the owning author replaces the pages; the publication ends
    // up with 3 pages and exactly one domain event is published.
    #[tokio::test]
    async fn valid() {
        let c = mocks::container();
        let uc = UpdatePages::new(c.event_pub(), c.publication_repo());
        let author = mocks::author1();
        let mut publication = mocks::publication1();
        c.publication_repo().save(&mut publication).await.unwrap();
        uc.exec(
            author.base().id().to_string(),
            publication.base().id().to_string(),
            UpdatePagesCommand {
                pages: vec![
                    PageDto {
                        images: vec![
                            "domain.com/image1".to_owned(),
                            "domain.com/image2".to_owned(),
                        ],
                    },
                    PageDto {
                        images: vec![
                            "domain.com/image3".to_owned(),
                            "domain.com/image4".to_owned(),
                        ],
                    },
                    PageDto {
                        images: vec!["domain.com/image5".to_owned()],
                    },
                ],
            },
        )
        .await
        .unwrap();
        let publication = c
            .publication_repo()
            .find_by_id(&publication.base().id())
            .await
            .unwrap();
        assert_eq!(publication.pages().len(), 3);
        assert_eq!(c.event_pub().events().await.len(), 1);
    }
    // A malformed publication id ("#invalid") must make `exec` fail before
    // any repository mutation happens.
    #[tokio::test]
    async fn invalid() {
        let c = mocks::container();
        let uc = UpdatePages::new(c.event_pub(), c.publication_repo());
        let author = mocks::author1();
        assert!(uc
            .exec(
                author.base().id().to_string(),
                "#invalid".to_owned(),
                UpdatePagesCommand {
                    pages: vec![
                        PageDto {
                            images: vec![
                                "domain.com/image1".to_owned(),
                                "domain.com/image2".to_owned()
                            ],
                        },
                        PageDto {
                            images: vec![
                                "domain.com/image3".to_owned(),
                                "domain.com/image4".to_owned()
                            ],
                        },
                        PageDto {
                            images: vec!["domain.com/image5".to_owned()],
                        },
                    ],
                },
            )
            .await
            .is_err());
    }
}
|
use std::collections::HashMap;
const START: i32 = 245182;
const END: i32 = 790572;
/// Decompose `n` (0 ..= 999999) into exactly six decimal digits,
/// most significant first, zero-padded on the left.
fn digits(n: i32) -> Vec<i32> {
    assert!((0..1_000_000).contains(&n));
    let mut out = vec![0; 6];
    let mut rest = n;
    // Fill from the least significant slot backwards.
    for slot in out.iter_mut().rev() {
        *slot = rest % 10;
        rest /= 10;
    }
    out
}
#[cfg(test)]
mod tests {
    // `digits` yields a fixed-width (6-digit) decomposition, most
    // significant digit first, zero-padded for small inputs.
    #[test]
    fn test_digits() {
        assert_eq!(crate::digits(0), [0,0,0,0,0,0]);
        assert_eq!(crate::digits(123456), [1,2,3,4,5,6]);
        assert_eq!(crate::digits(17), [0,0,0,0,1,7]);
    }
}
/// True when the digits never decrease left to right. Mirrors the original's
/// implicit leading comparison against 0: a negative first element fails.
fn is_increasing(digits: &[i32]) -> bool {
    match digits.first() {
        None => true,
        Some(&first) => first >= 0 && digits.windows(2).all(|pair| pair[0] <= pair[1]),
    }
}
/// True when at least two adjacent digits are equal.
///
/// Bug fix: the original read `digits[0]` unconditionally and panicked on an
/// empty slice; `windows(2)` yields nothing for slices shorter than 2, so
/// empty and single-element inputs now safely return `false`.
fn has_double(digits: &[i32]) -> bool {
    digits.windows(2).any(|pair| pair[0] == pair[1])
}
/// True when some digit value occurs exactly twice in the slice
/// (a run of three or more of the same digit does not count).
fn has_exact_double(digits: &[i32]) -> bool {
    let mut counts: HashMap<i32, i32> = HashMap::new();
    for &d in digits {
        *counts.entry(d).or_insert(0) += 1
    }
    counts.values().any(|&occurrences| occurrences == 2)
}
fn main() {
let mut count = 0i32;
let mut count_exact = 0i32;
for n in START..=END {
let ds = digits(n);
if is_increasing(&ds) && has_double(&ds) {
count += 1;
if has_exact_double(&ds) {
count_exact += 1;
}
}
}
println!("{}", count);
println!("{}", count_exact);
}
|
// use std::collections::HashMap;
// use std::collections::HashSet;
use std::io::{self};
// Advent of Code 2020 day 11 (seating system): simulate seat occupancy rules
// to a fixed point, twice — part 1 with adjacent neighbours, part 2 with
// line-of-sight neighbours.
fn main() -> io::Result<()> {
    // NOTE(review): still pointed at the sample input; switch to "input.txt"
    // for the real puzzle.
    let f = "test.txt";
    // let f = "input.txt";
    let vec: Vec<String> = std::fs::read_to_string(f)?
        .lines()
        .map(|x| x.to_string())
        .collect();
    // Pre-flip every empty seat 'L' to occupied '#': since a free seat with
    // no occupied neighbours becomes occupied, this is equivalent to running
    // one initial round up front.
    let vec: Vec<Vec<char>> = vec
        .into_iter()
        .map(|x| x.replace("L", "#").chars().collect())
        .collect();
    for v in vec.iter() {
        println!("{:?}", v);
    }
    // The eight compass directions as (column, row) offsets.
    let directions = vec![
        (-1, -1),
        (-1, 0),
        (-1, 1),
        (0, -1),
        (0, 1),
        (1, -1),
        (1, 0),
        (1, 1),
    ];
    let occ = '#';
    let free = 'L';
    let floor = '.';
    // Double-buffered grids: read from `before`, write changes into `after`.
    let mut after = vec.to_vec();
    let mut before = vec.to_vec();
    // Part 1: neighbours are the 8 adjacent cells; an occupied seat with
    // >= 4 occupied neighbours frees up, a free seat with 0 fills up.
    loop {
        let mut changed = false;
        for (i, row) in before.iter().enumerate() {
            for (j, ch) in row.iter().enumerate() {
                let ch = *ch;
                if ch == floor {
                    continue;
                }
                let mut adj = 0;
                for dir in directions.iter() {
                    let (horiz, vert) = dir;
                    let cj = j as i32 + horiz;
                    let ci = i as i32 + vert;
                    // Skip neighbours outside the grid.
                    if ci < 0 || ci >= before.len() as i32 || cj < 0 || cj >= row.len() as i32 {
                        continue;
                    }
                    if before[ci as usize][cj as usize] == occ {
                        adj += 1;
                    }
                }
                if ch == occ && adj >= 4 {
                    after[i][j] = free;
                    changed = true;
                }
                if ch == free && adj == 0 {
                    after[i][j] = occ;
                    changed = true;
                }
            }
        }
        // Fixed point reached: no seat changed this round.
        if !changed {
            break;
        }
        // println!("New round");
        // for v in after.iter() {
        //     println!("{:?}", v);
        // }
        before = after.to_vec();
    }
    // Count occupied seats in the stabilised grid (part 1 answer).
    let mut sum = 0;
    for v in after.iter() {
        for c in v.iter() {
            if *c == '#' {
                sum += 1;
            }
        }
    }
    println!("Occupied seats: {}", sum);
    // Part 2: reset the grids; a neighbour is now the FIRST SEAT visible
    // along each direction (floor is seen through), and the free-up
    // threshold rises to 5.
    let mut after = vec.to_vec();
    let mut before = vec.to_vec();
    loop {
        let mut changed = false;
        for (i, row) in before.iter().enumerate() {
            for (j, ch) in row.iter().enumerate() {
                let ch = *ch;
                if ch == floor {
                    continue;
                }
                let mut occ_count = 0;
                for dir in directions.iter() {
                    let (horiz, vert) = dir;
                    let mut cj = j as i32 + horiz;
                    let mut ci = i as i32 + vert;
                    // Walk the ray until a seat or the edge stops it.
                    while ci >= 0 && ci < before.len() as i32 && cj >= 0 && cj < row.len() as i32 {
                        if before[ci as usize][cj as usize] == free {
                            break;
                        }
                        if before[ci as usize][cj as usize] == occ {
                            occ_count += 1;
                            break;
                        }
                        cj = cj + horiz;
                        ci = ci + vert;
                    }
                }
                if ch == occ && occ_count >= 5 {
                    after[i][j] = free;
                    changed = true;
                }
                if ch == free && occ_count == 0 {
                    after[i][j] = occ;
                    changed = true;
                }
            }
        }
        if !changed {
            break;
        }
        // println!("New round");
        // for v in after.iter() {
        //     println!("{:?}", v);
        // }
        before = after.to_vec();
    }
    // Count occupied seats in the stabilised grid (part 2 answer).
    let mut sum = 0;
    for v in after.iter() {
        for c in v.iter() {
            if *c == occ {
                sum += 1;
            }
        }
    }
    println!("Occupied seats: {}", sum);
    Ok(())
}
|
/// JWT claims payload serialized into/out of tokens.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    /// Subject — presumably the user identifier; confirm against the issuer.
    pub sub: String,
    /// Expiration time — presumably a unix timestamp in seconds (standard
    /// JWT `exp`); confirm against the token validation code.
    pub exp: i64,
}
|
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
/// Build-time code generation: write `pop_tensor.rs` into OUT_DIR containing
/// a `populate_tensor!` macro plus, per supported tensor type, a pair of
/// `generate_sized_*` / `generate_unsized_*` helper functions.
fn implement_tensor_construction() {
    // Emit the `populate_tensor!` macro with one pair of match arms per type.
    // NOTE: the literal text inside the string constants is the generated
    // source; do not re-indent it casually.
    fn implement_macro(supported_types: &[(&str, &str)], f: &mut File) {
        f.write_all(b"
/// Populate either known size tensor or populate a
/// tensor then derive the size based on populated Vec
#[macro_export]
macro_rules! populate_tensor {").expect("Fail to auto implement populate_tensor macro header");
        for tar in supported_types {
            // `{{` / `}}` render as literal braces in the generated macro.
            f.write_all(format!("
({ty}, $size: expr, $cb: expr) => (
$crate::generate_sized_{ty}_tensor($size, $cb)
);
({ty}, $generator: expr) => (
{{
$crate::generate_unsized_{ty}_tensor($generator)
}}
);
",
            ty=tar.1
            ).as_bytes()).expect("Fail to auto implement populate_tensor code macro");
        }
        f.write_all(b"}").expect("Fail to auto implement populate_tensor macro close brace");
    };
    // Emit the per-type helper functions the macro arms delegate to.
    fn implement_private_fn(supported_types: &[(&str, &str)], f: &mut File) {
        for tar in supported_types {
            f.write_all(format!("
/// Perform tensor construction for data type {ty}. It'll create an empty 1d {ts}
/// then populate it by call the callback function on each element of mutable data ref
/// along with the current index of given data.
pub fn generate_sized_{ty}_tensor(size: usize, cb: impl FnMut((usize, &mut {ty}))) -> {ts} {{
let mut tensor = {ts}::new_with_size_1d(size);
tensor.data_mut().iter_mut().enumerate().for_each(cb);
tensor
}}
/// Perform data populate by using generator closure which expected to return a
/// slice of the {ty}. It then perform deep copy of each
pub fn generate_unsized_{ty}_tensor<'a>(mut generator: impl FnMut() -> &'a [{ty}]) -> {ts} {{
{ts}::from(generator())
}}
",
            ts=tar.0,
            ty=tar.1,
            ).as_bytes()).expect("Auto implementation error on implementing tensor generating private functions");
        }
    };
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("pop_tensor.rs");
    let mut f = File::create(&dest_path).unwrap();
    // (TensorType, element type) pairs the generated code covers.
    let supported_types = [
        ("ByteTensor", "u8"),
        ("CharTensor", "i8"),
        ("FloatTensor", "f32"),
        ("DoubleTensor", "f64"),
        ("IntTensor", "i32"),
        ("LongTensor", "i64"),
        ("ShortTensor", "i16")
    ];
    implement_macro(&supported_types, &mut f);
    implement_private_fn(&supported_types, &mut f);
}
// Cargo build script entry point.
fn main() {
    // Generate pop_tensor.rs into OUT_DIR for include!() by the crate.
    implement_tensor_construction();
    // Add ./clib to the native library search path.
    println!(r"cargo:rustc-link-search=clib");
}
/// Print a short self-introduction assembled from three string constants.
fn main() {
    let name = "Ricky";
    let address = "Tangerang City";
    let hobby = "sports and music";
    println!("my name : {} im from :{}, my hobby : {}", name, address, hobby);
}
|
use std::fmt;
/// A numbered trouble ticket passed along the support chain.
#[derive(Clone)]
struct Trouble {
    number: u32,
}
impl Trouble {
    fn new(number: u32) -> Trouble {
        Trouble {
            number: number,
        }
    }
    /// The ticket number; handlers use it to decide resolvability.
    fn get_number(&self) -> u32 {
        self.number
    }
}
impl fmt::Display for Trouble {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[Trouble {}]", self.number)
    }
}
/// Chain-of-Responsibility handler: try to resolve a trouble yourself,
/// otherwise pass it on to the next handler in the chain.
trait Support : SupportClone {
    /// Whether this handler can resolve `trouble`.
    fn resolve(&self, trouble: Trouble) -> bool;
    /// Resolve `trouble` or delegate it down the chain.
    fn support(&self, trouble: Trouble);
    /// Link `next` after this handler; returns `next` to allow chaining calls.
    fn set_next(&mut self, next: Box<Support>) -> Box<Support>;
}
/// Object-safe cloning for boxed `Support` trait objects — the classic
/// "clone box" pattern, needed because `Clone` itself is not object safe.
trait SupportClone {
    fn clone_box(&self) -> Box<Support>;
}
impl<T> SupportClone for T where T: 'static + Support + Clone {
    fn clone_box(&self) -> Box<Support> {
        Box::new(self.clone())
    }
}
impl Clone for Box<Support> {
    fn clone(&self) -> Box<Support> {
        self.clone_box()
    }
}
/// Demo handler that never resolves anything: it always delegates, or
/// reports failure when it sits at the end of the chain.
#[derive(Clone)]
struct NoSupport {
    name: String,
    next: Option<Box<Support>>,
}
impl NoSupport {
    fn new(name: String) -> NoSupport {
        NoSupport {
            name: name,
            next: None,
        }
    }
    // Report a successfully handled trouble.
    fn done(&self, trouble: Trouble) {
        println!("{} is resolved by {}.", trouble, self);
    }
    // Report that the chain is exhausted without a resolution.
    fn fail(&self, trouble: Trouble) {
        println!("{} cannot be resolved.", trouble);
    }
}
impl Support for NoSupport {
    fn resolve(&self, trouble: Trouble) -> bool {
        false
    }
    fn support(&self, trouble: Trouble) {
        if self.resolve(trouble.clone()) {
            self.done(trouble);
        } else {
            // Clone the boxed successor (clone-box pattern) and delegate.
            match self.next.clone() {
                Some(n) => n.support(trouble),
                None => self.fail(trouble),
            }
        }
    }
    fn set_next(&mut self, next: Box<Support>) -> Box<Support> {
        self.next = Some(next.clone());
        next
    }
}
impl fmt::Display for NoSupport {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.name)
    }
}
/// Handler that resolves any trouble whose number is strictly below `limit`.
#[derive(Clone)]
struct LimitSupport {
    name: String,
    next: Option<Box<Support>>,
    limit: u32,
}
impl LimitSupport {
    fn new(name: String, limit: u32) -> LimitSupport {
        LimitSupport {
            name,
            next: None,
            limit,
        }
    }
    // Report a successfully handled trouble.
    fn done(&self, trouble: Trouble) {
        println!("{} is resolved by {}.", trouble, self);
    }
    // Report that the chain is exhausted without a resolution.
    fn fail(&self, trouble: Trouble) {
        println!("{} cannot be resolved.", trouble);
    }
}
impl Support for LimitSupport {
    fn resolve(&self, trouble: Trouble) -> bool {
        // Idiom fix: `if cond { true } else { false }` collapsed to the condition.
        trouble.get_number() < self.limit
    }
    fn support(&self, trouble: Trouble) {
        if self.resolve(trouble.clone()) {
            self.done(trouble);
        } else {
            // Clone the boxed successor (clone-box pattern) and delegate.
            match self.next.clone() {
                Some(n) => n.support(trouble),
                None => self.fail(trouble),
            }
        }
    }
    fn set_next(&mut self, next: Box<Support>) -> Box<Support> {
        self.next = Some(next.clone());
        next
    }
}
impl fmt::Display for LimitSupport {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.name)
    }
}
/// Handler that resolves troubles with odd numbers.
#[derive(Clone)]
struct OddSupport {
    name: String,
    next: Option<Box<Support>>,
}
impl OddSupport {
    fn new(name: String) -> OddSupport {
        OddSupport {
            name,
            next: None,
        }
    }
    // Report a successfully handled trouble.
    fn done(&self, trouble: Trouble) {
        println!("{} is resolved by {}.", trouble, self);
    }
    // Report that the chain is exhausted without a resolution.
    fn fail(&self, trouble: Trouble) {
        println!("{} cannot be resolved.", trouble);
    }
}
impl Support for OddSupport {
    fn resolve(&self, trouble: Trouble) -> bool {
        // Idiom fix: `if cond { true } else { false }` collapsed to the condition.
        trouble.get_number() % 2 == 1
    }
    fn support(&self, trouble: Trouble) {
        if self.resolve(trouble.clone()) {
            self.done(trouble);
        } else {
            // Clone the boxed successor (clone-box pattern) and delegate.
            match self.next.clone() {
                Some(n) => n.support(trouble),
                None => self.fail(trouble),
            }
        }
    }
    fn set_next(&mut self, next: Box<Support>) -> Box<Support> {
        self.next = Some(next.clone());
        next
    }
}
impl fmt::Display for OddSupport {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.name)
    }
}
/// Handler that resolves exactly one specific trouble number.
#[derive(Clone)]
struct SpecialSupport {
    name: String,
    next: Option<Box<Support>>,
    number: u32,
}
impl SpecialSupport {
    fn new(name: String, number: u32) -> SpecialSupport {
        SpecialSupport {
            name,
            next: None,
            number,
        }
    }
    // Report a successfully handled trouble.
    fn done(&self, trouble: Trouble) {
        println!("{} is resolved by {}.", trouble, self);
    }
    // Report that the chain is exhausted without a resolution.
    fn fail(&self, trouble: Trouble) {
        println!("{} cannot be resolved.", trouble);
    }
}
impl Support for SpecialSupport {
    fn resolve(&self, trouble: Trouble) -> bool {
        // Idiom fix: `if cond { true } else { false }` collapsed to the condition.
        trouble.get_number() == self.number
    }
    fn support(&self, trouble: Trouble) {
        if self.resolve(trouble.clone()) {
            self.done(trouble);
        } else {
            // Clone the boxed successor (clone-box pattern) and delegate.
            match self.next.clone() {
                Some(n) => n.support(trouble),
                None => self.fail(trouble),
            }
        }
    }
    fn set_next(&mut self, next: Box<Support>) -> Box<Support> {
        self.next = Some(next.clone());
        next
    }
}
impl fmt::Display for SpecialSupport {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}]", self.name)
    }
}
fn main() {
    // Build the chain Alice -> Bob -> Charlie -> Diana -> Elmo -> Fred.
    let mut alice = Box::new(NoSupport::new("Alice".to_string()));
    let mut bob = Box::new(LimitSupport::new("Bob".to_string(), 100));
    let mut charlie = Box::new(SpecialSupport::new("Charlie".to_string(), 429));
    let mut diana = Box::new(LimitSupport::new("Diana".to_string(), 200));
    let mut elmo = Box::new(OddSupport::new("Elmo".to_string()));
    let fred = Box::new(LimitSupport::new("fred".to_string(), 300));
    // Wire back-to-front so each `set_next` clones an already-complete tail.
    elmo.set_next(fred);
    diana.set_next(elmo);
    charlie.set_next(diana);
    bob.set_next(charlie);
    alice.set_next(bob);
    // Feed troubles 0, 33, 66, ..., 495 into the head of the chain.
    let mut i = 0;
    while i < 500 {
        alice.support(Trouble::new(i));
        i += 33;
    }
}
|
use crate::db::users::User;
use crate::routes::json_generic::JsonGeneric;
use rocket::response::status::BadRequest;
use rocket_contrib::json::Json;
// Register a new device
/// POST handler: register a new device/user from a JSON body. On success the
/// (id-populated) user is echoed back; on failure a 400 with a generic JSON
/// error payload is returned.
#[post("/", format = "json", data = "<request_data>")]
pub fn register(mut request_data: Json<User>) -> Result<Json<User>, BadRequest<Json<JsonGeneric>>> {
    match register_do(&mut request_data) {
        Ok(_) => Ok(request_data),
        Err(e) => Err(JsonGeneric::new_bad_request_generic(e.to_string())),
    }
}
// performs the registration
/// Perform the registration: derive the user's id from their token, reject
/// duplicates, and insert the new record.
///
/// Bug fix: the original printed a failed insert ("fine"/debug output) and
/// still returned `Ok(0)`, so the caller reported success for a registration
/// that was never stored. Insert failures now propagate as `Err`.
pub fn register_do(user_data: &mut User) -> Result<i32, String> {
    // Generate random id from token.
    if let Err(e) = user_data.generate_id() {
        return Err(e.to_string());
    }
    // Check if the user exists; a duplicate registration is an error.
    let exists = user_data.exists().map_err(|e| e.to_string())?;
    if exists {
        return Err("user already exists".to_string());
    }
    // Insert the new record, propagating any database error.
    user_data.insert().map_err(|e| format!("{:?}", e))?;
    Ok(0)
}
|
// NOTE: svd2rust-generated register accessors; edit the SVD, not this file.
#[doc = "Register `APB1RSTR2` reader"]
pub type R = crate::R<APB1RSTR2_SPEC>;
#[doc = "Register `APB1RSTR2` writer"]
pub type W = crate::W<APB1RSTR2_SPEC>;
#[doc = "Field `LPUART1RST` reader - Low-power UART 1 reset"]
pub type LPUART1RST_R = crate::BitReader<LPUART1RST_A>;
#[doc = "Low-power UART 1 reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum LPUART1RST_A {
    #[doc = "0: No effect"]
    NoReset = 0,
    #[doc = "1: Reset peripheral"]
    Reset = 1,
}
impl From<LPUART1RST_A> for bool {
    #[inline(always)]
    fn from(variant: LPUART1RST_A) -> Self {
        variant as u8 != 0
    }
}
impl LPUART1RST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPUART1RST_A {
        match self.bits {
            false => LPUART1RST_A::NoReset,
            true => LPUART1RST_A::Reset,
        }
    }
    #[doc = "No effect"]
    #[inline(always)]
    pub fn is_no_reset(&self) -> bool {
        *self == LPUART1RST_A::NoReset
    }
    #[doc = "Reset peripheral"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == LPUART1RST_A::Reset
    }
}
#[doc = "Field `LPUART1RST` writer - Low-power UART 1 reset"]
pub type LPUART1RST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, LPUART1RST_A>;
impl<'a, REG, const O: u8> LPUART1RST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No effect"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1RST_A::NoReset)
    }
    #[doc = "Reset peripheral"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1RST_A::Reset)
    }
}
#[doc = "Field `LPTIM2RST` reader - Low-power timer 2 reset"]
pub use LPUART1RST_R as LPTIM2RST_R;
#[doc = "Field `LPTIM3RST` reader - Low-power timer 3 reset"]
pub use LPUART1RST_R as LPTIM3RST_R;
#[doc = "Field `LPTIM2RST` writer - Low-power timer 2 reset"]
pub use LPUART1RST_W as LPTIM2RST_W;
#[doc = "Field `LPTIM3RST` writer - Low-power timer 3 reset"]
pub use LPUART1RST_W as LPTIM3RST_W;
// Read-side accessors; each extracts one reset bit from the cached register value.
impl R {
    #[doc = "Bit 0 - Low-power UART 1 reset"]
    #[inline(always)]
    pub fn lpuart1rst(&self) -> LPUART1RST_R {
        LPUART1RST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 5 - Low-power timer 2 reset"]
    #[inline(always)]
    pub fn lptim2rst(&self) -> LPTIM2RST_R {
        LPTIM2RST_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Low-power timer 3 reset"]
    #[inline(always)]
    pub fn lptim3rst(&self) -> LPTIM3RST_R {
        LPTIM3RST_R::new(((self.bits >> 6) & 1) != 0)
    }
}
// Write-side accessors; each returns a bit-writer proxy for one reset field.
impl W {
    #[doc = "Bit 0 - Low-power UART 1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1rst(&mut self) -> LPUART1RST_W<APB1RSTR2_SPEC, 0> {
        LPUART1RST_W::new(self)
    }
    #[doc = "Bit 5 - Low-power timer 2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lptim2rst(&mut self) -> LPTIM2RST_W<APB1RSTR2_SPEC, 5> {
        LPTIM2RST_W::new(self)
    }
    #[doc = "Bit 6 - Low-power timer 3 reset"]
    #[inline(always)]
    #[must_use]
    pub fn lptim3rst(&mut self) -> LPTIM3RST_W<APB1RSTR2_SPEC, 6> {
        LPTIM3RST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY contract (caller): `bits` must be a valid APB1RSTR2 value.
        self.bits = bits;
        self
    }
}
#[doc = "APB1 peripheral reset register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1rstr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1rstr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB1RSTR2_SPEC;
impl crate::RegisterSpec for APB1RSTR2_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb1rstr2::R`](R) reader structure"]
impl crate::Readable for APB1RSTR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb1rstr2::W`](W) writer structure"]
impl crate::Writable for APB1RSTR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB1RSTR2 to value 0"]
impl crate::Resettable for APB1RSTR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::*;
// Singleton objects (Nil/Undef/True/False): Display formatting, pairwise
// (in)equality, and the typed accessors behave as expected.
#[test]
fn simple() {
    let vm = VM::new();
    assert_eq!("nil", format!("{}", vm.wrap(Object::Nil)));
    assert_eq!("#undefined", format!("{}", vm.wrap(Object::Undef)));
    assert_eq!("#t", format!("{}", vm.wrap(Object::True)));
    assert_eq!("#f", format!("{}", vm.wrap(Object::False)));
    assert_eq!(Object::Nil, Object::Nil);
    assert_eq!(Object::Undef, Object::Undef);
    assert_eq!(Object::True, Object::True);
    assert_eq!(Object::False, Object::False);
    assert_ne!(Object::Nil, Object::Undef);
    assert_ne!(Object::Nil, Object::True);
    assert_ne!(Object::Nil, Object::False);
    assert_ne!(Object::Undef, Object::Nil);
    assert_ne!(Object::Undef, Object::True);
    assert_ne!(Object::Undef, Object::False);
    assert_ne!(Object::True, Object::Nil);
    assert_ne!(Object::True, Object::Undef);
    assert_ne!(Object::True, Object::False);
    assert_ne!(Object::False, Object::Nil);
    assert_ne!(Object::False, Object::Undef);
    assert_ne!(Object::False, Object::True);
    // Typed accessors yield None except for the matching type.
    assert_eq!(None, Object::Nil.as_int());
    assert_eq!(None, Object::Undef.as_int());
    assert_eq!(None, Object::True.as_int());
    assert_eq!(None, Object::False.as_int());
    assert_eq!(None, Object::Nil.as_float());
    assert_eq!(None, Object::Undef.as_float());
    assert_eq!(None, Object::True.as_float());
    assert_eq!(None, Object::False.as_float());
    assert_eq!(None, Object::Nil.as_bool());
    assert_eq!(None, Object::Undef.as_bool());
    assert_eq!(Some(true), Object::True.as_bool());
    assert_eq!(Some(false), Object::False.as_bool());
}
// Fixnums: formatting across the full isize range, equality, and typed accessors.
#[test]
fn fixnums() {
    let vm = VM::new();
    assert_eq!("0", format!("{}", vm.wrap(vm.int(0))));
    assert_eq!("-1", format!("{}", vm.wrap(vm.int(-1))));
    assert_eq!("1", format!("{}", vm.wrap(vm.int(1))));
    // Extremes must round-trip through wrap/format without loss.
    assert_eq!(
        format!("{}", isize::MAX),
        format!("{}", vm.wrap(vm.int(isize::MAX))),
    );
    assert_eq!(
        format!("{}", isize::MIN),
        format!("{}", vm.wrap(vm.int(isize::MIN))),
    );
    assert_eq!(vm.int(1), vm.int(1));
    assert_ne!(vm.int(-1), vm.int(1));
    assert_ne!(vm.int(1), Object::Nil);
    assert_ne!(vm.int(1), Object::Undef);
    assert_ne!(vm.int(1), Object::True);
    assert_ne!(vm.int(1), Object::False);
    assert_eq!(Some(1), vm.int(1).as_int());
    assert_eq!(Some(-1), vm.int(-1).as_int());
    // Ints are not implicitly floats or bools.
    assert_eq!(None, vm.int(1).as_float());
    assert_eq!(None, vm.int(1).as_bool());
}
// Floats: formatting, equality (no int/float coercion), and typed accessors.
#[test]
fn floats() {
    let vm = VM::new();
    assert_eq!("0.0", format!("{}", vm.wrap(vm.float(0.0))));
    assert_eq!("0.1", format!("{}", vm.wrap(vm.float(0.1))));
    assert_eq!("123.45", format!("{}", vm.wrap(vm.float(123.45))));
    assert_eq!(vm.float(1.0), vm.float(1.0));
    assert_eq!(vm.float(-1.0), vm.float(-1.0));
    // float(-1.0) and int(-1) are distinct objects.
    assert_ne!(vm.float(-1.0), vm.int(-1));
    assert_ne!(vm.float(1.0), Object::Nil);
    assert_ne!(vm.float(1.0), Object::Undef);
    assert_ne!(vm.float(1.0), Object::True);
    assert_ne!(vm.float(1.0), Object::False);
    assert_eq!(None, vm.float(2.3).as_int());
    assert_eq!(Some(0.0), vm.float(0.0).as_float());
    assert_eq!(Some(2.3), vm.float(2.3).as_float());
    assert_eq!(None, vm.float(2.3).as_bool());
}
// Cons cells: proper lists, improper (dotted) tails, car/cdr, and printing.
#[test]
fn conses() {
    let mut vm = VM::new();
    // (0) — single-element proper list.
    let a = vm.cons(vm.int(0), Object::Nil);
    assert_eq!("(0)", format!("{}", vm.wrap(a)));
    assert_eq!(Some(vm.int(0)), vm.car(a));
    assert_eq!(Some(Object::Nil), vm.cdr(a));
    // (1 0) — prepend onto an existing list.
    let b = vm.cons(vm.int(1), a);
    assert_eq!("(1 0)", format!("{}", vm.wrap(b)));
    assert_eq!(Some(vm.int(1)), vm.car(b));
    assert_eq!(Some(a), vm.cdr(b));
    // (1 . 0) — improper pair with a non-nil tail.
    let c = vm.cons(vm.int(1), vm.int(0));
    assert_eq!("(1 . 0)", format!("{}", vm.wrap(c)));
    assert_eq!(Some(vm.int(1)), vm.car(c));
    assert_eq!(Some(vm.int(0)), vm.cdr(c));
    // (2 1 . 0) — list ending in an improper tail.
    let d = vm.cons(vm.int(2), c);
    assert_eq!("(2 1 . 0)", format!("{}", vm.wrap(d)));
    assert_eq!(Some(vm.int(2)), vm.car(d));
    assert_eq!(Some(c), vm.cdr(d));
    assert_ne!(d, vm.int(2));
    assert_ne!(d, Object::Nil);
    assert_ne!(d, Object::Undef);
    assert_ne!(d, Object::True);
    assert_ne!(d, Object::False);
}
// Vectors: fresh slots start as #undefined; get/set round-trips and printing.
#[test]
fn vectors() {
    let mut vm = VM::new();
    let v = vm.vec(3);
    assert_eq!("#(#undefined #undefined #undefined)", format!("{}", vm.wrap(v)));
    assert_eq!(Some(Object::Undef), vm.vec_get(v, 0));
    assert_eq!(Some(Object::Undef), vm.vec_get(v, 1));
    assert_eq!(Some(Object::Undef), vm.vec_get(v, 2));
    // Heterogeneous element types are allowed.
    vm.vec_set(v, 0, vm.int(0)).unwrap();
    vm.vec_set(v, 1, Object::Nil).unwrap();
    vm.vec_set(v, 2, vm.float(2.3)).unwrap();
    assert_eq!("#(0 nil 2.3)", format!("{}", vm.wrap(v)));
    assert_eq!(Some(vm.int(0)), vm.vec_get(v, 0));
    assert_eq!(Some(Object::Nil), vm.vec_get(v, 1));
    assert_eq!(Some(vm.float(2.3)), vm.vec_get(v, 2));
}
|
use amethyst::ecs::{Entity, ReadStorage};
use crate::components::Name;
/// Look up `entity`'s `Name` component, falling back to `default` when the
/// entity has no name attached.
pub(crate) fn get_name<'s>(entity: Entity, default: &'s str,
                           name_storage: &'s ReadStorage<'s, Name>) -> &'s str {
    name_storage.get(entity).map_or(default, |named| named.name())
}
|
extern crate pythonvm;
use std::path::PathBuf;
use std::env;
use pythonvm::{MockEnvProxy, PyResult, run_file};
// Integration test: feed a pre-compiled Python module (raw .pyc bytes for
// examples/helloworld.py, which prints "Hello world") through the VM and
// check the captured stdout.
#[test]
fn test_hello_world() {
    let mut reader: &[u8] = b"\xee\x0c\r\n\xb0\x92\x0fW\x15\x00\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s\x0e\x00\x00\x00e\x00\x00d\x00\x00\x83\x01\x00\x01d\x01\x00S)\x02z\x0bHello worldN)\x01\xda\x05print\xa9\x00r\x02\x00\x00\x00r\x02\x00\x00\x00\xfa\x16examples/helloworld.py\xda\x08<module>\x01\x00\x00\x00s\x00\x00\x00\x00";
    let mut path = PathBuf::new();
    path.push(env::current_dir().unwrap());
    path.push("pythonlib/");
    let envproxy = MockEnvProxy::new(path);
    let (processor, result) = run_file(&mut reader, envproxy).unwrap();
    if let PyResult::Return(_) = result {
        assert_eq!(*processor.envproxy.stdout_content.lock().unwrap(), b"Hello world\n");
    }
    else {
        // Fix: `panic!(format!(...))` is the deprecated non-literal form
        // (hard error in Rust 2021); pass the format string directly.
        panic!("Exited with: {:?}", result)
    }
}
|
use crate::neuron::activations::Activation;
use ndarray::Array1;
/// Element-wise hyperbolic tangent of the transfer values.
///
/// Bug fix: the explicit (e^x - e^-x) / (e^x + e^-x) formulation overflows to
/// inf/inf = NaN once |x| exceeds ~88 in f32; the built-in `f32::tanh` is
/// numerically stable over the whole range and saturates to ±1.
pub fn tanh_activation(transfer: &Array1<f32>) -> Array1<f32> {
    transfer.map(|&x| x.tanh())
}
pub fn tanh_derivative(transfer: &Array1<f32>) -> Array1<f32> {
1. - tanh_activation(transfer).map(|x| x * x)
}
/// Build an `Activation` pairing the tanh function with its derivative.
pub fn tanh() -> Activation {
    Activation::new(tanh_activation, tanh_derivative)
}
|
use crate::command_prelude::*;
use cargo::ops;
/// Define the `cargo login` subcommand: an optional positional token (read
/// from stdin when omitted) and an optional `--registry` selector.
pub fn cli() -> App {
    subcommand("login")
        .about(
            "Save an api token from the registry locally. \
             If token is not specified, it will be read from stdin.",
        )
        .arg_quiet()
        .arg(Arg::new("token"))
        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
        .after_help("Run `cargo help login` for more detailed information.\n")
}
/// Execute `cargo login`: persist the token (from the CLI arg, or stdin when
/// absent) for the selected (or default) registry.
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {
    ops::registry_login(
        config,
        args.get_one::<String>("token").cloned(),
        args.get_one::<String>("registry").cloned(),
    )?;
    Ok(())
}
|
pub mod command_is_executing;
pub mod errors;
pub mod input;
pub mod install;
pub mod ipc;
pub mod os_input_output;
pub mod pty_bus;
pub mod screen;
pub mod utils;
pub mod wasm_vm;
use std::cell::RefCell;
use std::path::PathBuf;
use std::sync::mpsc;
use std::thread;
use std::{collections::HashMap, fs};
use std::{
collections::HashSet,
env,
io::Write,
str::FromStr,
sync::{Arc, Mutex},
};
use crate::cli::CliArgs;
use crate::common::input::config::Config;
use crate::layout::Layout;
use crate::panes::PaneId;
use async_std::task_local;
use command_is_executing::CommandIsExecuting;
use directories_next::ProjectDirs;
use errors::{
get_current_ctx, AppContext, ContextType, ErrorContext, PluginContext, PtyContext,
ScreenContext,
};
use input::handler::input_loop;
use install::populate_data_dir;
use os_input_output::OsApi;
use pty_bus::{PtyBus, PtyInstruction};
use screen::{Screen, ScreenInstruction};
use serde::{Deserialize, Serialize};
use utils::consts::ZELLIJ_IPC_PIPE;
use wasm_vm::PluginEnv;
use wasm_vm::{wasi_stdout, wasi_write_string, zellij_imports, PluginInstruction};
use wasmer::{ChainableNamedResolver, Instance, Module, Store, Value};
use wasmer_wasi::{Pipe, WasiState};
use zellij_tile::data::{EventType, ModeInfo};
/// Commands accepted over the application's IPC API (serde-serialized).
#[derive(Serialize, Deserialize, Debug)]
pub enum ApiCommand {
    /// Open the given file in a pane.
    OpenFile(PathBuf),
    SplitHorizontally,
    SplitVertically,
    MoveFocus,
}
/// An [MPSC](mpsc) asynchronous channel with added error context.
/// The `(sender, receiver)` pair as returned by [`mpsc::channel`], where every
/// message carries the sending thread's [`ErrorContext`] alongside the payload.
pub type ChannelWithContext<T> = (
    mpsc::Sender<(T, ErrorContext)>,
    mpsc::Receiver<(T, ErrorContext)>,
);
/// An [MPSC](mpsc) synchronous channel with added error context.
/// Same shape as [`ChannelWithContext`], but built from [`mpsc::sync_channel`]
/// so sends rendezvous/block according to the channel's bound.
pub type SyncChannelWithContext<T> = (
    mpsc::SyncSender<(T, ErrorContext)>,
    mpsc::Receiver<(T, ErrorContext)>,
);
/// Wrappers around the two standard [MPSC](mpsc) sender types, [`mpsc::Sender`] and [`mpsc::SyncSender`], with an additional [`ErrorContext`].
// Kept as an enum (rather than a trait object) so `SenderWithContext::send`
// can dispatch with a simple `match` and stay `Clone`.
#[derive(Clone)]
enum SenderType<T: Clone> {
    /// A wrapper around an [`mpsc::Sender`], adding an [`ErrorContext`].
    Sender(mpsc::Sender<(T, ErrorContext)>),
    /// A wrapper around an [`mpsc::SyncSender`], adding an [`ErrorContext`].
    SyncSender(mpsc::SyncSender<(T, ErrorContext)>),
}
/// Sends messages on an [MPSC](std::sync::mpsc) channel, along with an [`ErrorContext`],
/// synchronously or asynchronously depending on the underlying [`SenderType`].
#[derive(Clone)]
pub struct SenderWithContext<T: Clone> {
    // The wrapped channel endpoint; see `SenderType` for the two variants.
    sender: SenderType<T>,
}
impl<T: Clone> SenderWithContext<T> {
    /// Wraps an existing [`SenderType`] endpoint.
    fn new(sender: SenderType<T>) -> Self {
        Self { sender }
    }
    /// Sends an event, along with the current [`ErrorContext`], on this
    /// [`SenderWithContext`]'s channel. Blocking behavior depends on the
    /// wrapped variant: async for [`SenderType::Sender`], bounded/rendezvous
    /// for [`SenderType::SyncSender`].
    pub fn send(&self, event: T) -> Result<(), mpsc::SendError<(T, ErrorContext)>> {
        let payload = (event, get_current_ctx());
        match &self.sender {
            SenderType::Sender(sender) => sender.send(payload),
            SenderType::SyncSender(sender) => sender.send(payload),
        }
    }
}
// SAFETY: NOTE(review): these impls assert thread-safety for ALL `T: Clone`
// without requiring `T: Send`/`T: Sync`, and `mpsc::Sender` is itself not
// `Sync`. Soundness therefore rests on usage discipline (each thread cloning
// its own sender) rather than on the type system — confirm that invariant
// before relying on cross-thread sharing of a single instance.
unsafe impl<T: Clone> Send for SenderWithContext<T> {}
unsafe impl<T: Clone> Sync for SenderWithContext<T> {}
// Two parallel "current call stack" slots: one per OS thread, one per async
// task (the wasm/plugin side runs under async-std).
thread_local!(
    /// A key to some thread local storage (TLS) that holds a representation of the thread's call
    /// stack in the form of an [`ErrorContext`].
    static OPENCALLS: RefCell<ErrorContext> = RefCell::default()
);
task_local! {
    /// A key to some task local storage that holds a representation of the task's call
    /// stack in the form of an [`ErrorContext`].
    static ASYNCOPENCALLS: RefCell<ErrorContext> = RefCell::default()
}
/// Instructions related to the entire application.
/// Consumed by the main loop at the bottom of [`start`].
#[derive(Clone)]
pub enum AppInstruction {
    /// Shut down cleanly: the main loop breaks, joins the worker threads and
    /// restores the terminal.
    Exit,
    /// Tear down the worker threads, print the carried backtrace/description
    /// and exit with status 1.
    Error(String),
}
/// Start Zellij with the specified [`OsApi`] and command-line arguments.
// FIXME this should definitely be modularized and split into different functions.
pub fn start(mut os_input: Box<dyn OsApi>, opts: CliArgs) {
    // Switch the host terminal to the alternate screen buffer so the user's
    // original screen contents can be restored on exit (see cleanup below).
    let take_snapshot = "\u{1b}[?1049h";
    os_input.unset_raw_mode(0);
    let _ = os_input
        .get_stdout_writer()
        .write(take_snapshot.as_bytes())
        .unwrap();
    env::set_var(&"ZELLIJ", "0");
    // Load and validate the user configuration; a malformed config file is a
    // hard error reported before any terminal state is changed further.
    let config = Config::from_cli_config(opts.config, opts.option)
        .map_err(|e| {
            eprintln!("There was an error in the config file:\n{}", e);
            std::process::exit(1);
        })
        .unwrap();
    let command_is_executing = CommandIsExecuting::new();
    let full_screen_ws = os_input.get_terminal_size_using_fd(0);
    os_input.set_raw_mode(0);
    // One channel per subsystem (screen, pty, plugins), plus a zero-capacity
    // rendezvous channel for application-level control messages.
    let (send_screen_instructions, receive_screen_instructions): ChannelWithContext<
        ScreenInstruction,
    > = mpsc::channel();
    let send_screen_instructions =
        SenderWithContext::new(SenderType::Sender(send_screen_instructions));
    let (send_pty_instructions, receive_pty_instructions): ChannelWithContext<PtyInstruction> =
        mpsc::channel();
    let send_pty_instructions = SenderWithContext::new(SenderType::Sender(send_pty_instructions));
    let (send_plugin_instructions, receive_plugin_instructions): ChannelWithContext<
        PluginInstruction,
    > = mpsc::channel();
    let send_plugin_instructions =
        SenderWithContext::new(SenderType::Sender(send_plugin_instructions));
    let (send_app_instructions, receive_app_instructions): SyncChannelWithContext<AppInstruction> =
        mpsc::sync_channel(0);
    let send_app_instructions =
        SenderWithContext::new(SenderType::SyncSender(send_app_instructions));
    let mut pty_bus = PtyBus::new(
        receive_pty_instructions,
        send_screen_instructions.clone(),
        send_plugin_instructions.clone(),
        os_input.clone(),
        opts.debug,
    );
    // Determine and initialize the data directory
    let project_dirs = ProjectDirs::from("org", "Zellij Contributors", "Zellij").unwrap();
    let data_dir = opts
        .data_dir
        .unwrap_or_else(|| project_dirs.data_dir().to_path_buf());
    populate_data_dir(&data_dir);
    // Don't use default layouts in tests, but do everywhere else
    #[cfg(not(test))]
    let default_layout = Some(PathBuf::from("default"));
    #[cfg(test)]
    let default_layout = None;
    let maybe_layout = opts
        .layout
        .or(default_layout)
        .map(|p| Layout::new(&p, &data_dir));
    // Route panics through our own handler so a panicking worker thread turns
    // into an AppInstruction::Error for the main loop instead of a silent abort.
    #[cfg(not(test))]
    std::panic::set_hook({
        use crate::errors::handle_panic;
        let send_app_instructions = send_app_instructions.clone();
        Box::new(move |info| {
            handle_panic(info, &send_app_instructions);
        })
    });
    // Pty thread: owns the PtyBus, spawns terminals and forwards the resulting
    // pane ids to the screen thread. The initial NewTab is queued before the
    // thread starts looping.
    let pty_thread = thread::Builder::new()
        .name("pty".to_string())
        .spawn({
            let mut command_is_executing = command_is_executing.clone();
            send_pty_instructions.send(PtyInstruction::NewTab).unwrap();
            move || loop {
                let (event, mut err_ctx) = pty_bus
                    .receive_pty_instructions
                    .recv()
                    .expect("failed to receive event on channel");
                err_ctx.add_call(ContextType::Pty(PtyContext::from(&event)));
                match event {
                    PtyInstruction::SpawnTerminal(file_to_open) => {
                        let pid = pty_bus.spawn_terminal(file_to_open);
                        pty_bus
                            .send_screen_instructions
                            .send(ScreenInstruction::NewPane(PaneId::Terminal(pid)))
                            .unwrap();
                    }
                    PtyInstruction::SpawnTerminalVertically(file_to_open) => {
                        let pid = pty_bus.spawn_terminal(file_to_open);
                        pty_bus
                            .send_screen_instructions
                            .send(ScreenInstruction::VerticalSplit(PaneId::Terminal(pid)))
                            .unwrap();
                    }
                    PtyInstruction::SpawnTerminalHorizontally(file_to_open) => {
                        let pid = pty_bus.spawn_terminal(file_to_open);
                        pty_bus
                            .send_screen_instructions
                            .send(ScreenInstruction::HorizontalSplit(PaneId::Terminal(pid)))
                            .unwrap();
                    }
                    PtyInstruction::NewTab => {
                        // With a layout, one terminal per layout pane is
                        // spawned; otherwise a single default terminal.
                        if let Some(layout) = maybe_layout.clone() {
                            pty_bus.spawn_terminals_for_layout(layout);
                        } else {
                            let pid = pty_bus.spawn_terminal(None);
                            pty_bus
                                .send_screen_instructions
                                .send(ScreenInstruction::NewTab(pid))
                                .unwrap();
                        }
                    }
                    PtyInstruction::ClosePane(id) => {
                        pty_bus.close_pane(id);
                        command_is_executing.done_closing_pane();
                    }
                    PtyInstruction::CloseTab(ids) => {
                        pty_bus.close_tab(ids);
                        command_is_executing.done_closing_pane();
                    }
                    PtyInstruction::Quit => {
                        break;
                    }
                }
            }
        })
        .unwrap();
    // Screen thread: owns the Screen and dispatches every ScreenInstruction
    // to the active tab (or, for PtyBytes, to whichever tab owns the pid).
    let screen_thread = thread::Builder::new()
        .name("screen".to_string())
        .spawn({
            let mut command_is_executing = command_is_executing.clone();
            let os_input = os_input.clone();
            let send_pty_instructions = send_pty_instructions.clone();
            let send_plugin_instructions = send_plugin_instructions.clone();
            let send_app_instructions = send_app_instructions.clone();
            let max_panes = opts.max_panes;
            move || {
                let mut screen = Screen::new(
                    receive_screen_instructions,
                    send_pty_instructions,
                    send_plugin_instructions,
                    send_app_instructions,
                    &full_screen_ws,
                    os_input,
                    max_panes,
                    ModeInfo::default(),
                );
                loop {
                    let (event, mut err_ctx) = screen
                        .receiver
                        .recv()
                        .expect("failed to receive event on channel");
                    err_ctx.add_call(ContextType::Screen(ScreenContext::from(&event)));
                    match event {
                        ScreenInstruction::PtyBytes(pid, vte_bytes) => {
                            let active_tab = screen.get_active_tab_mut().unwrap();
                            if active_tab.has_terminal_pid(pid) {
                                // it's most likely that this event is directed at the active tab
                                // look there first
                                active_tab.handle_pty_bytes(pid, vte_bytes);
                            } else {
                                // if this event wasn't directed at the active tab, start looking
                                // in other tabs
                                let all_tabs = screen.get_tabs_mut();
                                for tab in all_tabs.values_mut() {
                                    if tab.has_terminal_pid(pid) {
                                        tab.handle_pty_bytes(pid, vte_bytes);
                                        break;
                                    }
                                }
                            }
                        }
                        ScreenInstruction::Render => {
                            screen.render();
                        }
                        ScreenInstruction::NewPane(pid) => {
                            screen.get_active_tab_mut().unwrap().new_pane(pid);
                            command_is_executing.done_opening_new_pane();
                        }
                        ScreenInstruction::HorizontalSplit(pid) => {
                            screen.get_active_tab_mut().unwrap().horizontal_split(pid);
                            command_is_executing.done_opening_new_pane();
                        }
                        ScreenInstruction::VerticalSplit(pid) => {
                            screen.get_active_tab_mut().unwrap().vertical_split(pid);
                            command_is_executing.done_opening_new_pane();
                        }
                        ScreenInstruction::WriteCharacter(bytes) => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .write_to_active_terminal(bytes);
                        }
                        ScreenInstruction::ResizeLeft => {
                            screen.get_active_tab_mut().unwrap().resize_left();
                        }
                        ScreenInstruction::ResizeRight => {
                            screen.get_active_tab_mut().unwrap().resize_right();
                        }
                        ScreenInstruction::ResizeDown => {
                            screen.get_active_tab_mut().unwrap().resize_down();
                        }
                        ScreenInstruction::ResizeUp => {
                            screen.get_active_tab_mut().unwrap().resize_up();
                        }
                        ScreenInstruction::SwitchFocus => {
                            screen.get_active_tab_mut().unwrap().move_focus();
                        }
                        ScreenInstruction::FocusNextPane => {
                            screen.get_active_tab_mut().unwrap().focus_next_pane();
                        }
                        ScreenInstruction::FocusPreviousPane => {
                            screen.get_active_tab_mut().unwrap().focus_previous_pane();
                        }
                        ScreenInstruction::MoveFocusLeft => {
                            screen.get_active_tab_mut().unwrap().move_focus_left();
                        }
                        ScreenInstruction::MoveFocusDown => {
                            screen.get_active_tab_mut().unwrap().move_focus_down();
                        }
                        ScreenInstruction::MoveFocusRight => {
                            screen.get_active_tab_mut().unwrap().move_focus_right();
                        }
                        ScreenInstruction::MoveFocusUp => {
                            screen.get_active_tab_mut().unwrap().move_focus_up();
                        }
                        ScreenInstruction::ScrollUp => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .scroll_active_terminal_up();
                        }
                        ScreenInstruction::ScrollDown => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .scroll_active_terminal_down();
                        }
                        ScreenInstruction::PageScrollUp => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .scroll_active_terminal_up_page();
                        }
                        ScreenInstruction::PageScrollDown => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .scroll_active_terminal_down_page();
                        }
                        ScreenInstruction::ClearScroll => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .clear_active_terminal_scroll();
                        }
                        ScreenInstruction::CloseFocusedPane => {
                            screen.get_active_tab_mut().unwrap().close_focused_pane();
                            screen.render();
                        }
                        ScreenInstruction::SetSelectable(id, selectable) => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .set_pane_selectable(id, selectable);
                        }
                        ScreenInstruction::SetMaxHeight(id, max_height) => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .set_pane_max_height(id, max_height);
                        }
                        ScreenInstruction::SetInvisibleBorders(id, invisible_borders) => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .set_pane_invisible_borders(id, invisible_borders);
                            screen.render();
                        }
                        ScreenInstruction::ClosePane(id) => {
                            screen.get_active_tab_mut().unwrap().close_pane(id);
                            screen.render();
                        }
                        ScreenInstruction::ToggleActiveTerminalFullscreen => {
                            screen
                                .get_active_tab_mut()
                                .unwrap()
                                .toggle_active_pane_fullscreen();
                        }
                        ScreenInstruction::NewTab(pane_id) => {
                            screen.new_tab(pane_id);
                            command_is_executing.done_opening_new_pane();
                        }
                        ScreenInstruction::SwitchTabNext => screen.switch_tab_next(),
                        ScreenInstruction::SwitchTabPrev => screen.switch_tab_prev(),
                        ScreenInstruction::CloseTab => screen.close_tab(),
                        ScreenInstruction::ApplyLayout((layout, new_pane_pids)) => {
                            screen.apply_layout(layout, new_pane_pids);
                            command_is_executing.done_opening_new_pane();
                        }
                        ScreenInstruction::GoToTab(tab_index) => {
                            screen.go_to_tab(tab_index as usize)
                        }
                        ScreenInstruction::UpdateTabName(c) => {
                            screen.update_active_tab_name(c);
                        }
                        ScreenInstruction::TerminalResize => {
                            screen.resize_to_screen();
                        }
                        ScreenInstruction::ChangeMode(mode_info) => {
                            screen.change_mode(mode_info);
                        }
                        ScreenInstruction::Quit => {
                            break;
                        }
                    }
                }
            }
        })
        .unwrap();
    // Wasm thread: hosts the wasmer/WASI plugin runtime. Plugins are loaded
    // on demand, keyed by an incrementing plugin_id, and driven through their
    // exported `_start`/`update`/`render` functions.
    let wasm_thread = thread::Builder::new()
        .name("wasm".to_string())
        .spawn({
            let send_pty_instructions = send_pty_instructions.clone();
            let send_screen_instructions = send_screen_instructions.clone();
            let send_app_instructions = send_app_instructions.clone();
            let store = Store::default();
            let mut plugin_id = 0;
            let mut plugin_map = HashMap::new();
            move || loop {
                let (event, mut err_ctx) = receive_plugin_instructions
                    .recv()
                    .expect("failed to receive event on channel");
                err_ctx.add_call(ContextType::Plugin(PluginContext::from(&event)));
                match event {
                    PluginInstruction::Load(pid_tx, path) => {
                        let plugin_dir = data_dir.join("plugins/");
                        // Resolution order: exact path, then `<path>.wasm`,
                        // then `<data_dir>/plugins/<path>.wasm`.
                        let wasm_bytes = fs::read(&path)
                            .or_else(|_| fs::read(&path.with_extension("wasm")))
                            .or_else(|_| fs::read(&plugin_dir.join(&path).with_extension("wasm")))
                            .unwrap_or_else(|_| panic!("cannot find plugin {}", &path.display()));
                        // FIXME: Cache this compiled module on disk. I could use `(de)serialize_to_file()` for that
                        let module = Module::new(&store, &wasm_bytes).unwrap();
                        let output = Pipe::new();
                        let input = Pipe::new();
                        let mut wasi_env = WasiState::new("Zellij")
                            .env("CLICOLOR_FORCE", "1")
                            .preopen(|p| {
                                p.directory(".") // FIXME: Change this to a more meaningful dir
                                    .alias(".")
                                    .read(true)
                                    .write(true)
                                    .create(true)
                            })
                            .unwrap()
                            .stdin(Box::new(input))
                            .stdout(Box::new(output))
                            .finalize()
                            .unwrap();
                        let wasi = wasi_env.import_object(&module).unwrap();
                        let plugin_env = PluginEnv {
                            plugin_id,
                            send_pty_instructions: send_pty_instructions.clone(),
                            send_screen_instructions: send_screen_instructions.clone(),
                            send_app_instructions: send_app_instructions.clone(),
                            wasi_env,
                            subscriptions: Arc::new(Mutex::new(HashSet::new())),
                        };
                        let zellij = zellij_imports(&store, &plugin_env);
                        let instance = Instance::new(&module, &zellij.chain_back(wasi)).unwrap();
                        let start = instance.exports.get_function("_start").unwrap();
                        // This eventually calls the `.init()` method
                        start.call(&[]).unwrap();
                        plugin_map.insert(plugin_id, (instance, plugin_env));
                        pid_tx.send(plugin_id).unwrap();
                        plugin_id += 1;
                    }
                    PluginInstruction::Update(pid, event) => {
                        // Broadcast (pid == None) or target a single plugin;
                        // only plugins subscribed to this event type are called.
                        for (&i, (instance, plugin_env)) in &plugin_map {
                            let subs = plugin_env.subscriptions.lock().unwrap();
                            // FIXME: This is very janky... Maybe I should write my own macro for Event -> EventType?
                            let event_type = EventType::from_str(&event.to_string()).unwrap();
                            if (pid.is_none() || pid == Some(i)) && subs.contains(&event_type) {
                                let update = instance.exports.get_function("update").unwrap();
                                wasi_write_string(
                                    &plugin_env.wasi_env,
                                    &serde_json::to_string(&event).unwrap(),
                                );
                                update.call(&[]).unwrap();
                            }
                        }
                        drop(send_screen_instructions.send(ScreenInstruction::Render));
                    }
                    PluginInstruction::Render(buf_tx, pid, rows, cols) => {
                        let (instance, plugin_env) = plugin_map.get(&pid).unwrap();
                        let render = instance.exports.get_function("render").unwrap();
                        render
                            .call(&[Value::I32(rows as i32), Value::I32(cols as i32)])
                            .unwrap();
                        // The plugin writes its UI to WASI stdout; relay it back.
                        buf_tx.send(wasi_stdout(&plugin_env.wasi_env)).unwrap();
                    }
                    PluginInstruction::Unload(pid) => drop(plugin_map.remove(&pid)),
                    PluginInstruction::Quit => break,
                }
            }
        })
        .unwrap();
    // Signal thread: translates SIGWINCH (terminal resized) into a screen
    // resize instruction.
    let _signal_thread = thread::Builder::new()
        .name("signal_listener".to_string())
        .spawn({
            let os_input = os_input.clone();
            let send_screen_instructions = send_screen_instructions.clone();
            move || {
                os_input.receive_sigwinch(Box::new(move || {
                    let _ = send_screen_instructions.send(ScreenInstruction::TerminalResize);
                }));
            }
        })
        .unwrap();
    // TODO: currently we don't wait for this to quit
    // because otherwise the app will hang. Need to fix this so it both
    // listens to the ipc-bus and is able to quit cleanly
    #[cfg(not(test))]
    let _ipc_thread = thread::Builder::new()
        .name("ipc_server".to_string())
        .spawn({
            use std::io::Read;
            let send_pty_instructions = send_pty_instructions.clone();
            let send_screen_instructions = send_screen_instructions.clone();
            move || {
                std::fs::remove_file(ZELLIJ_IPC_PIPE).ok();
                let listener = std::os::unix::net::UnixListener::bind(ZELLIJ_IPC_PIPE)
                    .expect("could not listen on ipc socket");
                let mut err_ctx = OPENCALLS.with(|ctx| *ctx.borrow());
                err_ctx.add_call(ContextType::IpcServer);
                for stream in listener.incoming() {
                    match stream {
                        Ok(mut stream) => {
                            let mut buffer = [0; 65535]; // TODO: more accurate
                            let _ = stream
                                .read(&mut buffer)
                                .expect("failed to parse ipc message");
                            let decoded: ApiCommand = bincode::deserialize(&buffer)
                                .expect("failed to deserialize ipc message");
                            match &decoded {
                                ApiCommand::OpenFile(file_name) => {
                                    let path = PathBuf::from(file_name);
                                    send_pty_instructions
                                        .send(PtyInstruction::SpawnTerminal(Some(path)))
                                        .unwrap();
                                }
                                ApiCommand::SplitHorizontally => {
                                    send_pty_instructions
                                        .send(PtyInstruction::SpawnTerminalHorizontally(None))
                                        .unwrap();
                                }
                                ApiCommand::SplitVertically => {
                                    send_pty_instructions
                                        .send(PtyInstruction::SpawnTerminalVertically(None))
                                        .unwrap();
                                }
                                ApiCommand::MoveFocus => {
                                    send_screen_instructions
                                        .send(ScreenInstruction::FocusNextPane)
                                        .unwrap();
                                }
                            }
                        }
                        Err(err) => {
                            panic!("err {:?}", err);
                        }
                    }
                }
            }
        })
        .unwrap();
    // Stdin thread: runs the user input loop, which fans keystrokes out to
    // the other subsystems.
    let _stdin_thread = thread::Builder::new()
        .name("stdin_handler".to_string())
        .spawn({
            let send_screen_instructions = send_screen_instructions.clone();
            let send_pty_instructions = send_pty_instructions.clone();
            let send_plugin_instructions = send_plugin_instructions.clone();
            let os_input = os_input.clone();
            let config = config;
            move || {
                input_loop(
                    os_input,
                    config,
                    command_is_executing,
                    send_screen_instructions,
                    send_pty_instructions,
                    send_plugin_instructions,
                    send_app_instructions,
                )
            }
        });
    // Main loop: blocks on app-level instructions until Exit or Error.
    // NOTE(review): `#[warn(clippy::never_loop)]` looks like it was meant to
    // be `#[allow(...)]` — as written it merely re-enables a default warning.
    #[warn(clippy::never_loop)]
    loop {
        let (app_instruction, mut err_ctx) = receive_app_instructions
            .recv()
            .expect("failed to receive app instruction on channel");
        err_ctx.add_call(ContextType::App(AppContext::from(&app_instruction)));
        match app_instruction {
            AppInstruction::Exit => {
                break;
            }
            AppInstruction::Error(backtrace) => {
                // Orderly teardown (screen, pty, plugins), then restore the
                // terminal and print the error before exiting with status 1.
                let _ = send_screen_instructions.send(ScreenInstruction::Quit);
                let _ = screen_thread.join();
                let _ = send_pty_instructions.send(PtyInstruction::Quit);
                let _ = pty_thread.join();
                let _ = send_plugin_instructions.send(PluginInstruction::Quit);
                let _ = wasm_thread.join();
                os_input.unset_raw_mode(0);
                let goto_start_of_last_line = format!("\u{1b}[{};{}H", full_screen_ws.rows, 1);
                let restore_snapshot = "\u{1b}[?1049l";
                let error = format!(
                    "{}\n{}{}",
                    goto_start_of_last_line, restore_snapshot, backtrace
                );
                let _ = os_input
                    .get_stdout_writer()
                    .write(error.as_bytes())
                    .unwrap();
                std::process::exit(1);
            }
        }
    }
    // Normal shutdown: stop the worker threads, then restore the terminal
    // (leave alternate screen, reset styles, re-show the cursor).
    let _ = send_pty_instructions.send(PtyInstruction::Quit);
    pty_thread.join().unwrap();
    let _ = send_screen_instructions.send(ScreenInstruction::Quit);
    screen_thread.join().unwrap();
    let _ = send_plugin_instructions.send(PluginInstruction::Quit);
    wasm_thread.join().unwrap();
    // cleanup();
    let reset_style = "\u{1b}[m";
    let show_cursor = "\u{1b}[?25h";
    let restore_snapshot = "\u{1b}[?1049l";
    let goto_start_of_last_line = format!("\u{1b}[{};{}H", full_screen_ws.rows, 1);
    let goodbye_message = format!(
        "{}\n{}{}{}Bye from Zellij!\n",
        goto_start_of_last_line, restore_snapshot, reset_style, show_cursor
    );
    os_input.unset_raw_mode(0);
    let _ = os_input
        .get_stdout_writer()
        .write(goodbye_message.as_bytes())
        .unwrap();
    os_input.get_stdout_writer().flush().unwrap();
}
|
use std::collections::BTreeMap;
/// Inverts a score -> letters map into a letter -> score map, lowercasing
/// each letter along the way.
///
/// Every character in every value vector of `h` becomes a key in the result,
/// mapped to the score it was listed under. (If a character appears under
/// several scores, the last one iterated wins — same as the original insert
/// loop.)
pub fn transform(h: &BTreeMap<i32, Vec<char>>) -> BTreeMap<char, i32> {
    h.iter()
        .flat_map(|(&score, letters)| {
            // `char` is Copy, so no clone is needed; `to_ascii_lowercase`
            // avoids mutating a temporary in place.
            letters.iter().map(move |&c| (c.to_ascii_lowercase(), score))
        })
        .collect()
}
|
use std::collections::VecDeque;
/// A sliding-window ("snake") wrapper pairing a buffer of items with the
/// iterator that supplies them.
/// NOTE(review): semantics inferred from field shapes only — the `impl` is
/// not visible here; confirm against the accompanying implementation.
pub struct Snake<V, I> {
    /// Window radius; presumably controls how many items `state` holds — TODO confirm.
    pub radius: usize,
    /// Buffered items of type `V`, front-and-back accessible.
    pub state: VecDeque<V>,
    /// Underlying source of new items.
    pub iter: I,
}
|
use std::collections::{HashMap, VecDeque};
use crate::{
abstract_types::ImmutableSchemaAbstractTypes, version::id::VersionId, vtable::id::VTableId,
};
use super::{vrr_entries_in_version::VrrEntriesInVersion, vrr_entry::VrrEntry};
/// Sequence of VrrEntry.
/// Insertion order is preserved (backed by a `VecDeque`); the `Iterator`
/// impl below drains entries front-to-back.
#[derive(Clone, PartialEq, Hash, Debug, new)]
pub struct VrrEntries<Types: ImmutableSchemaAbstractTypes> {
    /// Id of the VTable all contained entries belong to.
    vtable_id: VTableId,
    /// The entries themselves, in order.
    inner: VecDeque<VrrEntry<Types>>,
}
impl<Types: ImmutableSchemaAbstractTypes> VrrEntries<Types> {
    /// Groups the entries by their `VersionId`, consuming `self`.
    ///
    /// Order of VrrEntry is kept in each group. (The order of the groups
    /// themselves is unspecified, as they come out of a `HashMap`.)
    pub fn group_by_version_id(self) -> Vec<VrrEntriesInVersion<Types>> {
        let mut h: HashMap<VersionId, VecDeque<VrrEntry<Types>>> = HashMap::new();
        for e in self.inner {
            // entry().or_default() replaces the previous and_modify /
            // or_insert_with dance, which cloned every entry landing in an
            // already-existing group. Here only the key is cloned; the entry
            // itself is moved into its group.
            h.entry(e.version_id.clone()).or_default().push_back(e);
        }
        h.into_iter()
            .map(|(version_id, es)| VrrEntriesInVersion::new(version_id, es))
            .collect()
    }
    /// Id of the VTable these entries belong to.
    pub fn vtable_id(&self) -> &VTableId {
        &self.vtable_id
    }
}
// Draining front-to-back iteration over the owned entries.
impl<Types: ImmutableSchemaAbstractTypes> Iterator for VrrEntries<Types> {
    type Item = VrrEntry<Types>;
    /// Pops and returns the front entry, or `None` once exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.pop_front()
    }
}
|
mod assets;
mod dex_pallet;
mod dex_xcmp;
mod parachains;
mod token_dealer;
#[cfg(test)]
mod test {
use super::*;
use codec::{Decode, Encode};
use sp_core::crypto;
use sp_core::crypto::Ss58Codec;
use sp_keyring::AccountKeyring;
use sp_std::convert::TryInto;
use std::time::Duration;
use substrate_subxt::{
balances::TransferCall, system::AccountStoreExt, ClientBuilder, KusamaRuntime,
NodeTemplateRuntime, PairSigner,
};
use tokio::time::sleep;
const GENERIC_CHAIN_WS: &str = "ws://127.0.0.1:7744";
const SUBDEX_CHAIN_WS: &str = "ws://127.0.0.1:9944";
const RELAY_ALICE_WS: &str = "ws://127.0.0.1:6644";
const GENERIC_ACCOUNT: &str = "5Ec4AhP7HwJNrY2CxEcFSy1BuqAY3qxvCQCfoois983TTxDA";
// const SUBDEX_ACCOUNT: &str = "5Ec4AhPTL6nWnUnw58QzjJvFd3QATwHA3UJnvSD4GVSQ7Gop";
//const RELAY_ACCOUNT: &str = "5Dvjuthoa1stHkMDTH8Ljr9XaFiVLYe4f9LkAQLDjL3KqHoX";
const SUBDEX_PARA_ID: u32 = 200;
const GENERIC_PARA_ID: u32 = 100;
    // Wire the subxt call/storage modules declared in this crate onto the
    // concrete runtimes used by the test networks.
    impl parachains::Parachains for KusamaRuntime {}
    impl token_dealer::TokenDealer for NodeTemplateRuntime {
        type AssetIdOf = u64;
        type ParaId = u32;
    }
    impl assets::Assets for NodeTemplateRuntime {
        type AssetId = u64;
    }
    impl dex_pallet::DexPallet for NodeTemplateRuntime {
        type AssetId = u64;
    }
    impl dex_xcmp::DexXCMP for NodeTemplateRuntime {
        type AssetIdOf = u64;
        type ParaId = u32;
        // `None` denotes the chain's native currency in both id spaces.
        type ParaChainAssetId = Option<u64>;
        type DexAssetId = Option<u64>;
    }
fn encoded_to_remark(mut v: Vec<u8>) -> [u8; 32] {
v.resize(32, 0);
let boxed_slice = v.into_boxed_slice();
let boxed_array: Box<[u8; 32]> = match boxed_slice.try_into() {
Ok(ba) => ba,
Err(o) => panic!("Expected a Vec of length {} but it was {}", 32, o.len()),
};
*boxed_array
}
    /// End-to-end test: moves currency from the relay chain to the Dex
    /// parachain and back, asserting Charlie's free balance grows by the
    /// transferred amount on each receiving side.
    /// NOTE(review): requires live nodes at RELAY_ALICE_WS / SUBDEX_CHAIN_WS;
    /// the 15s sleeps wait for block inclusion and make this timing-sensitive.
    #[tokio::test]
    async fn transfer_tokens_between_dex_and_relay_chains() {
        let charlie_account = AccountKeyring::Charlie.to_account_id();
        let charlie_generic_pair =
            PairSigner::<NodeTemplateRuntime, _>::new(AccountKeyring::Charlie.pair());
        let charlie_relay_pair =
            PairSigner::<KusamaRuntime, _>::new(AccountKeyring::Charlie.pair());
        let transfer_amount = 10_000_000_000_000u128;
        let dex_client = ClientBuilder::<NodeTemplateRuntime>::new()
            .set_url(SUBDEX_CHAIN_WS)
            .build()
            .await
            .unwrap();
        let relay_client = ClientBuilder::<KusamaRuntime>::new()
            .set_url(RELAY_ALICE_WS)
            .build()
            .await
            .unwrap();
        println!("----- Running transfer currency and tokens from Relay to Dex chain -----");
        let charlie_asset_pre = dex_client.account(&charlie_account, None).await.unwrap();
        println! {"charlie dex free balance before transfers {:?}", charlie_asset_pre.data.free};
        // All-zero remark = transfer the native currency (no asset id encoded).
        let relay_transfer_asset = relay_client
            .watch(
                parachains::TransferToParachainCall::<KusamaRuntime> {
                    to: SUBDEX_PARA_ID,
                    amount: transfer_amount,
                    remark: encoded_to_remark(vec![0; 32]),
                },
                &charlie_relay_pair,
            )
            .await;
        assert! {relay_transfer_asset.is_ok()};
        println! {"Transfer Asset from Relay is OK"};
        println!("Ensuring block after transfer event...");
        sleep(Duration::from_millis(15000)).await;
        let charlie_asset_post = dex_client.account(&charlie_account, None).await.unwrap();
        println! {"charlie dex free balance after transfers {:?}", charlie_asset_post.data.free};
        assert_eq!(
            charlie_asset_pre.data.free + transfer_amount,
            charlie_asset_post.data.free
        );
        println!("----- Success! transfer currency from Relay to Dex chain -----");
        println!();
        println!();
        println!();
        println!("----- Running transfer currency from Dex to Relay chain -----");
        let charlie_relay_pre = relay_client.account(&charlie_account, None).await.unwrap();
        println! {"charlie relay account free balance before transfers: {:?}", charlie_relay_pre.data.free};
        let transfer_currency_to_relay = dex_client
            .watch(
                dex_xcmp::TransferBalanceToRelayChainCall {
                    dest: &charlie_account,
                    amount: transfer_amount,
                },
                &charlie_generic_pair,
            )
            .await;
        assert!(transfer_currency_to_relay.is_ok());
        println! {"Transfer currency to Relay is OK"};
        println!("Ensuring block after transfer event...");
        sleep(Duration::from_millis(15000)).await;
        let charlie_relay_post = relay_client.account(&charlie_account, None).await.unwrap();
        println! {"charlie relay account free balance after transfers: {:?}", charlie_relay_post.data.free};
        assert_eq!(
            charlie_relay_pre.data.free + transfer_amount,
            charlie_relay_post.data.free
        );
        println!("----- Success! transfer currency and tokens from Dex to Relay chain -----");
    }
    /// End-to-end test: issues an asset on the generic parachain, transfers
    /// both currency and that asset to the relay chain (asserting Alice's
    /// relay balance grows by 2x the amount), then transfers both back
    /// (asserting her parachain asset balance grows by half the amount).
    /// NOTE(review): requires live nodes at GENERIC_CHAIN_WS / RELAY_ALICE_WS;
    /// the 15s sleeps wait for block inclusion and make this timing-sensitive.
    #[tokio::test]
    async fn transfer_tokens_between_generic_and_relay_chains() {
        let alice_account = AccountKeyring::Alice.to_account_id();
        let alice_generic_pair =
            PairSigner::<NodeTemplateRuntime, _>::new(AccountKeyring::Alice.pair());
        let alice_relay_pair = PairSigner::<KusamaRuntime, _>::new(AccountKeyring::Alice.pair());
        let generic_para_account = crypto::AccountId32::from_string(GENERIC_ACCOUNT).unwrap();
        let asset_issue_amount = 50_000_000_000_000u128;
        let transfer_amount = 10_000_000_000_000u128;
        let generic_client = ClientBuilder::<NodeTemplateRuntime>::new()
            .set_url(GENERIC_CHAIN_WS)
            .build()
            .await
            .unwrap();
        let relay_client = ClientBuilder::<KusamaRuntime>::new()
            .set_url(RELAY_ALICE_WS)
            .build()
            .await
            .unwrap();
        println!("----- Running transfer currency and tokens from Generic to Relay chain -----");
        // Initialise so generic para has balance on relay chain
        let relay_transfer = relay_client
            .watch(
                TransferCall {
                    to: &generic_para_account,
                    amount: asset_issue_amount,
                },
                &alice_relay_pair,
            )
            .await;
        assert!(relay_transfer.is_ok());
        println!("Preset: Relay transfer to para account on relay chain OK",);
        // Initialise so we have some generic assets
        let issue_asset = generic_client
            .watch(
                assets::IssueCall::<NodeTemplateRuntime> {
                    total: asset_issue_amount,
                },
                &alice_generic_pair,
            )
            .await;
        assert! {issue_asset.is_ok()};
        // The asset id assigned by the chain is read back from the Issued event.
        let e = assets::IssuedEvent::<NodeTemplateRuntime>::decode(
            &mut &issue_asset.unwrap().events[0].data[..],
        )
        .unwrap();
        println!(
            "Preset: Issue some token is OK! New asset_id {:?}",
            e.asset_id
        );
        let issued_asset_id = e.asset_id;
        let alice_relay_pre = relay_client.account(&alice_account, None).await.unwrap();
        println! {"Alice relay account free balance before transfers: {:?}", alice_relay_pre.data.free};
        // asset_id: None = transfer the parachain's native currency.
        let transfer_currency_to_relay = generic_client
            .watch(
                token_dealer::TransferTokensToRelayChainCall::<NodeTemplateRuntime> {
                    dest: alice_account.clone(),
                    amount: transfer_amount,
                    asset_id: None,
                },
                &alice_generic_pair,
            )
            .await;
        assert!(transfer_currency_to_relay.is_ok());
        println! {"Transfer currency to Relay is OK"};
        let transfer_asset_to_relay = generic_client
            .watch(
                token_dealer::TransferTokensToRelayChainCall::<NodeTemplateRuntime> {
                    dest: alice_account.clone(),
                    amount: transfer_amount,
                    asset_id: Some(issued_asset_id),
                },
                &alice_generic_pair,
            )
            .await;
        assert!(transfer_asset_to_relay.is_ok());
        println! {"Transfer asset to Relay is OK"};
        println!("Ensure Extrinsic Included ...");
        sleep(Duration::from_millis(15000)).await;
        let alice_relay_post = relay_client.account(&alice_account, None).await.unwrap();
        println! {"Alice relay account free balance after transfers: {:?}", alice_relay_post.data.free};
        // Both the currency and the asset transfer arrive as relay currency.
        assert_eq!(
            alice_relay_pre.data.free + (2 * transfer_amount),
            alice_relay_post.data.free
        );
        println!("----- Success! transfer currency and tokens from Generic to Relay chain -----");
        println!();
        println!();
        println!();
        println!("----- Running transfer currency and tokens from Relay to Generic chain -----");
        let alice_asset_pre = generic_client
            .fetch(
                assets::BalancesStore {
                    balance_of: (issued_asset_id, &alice_account),
                },
                None,
            )
            .await
            .unwrap()
            .unwrap();
        println! {"Alice generic asset account free balance before transfers {:?}", alice_asset_pre};
        // SCALE-encode Some(asset_id) into the remark to target the asset...
        let remark = Some(issued_asset_id).encode();
        let relay_transfer_asset = relay_client
            .watch(
                parachains::TransferToParachainCall::<KusamaRuntime> {
                    to: 100,
                    amount: transfer_amount / 2,
                    remark: encoded_to_remark(remark),
                },
                &alice_relay_pair,
            )
            .await;
        assert! {relay_transfer_asset.is_ok()};
        println! {"Transfer Asset from Relay is OK"};
        // ...and an all-zero remark to target the native currency.
        let relay_transfer_currency = relay_client
            .watch(
                parachains::TransferToParachainCall::<KusamaRuntime> {
                    to: 100,
                    amount: transfer_amount / 2,
                    remark: [0u8; 32],
                },
                &alice_relay_pair,
            )
            .await;
        assert! {relay_transfer_currency.is_ok()};
        println! {"Transfer Currency from Relay is OK"};
        println!("Ensure Extrinsic Included...");
        sleep(Duration::from_millis(15000)).await;
        let alice_asset_post = generic_client
            .fetch(
                assets::BalancesStore {
                    balance_of: (issued_asset_id, &alice_account),
                },
                None,
            )
            .await
            .unwrap()
            .unwrap();
        println! {"Alice generic asset account free balance after transfers {:?}", alice_asset_post};
        assert_eq!(alice_asset_pre + (transfer_amount / 2), alice_asset_post);
        println!("----- Success! transfer currency and tokens from Relay to Generic chain -----");
        println!();
        println!();
        println!();
    }
#[tokio::test]
async fn transfer_tokens_between_generic_and_dex_chain() {
let bob_account = AccountKeyring::Bob.to_account_id();
let bob_pair = PairSigner::<NodeTemplateRuntime, _>::new(AccountKeyring::Bob.pair());
let bob_relay_pair = PairSigner::<KusamaRuntime, _>::new(AccountKeyring::Bob.pair());
let asset_issue_amount = 50_000_000_000_000_000u128;
let transfer_amount = 40_000_000_000_000_000u128;
let generic_client = ClientBuilder::<NodeTemplateRuntime>::new()
.set_url(GENERIC_CHAIN_WS)
.build()
.await
.unwrap();
let dex_client = ClientBuilder::<NodeTemplateRuntime>::new()
.set_url(SUBDEX_CHAIN_WS)
.build()
.await
.unwrap();
// pre-seed
let relay_client = ClientBuilder::<KusamaRuntime>::new()
.set_url(RELAY_ALICE_WS)
.build()
.await
.unwrap();
let relay_transfer_asset = relay_client
.watch(
parachains::TransferToParachainCall::<KusamaRuntime> {
to: SUBDEX_PARA_ID,
amount: transfer_amount,
remark: encoded_to_remark(vec![0; 32]),
},
&bob_relay_pair,
)
.await;
assert! {relay_transfer_asset.is_ok()};
println! {"Transfer Asset from Relay is OK"};
println!("----- Running transfer currency and tokens from Generic to Dex chain -----");
// Initialise so we have some generic assets
let issue_asset = generic_client
.watch(
assets::IssueCall::<NodeTemplateRuntime> {
total: asset_issue_amount,
},
&bob_pair,
)
.await;
assert! {issue_asset.is_ok()};
let e = assets::IssuedEvent::<NodeTemplateRuntime>::decode(
&mut &issue_asset.unwrap().events[0].data[..],
)
.unwrap();
println!(
"Preset: Issue some token is OK! New asset_id {:?}",
e.asset_id
);
let issued_asset_id = e.asset_id;
let dex_currency_id = dex_client
.fetch(
dex_xcmp::AssetIdByParaAssetIdStore {
para_id: GENERIC_PARA_ID,
asset_id: None,
},
None,
)
.await
.unwrap();
let bob_currency_pre = match dex_currency_id {
Some(currency_id) => {
match dex_client
.fetch(
dex_pallet::AssetBalancesStore {
account_id: &bob_account,
asset_id: currency_id,
},
None,
)
.await
.unwrap()
{
Some(pre) => pre,
None => 0,
}
}
None => 0,
};
println! {"Bob account free balance before transfers: {:?}", bob_currency_pre};
let bob_asset_pre = 0;
println! {"Bob account new asset balance before transfers always 0"};
let transfer_asset_to_dex = generic_client
.watch(
token_dealer::TransferAssetsToParachainChainCall::<NodeTemplateRuntime> {
para_id: SUBDEX_PARA_ID,
dest: bob_account.clone(),
amount: transfer_amount,
asset_id: Some(issued_asset_id),
},
&bob_pair,
)
.await;
println! {"Transfer Asset to Dex is OK if error is Codec {:?}", transfer_asset_to_dex};
let transfer_currency_to_dex = generic_client
.watch(
token_dealer::TransferAssetsToParachainChainCall::<NodeTemplateRuntime> {
para_id: SUBDEX_PARA_ID,
dest: bob_account.clone(),
amount: transfer_amount,
asset_id: None,
},
&bob_pair,
)
.await;
println! {"Transfer Currency to Dex is {:?}", transfer_currency_to_dex};
// May want to listen to events here...
println!("Ensure Extrinsics Included...");
sleep(Duration::from_millis(15000)).await;
let dex_asset_id = dex_client
.fetch(
dex_xcmp::AssetIdByParaAssetIdStore {
para_id: GENERIC_PARA_ID,
asset_id: Some(issued_asset_id),
},
None,
)
.await
.unwrap()
.unwrap();
// Can avoid re-fetch if Some()
let dex_currency_id = dex_client
.fetch(
dex_xcmp::AssetIdByParaAssetIdStore {
para_id: GENERIC_PARA_ID,
asset_id: None,
},
None,
)
.await
.unwrap()
.unwrap();
let bob_currency_post = dex_client
.fetch(
dex_pallet::AssetBalancesStore {
account_id: &bob_account,
asset_id: dex_currency_id,
},
None,
)
.await
.unwrap()
.unwrap();
println! {"Bob account free balance after transfers: {:?}", bob_currency_post};
let bob_asset_post = dex_client
.fetch(
dex_pallet::AssetBalancesStore {
account_id: &bob_account,
asset_id: dex_asset_id,
},
None,
)
.await
.unwrap()
.unwrap();
println! {"Bob asset balance after transfers: {:?}", bob_asset_post};
assert_eq!(bob_asset_pre + transfer_amount, bob_asset_post);
assert_eq!(bob_currency_pre + transfer_amount, bob_currency_post);
println!("----- Success! transfer currency and tokens from Generic to Dex chain -----");
println!();
println!();
println!();
println!("----- Running transfer currency and tokens from Dex to Generic chain -----");
let bob_currency_pre = generic_client.account(&bob_account, None).await.unwrap();
println! {"Bob account free balance before transfers: {:?}", bob_currency_pre.data.free};
let bob_asset_pre = generic_client
.fetch(
assets::BalancesStore {
balance_of: (issued_asset_id, &bob_account),
},
None,
)
.await
.unwrap()
.unwrap();
println! {"Bob generic asset account free balance before transfers {:?}", bob_asset_pre};
let dex_transfer_asset = dex_client
.watch(
dex_xcmp::TransferAssetBalanceToParachainChainCall {
para_id: GENERIC_PARA_ID,
dest: &bob_account,
para_asset_id: Some(issued_asset_id),
amount: transfer_amount / 2,
},
&bob_pair,
)
.await;
println! {"Transfer Asset from Dex is OK if codec err {:?}", dex_transfer_asset};
let dex_transfer_currency = dex_client
.watch(
dex_xcmp::TransferAssetBalanceToParachainChainCall {
para_id: GENERIC_PARA_ID,
dest: &bob_account,
para_asset_id: None,
amount: transfer_amount / 2,
},
&bob_pair,
)
.await;
println! {"Transfer Currency from Dex is {:?}", dex_transfer_currency};
println!("Ensuring block after transfer event...");
sleep(Duration::from_millis(15000)).await;
let bob_currency_post = generic_client.account(&bob_account, None).await.unwrap();
println! {"Bob account free balance after transfers: {:?}", bob_currency_post.data.free};
let bob_asset_post = generic_client
.fetch(
assets::BalancesStore {
balance_of: (issued_asset_id, &bob_account),
},
None,
)
.await
.unwrap()
.unwrap();
println! {"Bob generic asset account balance after transfers {:?}", bob_asset_post};
assert_eq!(bob_asset_pre + (transfer_amount / 2), bob_asset_post);
assert_eq!(
bob_currency_pre.data.free + (transfer_amount / 2),
bob_currency_post.data.free
);
println!("----- Success! transfer currency and tokens from Dex to Generic chain -----");
}
}
|
use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use kdl::{KdlDocument, KdlError, KdlIdentifier, KdlValue};
use miette::IntoDiagnostic;
#[test]
fn spec_compliance() -> miette::Result<()> {
    // Walk every file in tests/test_cases/input, parse it as KDL, and hand the
    // result to `validate_res`, which compares it against the expectations
    // directory (parse-and-match, parse-only, or must-fail).
    let input = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests")
        .join("test_cases")
        .join("input");
    for test_name in fs::read_dir(&input).into_diagnostic()? {
        let test_path = test_name.into_diagnostic()?.path();
        println!(
            "parsing {}:",
            PathBuf::from(test_path.file_name().unwrap()).display()
        );
        // CRLF -> LF so comparisons behave the same on every platform.
        let src = normalize_line_endings(fs::read_to_string(&test_path).into_diagnostic()?);
        println!("src: {}", src);
        let res: Result<KdlDocument, KdlError> = src.parse();
        validate_res(res, &test_path)?;
    }
    Ok(())
}
/// Check one parse result against the `expected_kdl` directory.
///
/// Convention: `expected_kdl/<name>` exists -> input must parse and
/// re-serialize to exactly that file; `expected_kdl/_<name>` exists ->
/// parse succeeds but re-serialization is skipped; neither exists -> the
/// input is expected to FAIL to parse.
fn validate_res(res: Result<KdlDocument, KdlError>, path: &Path) -> miette::Result<()> {
    let file_name = path.file_name().unwrap();
    // `path` is `.../test_cases/input/<name>`; the expectations live in the
    // sibling directory `.../test_cases/expected_kdl`.
    let expected_dir = path
        .parent()
        .unwrap()
        .parent()
        .unwrap()
        .join("expected_kdl");
    let expected_path = expected_dir.join(file_name);
    // Leading underscore marks a "parse-only" test case.
    let underscored = expected_dir.join(&format!("_{}", PathBuf::from(file_name).display()));
    if expected_path.exists() {
        let doc = res?;
        let expected =
            normalize_line_endings(fs::read_to_string(&expected_path).into_diagnostic()?);
        println!("expected: {}", expected);
        let stringified = stringify_to_expected(doc);
        println!("stringified: {}", stringified);
        assert_eq!(stringified, expected);
    } else if underscored.exists() {
        println!(
            "skipped reserialization for {}",
            PathBuf::from(file_name).display()
        );
    } else {
        assert!(res.is_err(), "parse should not have succeeded");
    }
    Ok(())
}
/// Convert Windows-style CRLF line endings to plain LF.
fn normalize_line_endings(src: String) -> String {
    let pieces: Vec<&str> = src.split("\r\n").collect();
    pieces.join("\n")
}
/// Canonicalize a parsed document so it can be compared textually against the
/// expected output: strip comments, normalize number and string
/// representations, keep only the last occurrence of duplicate properties,
/// and drop empty `{ }` child blocks.
fn stringify_to_expected(mut doc: KdlDocument) -> String {
    doc.fmt_no_comments();
    normalize_numbers(&mut doc);
    normalize_strings(&mut doc);
    dedupe_props(&mut doc);
    remove_empty_children(&mut doc);
    doc.to_string()
}
/// Recursively rewrite every integer-valued entry as `KdlValue::Base10`, so
/// hex/octal/binary literals serialize the same as their decimal expected form.
fn normalize_numbers(doc: &mut KdlDocument) {
    for node in doc.nodes_mut() {
        for entry in node.entries_mut() {
            if let Some(value) = entry.value().as_i64() {
                *entry.value_mut() = KdlValue::Base10(value);
            }
        }
        // Recurse into the node's child block, if present.
        if let Some(children) = node.children_mut() {
            normalize_numbers(children);
        }
    }
}
/// Recursively rewrite every string-valued entry as `KdlValue::String`, so
/// alternate string representations (e.g. raw strings) serialize uniformly.
fn normalize_strings(doc: &mut KdlDocument) {
    for node in doc.nodes_mut() {
        for entry in node.entries_mut() {
            if let Some(value) = entry.value().as_string() {
                *entry.value_mut() = KdlValue::String(value.to_string());
            }
        }
        // Recurse into the node's child block, if present.
        if let Some(children) = node.children_mut() {
            normalize_strings(children);
        }
    }
}
/// Recursively drop duplicated properties, keeping only the LAST occurrence
/// of each property name (per KDL semantics). Positional arguments (entries
/// without a name) are always kept.
fn dedupe_props(doc: &mut KdlDocument) {
    for node in doc.nodes_mut() {
        // Index every occurrence of each property name. Uses the entry API
        // instead of contains_key + insert + get_mut (one lookup, not three),
        // and a read-only `entries()` pass instead of `entries_mut()`.
        let mut props = HashMap::<KdlIdentifier, Vec<usize>>::new();
        for (idx, entry) in node.entries().iter().enumerate() {
            if let Some(name) = entry.name() {
                props.entry(name.clone()).or_default().push(idx);
            }
        }
        // Keep an entry when it is positional, or when it is the last
        // occurrence of its property name.
        let deduped: Vec<_> = node
            .entries()
            .iter()
            .enumerate()
            .filter(|(idx, entry)| {
                entry
                    .name()
                    .and_then(|name| props.get(name))
                    .map_or(true, |indices| indices.last() == Some(idx))
            })
            .map(|(_, entry)| entry.clone())
            .collect();
        *node.entries_mut() = deduped;
        if let Some(children) = node.children_mut() {
            dedupe_props(children);
        }
    }
}
/// Recursively replace empty `{ }` child blocks with no block at all, since
/// the expected output never contains them.
fn remove_empty_children(doc: &mut KdlDocument) {
    for node in doc.nodes_mut() {
        let maybe_children = node.children_mut();
        // `map_or` instead of is_some() + unwrap(): no panic path.
        if maybe_children.as_ref().map_or(false, |c| c.nodes().is_empty()) {
            *maybe_children = None;
        }
        if let Some(children) = maybe_children {
            remove_empty_children(children);
        }
    }
}
|
// Undo rename from Cargo.toml
extern crate serde_crate as serde;
use serde::{
de::{Deserialize, Deserializer},
ser::{Error as _, Serialize, Serializer},
};
use crate::JsOption;
impl<'de, T> Deserialize<'de> for JsOption<T>
where
    T: Deserialize<'de>,
{
    /// Deserialize a `JsOption`.
    ///
    /// This implementation will never return `Undefined`. You need to use
    /// `#[serde(default)]` to get `Undefined` when the field is not present.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Delegate to `Option`'s deserializer: present values become `Some`,
        // explicit nulls become `None`, then lift into `JsOption`.
        let inner = Option::<T>::deserialize(deserializer)?;
        Ok(Self::from_option(inner))
    }
}
impl<T> Serialize for JsOption<T>
where
    T: Serialize,
{
    /// Serialize a `JsOption`.
    ///
    /// Serialization will fail for `JsOption::Undefined`. You need to use
    /// `#[skip_serializing_if = "JsOption::is_undefined"]` to stop the field
    /// from being serialized altogether.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            // Present value: serialize like `Option::Some`.
            Self::Some(val) => serializer.serialize_some(val),
            // Explicit null: serialize like `Option::None`.
            Self::Null => serializer.serialize_none(),
            // `undefined` has no serialized form; the field must be skipped.
            Self::Undefined => Err(S::Error::custom("attempted to serialize `undefined`")),
        }
    }
}
|
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use crate::util::lines_from_file;
/// Entry point for Advent of Code day 8: read the puzzle input and print
/// the answers for both parts.
pub fn day8() {
    println!("== Day 8 ==");
    let input = lines_from_file("src/day8/input.txt");
    println!("Part A: {}", part_a(&input));
    println!("Part B: {}", part_b(&input));
}
/// Part A: count output digits with a uniquely-identifying segment count
/// (1, 4, 7 or 8) across all input lines.
fn part_a(input: &Vec<String>) -> i32 {
    input
        .iter()
        .map(|line| count_known(split_line(line)[1]))
        .sum()
}
/// Split an input line on the `" | "` delimiter into
/// `[signal patterns, output digits]`.
///
/// Takes `&str` rather than `&String` (the `&String` parameter was an
/// anti-pattern); existing callers passing `&String` still compile via
/// deref coercion.
fn split_line(line: &str) -> Vec<&str> {
    line.split(" | ").collect()
}
/// Count the space-separated digit patterns whose length identifies a
/// unique digit (see `proper_length`).
fn count_known(value_line: &str) -> i32 {
    value_line.split(" ").filter(|s| proper_length(s)).count() as i32
}
/// True when the pattern's segment count identifies a unique digit:
/// 2 segments -> 1, 3 -> 7, 4 -> 4, 7 -> 8.
fn proper_length(str: &str) -> bool {
    // `matches!` replaces the match-returning-bool boilerplate.
    matches!(str.len(), 2 | 3 | 4 | 7)
}
/// Part B: decode every line's four-digit output value and sum them.
fn part_b(input: &Vec<String>) -> i32 {
    input.iter().map(|line| decode_line(line.as_str())).sum()
}
fn decode_line(line: &str) -> i32 {
let line_split: Vec<&str> = (*line).split(" | ").collect();
let wires = *line_split.get(0).unwrap();
let digits = *line_split.get(1).unwrap();
let wire_mapping: HashMap<String, i32> = map_wires(wires);
decode_digits(wire_mapping, digits)
}
/// Deduce which scrambled segment pattern corresponds to which digit (0-9).
///
/// Digits 1, 7, 4 and 8 are identified directly by their unique segment
/// counts (2, 3, 4, 7); the rest are derived by segment-set arithmetic
/// against those known patterns.
fn map_wires(input: &str) -> HashMap<String, i32> {
    let wires: Vec<&str> = input.split(" ").collect();
    // pattern -> digit (the result) and digit -> pattern (for lookups below).
    let mut mapping: HashMap<String, i32> = HashMap::new();
    let mut mapping_inverse: HashMap<i32, &str> = HashMap::new();
    // Pass 1: the four digits with a unique segment count.
    for w in wires.iter() {
        let wire = *w;
        match wire.len() {
            2 => {
                mapping.insert(wire.to_string(), 1);
                mapping_inverse.insert(1, wire);
            }
            3 => {
                mapping.insert(wire.to_string(), 7);
                mapping_inverse.insert(7, wire);
            }
            4 => {
                mapping.insert(wire.to_string(), 4);
                mapping_inverse.insert(4, wire);
            }
            7 => {
                mapping.insert(wire.to_string(), 8);
                mapping_inverse.insert(8, wire);
            }
            _ => {}
        }
    }
    // "8 - 4" leaves three segments; among the 5-segment digits only 2 keeps
    // two segments after subtracting them, so that identifies 2.
    let two_sub: String = subtract_segments(mapping_inverse.get(&8).unwrap(), mapping_inverse.get(&4).unwrap());
    // "4 - 1" leaves two segments; only 5 (of the 5-segment digits) keeps
    // three segments after subtracting them, so that identifies 5.
    let five_sub: String = subtract_segments(mapping_inverse.get(&4).unwrap(), mapping_inverse.get(&1).unwrap());
    mapping.insert(find_wire(&wires, two_sub.as_str(), 2), 2);
    let five: String = find_wire(&wires, five_sub.as_str(), 3);
    // 9 is exactly 5 plus the segments of 1.
    let segments_for_9 = add_segments(mapping_inverse.get(&1).unwrap(), &five.as_str());
    mapping.insert(find_correct_wire(&wires, segments_for_9), 9);
    // 6 is exactly 5 plus whatever 8 has that 1 lacks.
    let eight_minus_one = subtract_segments(mapping_inverse.get(&8).unwrap(), mapping_inverse.get(&1).unwrap());
    let segment_for_6 = add_segments(eight_minus_one.as_str(), &five.as_str());
    mapping.insert(find_correct_wire(&wires, segment_for_6), 6);
    mapping.insert(five, 5);
    // Leftovers: an unmapped 6-segment wire must be 0, an unmapped
    // 5-segment wire must be 3.
    for wire in wires {
        if !mapping.contains_key(wire) {
            match wire.len() {
                6 => mapping.insert(String::from(wire), 0),
                5 => mapping.insert(wire.to_string(), 3),
                _ => None
            };
        }
    }
    mapping
}
/// Find the wire whose characters are a permutation of `segments`.
/// Panics when no candidate matches.
fn find_correct_wire(wires: &Vec<&str>, segments: String) -> String {
    // Compare order-insensitively by sorting both character sets.
    let sorted_chars = |s: &str| {
        let mut cs: Vec<char> = s.chars().collect();
        cs.sort();
        cs
    };
    let target = sorted_chars(&segments);
    wires
        .iter()
        .find(|w| sorted_chars(**w) == target)
        .map(|w| w.to_string())
        .unwrap_or_else(|| panic!("Can't find correct wire for {} :: {:?}", segments, wires))
}
/// Find the 5-segment wire that, after removing `sub_segments`, is left with
/// exactly `should_equal_len` segments. Panics when no wire qualifies.
fn find_wire(wires: &Vec<&str>, sub_segments: &str, should_equal_len: usize) -> String {
    for candidate in wires.iter().filter(|w| w.len() == 5) {
        let remaining = subtract_segments(candidate, sub_segments);
        if remaining.len() == should_equal_len {
            return String::from(*candidate);
        }
    }
    panic!("Could not find wire for length {}! {:?} :: {:?}", should_equal_len, wires, sub_segments)
}
/// Set difference on segment strings: the characters of `a` that do not
/// appear in `b`, preserving `a`'s order.
fn subtract_segments(a: &str, b: &str) -> String {
    a.chars().filter(|c| !b.contains(*c)).collect()
}
/// Set union on segment strings: concatenate `a` and `b`, keeping only the
/// first occurrence of each character.
fn add_segments(a: &str, b: &str) -> String {
    let mut seen = HashSet::new();
    a.chars()
        .chain(b.chars())
        .filter(|c| seen.insert(*c))
        .collect()
}
fn decode_digits(wire_mapping: HashMap<String, i32>, digits: &str) -> i32 {
// println!("wm = {:?} ::: digits: {}",wire_mapping, digits);
let keys = get_keys(wire_mapping.clone());
let wires = keys.iter().map(|s| &**s).collect();
let as_numbers: Vec<String> = (*digits).split(" ")
.into_iter()
// .map(|d| sort_string(d))
.map(|s| find_correct_wire(&wires, s.to_string()))
.map(|s| wire_mapping.get(s.as_str()).unwrap_or(&-1))
.map(|i| i32::to_string(i))
.collect();
let number_string = String::from_iter(as_numbers);
// println!("{}", number_string);
i32::from_str(number_string.as_str()).unwrap()
}
/// Consume `map` and return its keys (in arbitrary hash order).
fn get_keys(map: HashMap<String, i32>) -> Vec<String> {
    // `into_keys` replaces the manual destructuring loop.
    map.into_keys().collect()
}
/// Return `str` with its characters sorted ascending.
fn sort_string(str: &str) -> String {
    let mut chars: Vec<char> = str.chars().collect();
    chars.sort();
    chars.into_iter().collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Part A/B are checked against the sample input from the puzzle text and
    // against the real puzzle input (with known-good answers); the helper
    // tests use the worked example from the puzzle description.
    #[test]
    fn part_a_test_input() {
        let filename = "src/day8/test-input.txt";
        let input = lines_from_file(filename);
        let result = part_a(&input);
        assert_eq!(26, result);
    }
    #[test]
    fn part_a_real() {
        let filename = "src/day8/input.txt";
        let input = lines_from_file(filename);
        let result = part_a(&input);
        assert_eq!(247, result);
    }
    #[test]
    fn sort_string_t() {
        assert_eq!("abc", sort_string("cba"))
    }
    #[test]
    fn decode_digit_t() {
        let wire_mapping = HashMap::from([
            ("acb".to_string(), 7),
            ("cb".to_string(), 1),
            ("gfabdec".to_string(), 8),
            ("dcba".to_string(), 4),
            ("gfabd".to_string(), 5)
        ]);
        let digits = "acb dcba gfabdec cb";
        assert_eq!(7481, decode_digits(wire_mapping, digits))
    }
    #[test]
    fn subtract_segment_t() {
        let a = "fbcad";
        let b = "cdg";
        assert_eq!("fba", subtract_segments(a, b))
    }
    #[test]
    fn add_segment_t() {
        let a = "abc";
        let b = "adgcf";
        assert_eq!("abcdgf", add_segments(a, b))
    }
    #[test]
    fn find_correct_wire_t() {
        let strings = vec![
            "abc",
            "fdga",
            "cdbaf",
        ];
        assert_eq!("fdga", find_correct_wire(&strings, "adgf".to_string()))
    }
    #[test]
    fn map_wires_t() {
        // Full deduction over the puzzle's worked example line.
        let expected = HashMap::from([
            ("acedgfb".to_string(), 8),
            ("cdfbe".to_string(), 5),
            ("gcdfa".to_string(), 2),
            ("fbcad".to_string(), 3),
            ("dab".to_string(), 7),
            ("cefabd".to_string(), 9),
            ("cdfgeb".to_string(), 6),
            ("eafb".to_string(), 4),
            ("cagedb".to_string(), 0),
            ("ab".to_string(), 1)
        ]);
        let wires = "acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab";
        assert_eq!(expected, map_wires(wires))
    }
    #[test]
    fn decode_line_t() {
        let input = "acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf";
        let result = decode_line(input);
        assert_eq!(5353, result)
    }
    #[test]
    fn part_b_test_input() {
        let filename = "src/day8/test-input.txt";
        let input = lines_from_file(filename);
        let result = part_b(&input);
        assert_eq!(61229, result);
    }
    #[test]
    fn part_b_real() {
        let filename = "src/day8/input.txt";
        let input = lines_from_file(filename);
        let result = part_b(&input);
        assert_eq!(933305, result);
    }
}
} |
use anyhow::{format_err, Error};
use libxml::parser::Parser;
use libxml::tree::{self, Document, NodeType};
use libxml::xpath::Context;
use std::{fmt, ops::Deref, rc::Rc};
/// Result of an xpath evaluation; the variant is chosen from the kind of
/// the first matched node (see `Node::xpath`).
#[derive(Debug)]
pub enum Value {
    /// Matched element nodes, wrapped for further querying.
    Element(Vec<Node>),
    /// String contents of matched attribute/text nodes.
    Text(Vec<String>),
    /// No match.
    None,
}
impl Value {
    /// Unwrap element results; `None` for text or empty results.
    pub fn into_element(self) -> Option<Vec<Node>> {
        if let Value::Element(nodes) = self {
            Some(nodes)
        } else {
            None
        }
    }
    /// Unwrap text results; `None` for element or empty results.
    pub fn into_text(self) -> Option<Vec<String>> {
        if let Value::Text(strings) = self {
            Some(strings)
        } else {
            None
        }
    }
}
/// A node handle that keeps the owning libxml `Document` and its xpath
/// `Context` alive alongside the raw node, so further xpath queries can be
/// evaluated relative to any node.
pub struct Node {
    document: Rc<Document>,
    context: Rc<Context>,
    node: tree::Node,
}
impl fmt::Debug for Node {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.get_type() {
            // Elements print as `<Element name at 0x...>`.
            Some(NodeType::ElementNode) => {
                write!(f, "<Element {} at {:p}>", self.get_name(), self.node_ptr())
            }
            // Attributes and text nodes print their string content.
            Some(NodeType::AttributeNode) | Some(NodeType::TextNode) => {
                write!(f, "{:?}", self.get_content())
            }
            // Other node kinds are never constructed by this wrapper.
            _ => unimplemented!(),
        }
    }
}
impl Node {
    /// Evaluate `xpath` and require text (attribute/text-node) results.
    ///
    /// # Errors
    /// Fails when evaluation fails or the result is not text.
    pub fn xpath_text(&self, xpath: &str) -> Result<Vec<String>, Error> {
        // `ok_or_else` replaces the match-on-Option boilerplate.
        self.xpath(xpath)?
            .into_text()
            .ok_or_else(|| format_err!("not found: {}", xpath))
    }
    /// Evaluate `xpath` and require element results.
    ///
    /// # Errors
    /// Fails when evaluation fails or the result is not elements.
    pub fn xpath_elem(&self, xpath: &str) -> Result<Vec<Node>, Error> {
        self.xpath(xpath)?
            .into_element()
            .ok_or_else(|| format_err!("not found: {}", xpath))
    }
    /// Evaluate `xpath` relative to this node.
    ///
    /// The returned [`Value`] variant is decided by the type of the FIRST
    /// matched node: elements are re-wrapped (sharing this node's document
    /// and context), attributes/text yield their string content, and no
    /// match yields `Value::None`.
    pub fn xpath(&self, xpath: &str) -> Result<Value, Error> {
        let nodes = self
            .context
            .node_evaluate(xpath, &self.node)
            .map_err(|_| format_err!("failed to evaluate xpath: {}", xpath))?
            .get_nodes_as_vec();
        let result = match nodes.first() {
            Some(node) => match node.get_type() {
                Some(NodeType::ElementNode) => Value::Element(
                    nodes
                        .into_iter()
                        .map(|node| Node {
                            document: self.document.clone(),
                            context: self.context.clone(),
                            node,
                        })
                        .collect(),
                ),
                Some(NodeType::AttributeNode) | Some(NodeType::TextNode) => {
                    Value::Text(nodes.into_iter().map(|node| node.get_content()).collect())
                }
                _ => unimplemented!(),
            },
            None => Value::None,
        };
        Ok(result)
    }
}
// Expose the wrapped `tree::Node` API (get_type, get_content, ...) directly
// on the wrapper.
impl Deref for Node {
    type Target = tree::Node;
    fn deref(&self) -> &Self::Target {
        &self.node
    }
}
/// Parse an HTML string and return a queryable handle to its root element.
///
/// # Errors
/// Fails when libxml cannot parse the input or an xpath context cannot be
/// created. Panics if the parsed document has no root element.
pub fn parse_html<S: AsRef<str>>(html: S) -> Result<Node, Error> {
    let parser = Parser::default_html();
    let document = parser
        .parse_string(html.as_ref())
        .map_err(|_| format_err!("failed to parse html"))?;
    let context = Context::new(&document).map_err(|_| format_err!("failed to new context"))?;
    let root = document.get_root_element().expect("no root element");
    // Document and context are shared (Rc) so every derived Node keeps them alive.
    Ok(Node {
        document: Rc::new(document),
        context: Rc::new(context),
        node: root,
    })
}
#[cfg(test)]
mod tests {
    use crate::xpath::parse_html;
    // Smoke test: exercises element, attribute and text() xpath results on a
    // realistic HTML fragment (output inspected manually via println).
    #[test]
    fn find_nodes() {
        let html = r#"
<!doctype html>
<html lang="zh-CN" dir="ltr">
<head>
<meta charset="utf-8">
<meta http-equiv="Content-Security-Policy" content="default-src 'none'; script-src 'unsafe-inline' resource: chrome:; connect-src https:; img-src https: data: blob:; style-src 'unsafe-inline';">
<title>新标签页</title>
<link rel="icon" type="image/png" href="chrome://branding/content/icon32.png"/>
<link rel="stylesheet" href="chrome://browser/content/contentSearchUI.css" />
<link rel="stylesheet" href="resource://activity-stream/css/activity-stream.css" />
</head>
<body class="activity-stream">
<div id="root"><!-- Regular React Rendering --></div>
<div id="snippets-container">
<div id="snippets"></div>
</div>
<table id="wow" class="lol">
<tr class="head">
<th>Firstname</th>
<th>Lastname</th>
<th>Age</th>
</tr>
<tr class="body">
<td>Jill</td>
<td>Smith</td>
<td>50</td>
</tr>
<tr class="body">
<td>Eve</td>
<td>Jackson</td>
<td>94</td>
</tr>
</table>
</body>
</html>
"#;
        let node = parse_html(html).unwrap();
        println!("{:?}", node.xpath(r#"//table"#));
        println!("{:?}", node.xpath(r#"//table/@class"#));
        println!("{:?}", node.xpath(r#"//table//tr"#));
        println!("{:?}", node.xpath(r#"//table//th/text()"#));
        // Relative query: evaluate ".//text()" against each matched <td>.
        for td in node.xpath("//td").unwrap().into_element().unwrap() {
            println!("{:?}", td.xpath(".//text()"));
        }
    }
}
|
mod components;
mod raw_terminal_backend;
mod ui;
mod ui_state;
pub use components::*;
pub use raw_terminal_backend::*;
pub use ui::*;
pub use ui_state::*;
|
#![crate_name = "document"]
#![feature(macro_rules)]
use std::fmt;
use std::rc::{Rc,Weak};
use std::cell::RefCell;
use std::collections::hashmap::HashMap;
// FIXME: Parents need to be weakref!
// TODO: See about removing duplication of child / parent implementations.
// TODO: remove clone from inner?
// children
// root nodes -> 1x element, comment, pi
// element nodes -> element, comment, text, pi (attribute, namespace)
// text nodes ->
// attribute nodes ->
// namespace nodes ->
// processing instruction nodes ->
// comment nodes ->
//
// parents
// root nodes ->
// element nodes -> element, root
// text nodes -> element
// attribute nodes -> element
// namespace nodes -> element
// processing instruction nodes -> element
// comment nodes -> element
// A document owns a single root; `Option` only because the root must be
// created after the `Document` handle exists (see `Document::new`).
struct DocumentInner {
    // We will always have a root, but during construction we have to
    // pick one first
    root: Option<Root>,
}
#[deriving(Clone)]
pub struct Document {
    // Shared, internally-mutable handle: clones alias the same document.
    inner: Rc<RefCell<DocumentInner>>,
}
impl Document {
    /// Create an empty document with a fresh root node.
    pub fn new() -> Document {
        let inner = DocumentInner { root: None };
        let doc = Document { inner: Rc::new(RefCell::new(inner)) };
        let root = Root::new(doc.clone());
        doc.inner.borrow_mut().root = Some(root);
        doc
    }
    /// Create a detached element owned by this document.
    pub fn new_element(&self, name: String) -> Element {
        Element::new(self.clone(), name)
    }
    /// Create a detached text node owned by this document.
    pub fn new_text(&self, text: String) -> Text {
        Text::new(self.clone(), text)
    }
    /// The document root; always present once `new` has returned.
    pub fn root(&self) -> Root {
        let inner = self.inner.borrow();
        inner.root.clone().unwrap()
    }
}
impl PartialEq for Document {
    // Reference equality: two handles are equal iff they alias the same inner.
    fn eq(&self, other: &Document) -> bool {
        &*self.inner as *const RefCell<DocumentInner> == &*other.inner as *const RefCell<DocumentInner>
    }
}
impl fmt::Show for Document {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Document")
    }
}
/// Items that may be children of the root node
#[deriving(Clone,PartialEq)]
pub enum RootChild {
    ElementRootChild(Element),
}
impl RootChild {
    fn is_element(&self) -> bool {
        match self {
            &ElementRootChild(_) => true,
        }
    }
    /// The wrapped element (currently the only root-child kind).
    pub fn element(&self) -> Option<Element> {
        match self {
            &ElementRootChild(ref e) => Some(e.clone()),
        }
    }
    /// Detach this child from whatever parent currently owns it.
    pub fn remove_from_parent(&self) {
        match self {
            &ElementRootChild(ref e) => e.remove_from_parent(),
        }
    }
    pub fn set_parent(&self, parent: Root) {
        match self {
            &ElementRootChild(ref e) => e.set_parent(parent),
        }
    }
}
// Conversion trait so `Root` methods accept either a bare `Element` or an
// existing `RootChild`.
pub trait ToRootChild {
    fn to_root_child(&self) -> RootChild;
}
impl ToRootChild for RootChild {
    fn to_root_child(&self) -> RootChild { self.clone() }
}
impl ToRootChild for Element {
    fn to_root_child(&self) -> RootChild { ElementRootChild(self.clone()) }
}
#[deriving(Clone)]
struct RootInner {
    document: Document,
    children: Vec<RootChild>,
}
#[deriving(Clone)]
pub struct Root {
    // Shared handle; clones alias the same root node.
    inner: Rc<RefCell<RootInner>>,
}
impl Root {
    fn new(document: Document) -> Root {
        let inner = RootInner { document: document, children: Vec::new() };
        Root { inner: Rc::new(RefCell::new(inner)) }
    }
    pub fn document(&self) -> Document {
        self.inner.borrow().document.clone()
    }
    // Enforces the "at most one element child" invariant for `append_child`.
    fn remove_element_children(&self) {
        let mut inner = self.inner.borrow_mut();
        inner.children.retain(|c| ! c.is_element());
    }
    pub fn remove_child<C : ToRootChild>(&self, child: C) {
        let child = child.to_root_child();
        let mut inner = self.inner.borrow_mut();
        inner.children.retain(|c| c != &child);
    }
    /// This removes any existing element children before appending a new element
    pub fn append_child<C : ToRootChild>(&self, child: C) {
        let child = child.to_root_child();
        if child.is_element() {
            self.remove_element_children();
        }
        // Re-parenting: detach from the old parent before attaching here.
        child.remove_from_parent();
        child.set_parent(self.clone());
        let mut inner = self.inner.borrow_mut();
        inner.children.push(child);
    }
    pub fn children(&self) -> Vec<RootChild> {
        let inner = self.inner.borrow();
        inner.children.clone()
    }
}
impl PartialEq for Root {
    // Reference equality: equal iff both handles alias the same inner node.
    fn eq(&self, other: &Root) -> bool {
        &*self.inner as *const RefCell<RootInner> == &*other.inner as *const RefCell<RootInner>
    }
}
impl fmt::Show for Root {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Root element")
    }
}
#[deriving(Clone)]
struct TextInner {
    document: Document,
    text: String,
    // Strong parent link (see FIXME at top of file: should be a weak ref).
    parent: Option<Element>,
}
#[deriving(Clone)]
pub struct Text {
    inner: Rc<RefCell<TextInner>>,
}
impl Text {
    fn new(document: Document, text: String) -> Text {
        let inner = TextInner {document: document, text: text, parent: None};
        Text {inner: Rc::new(RefCell::new(inner))}
    }
    pub fn document(&self) -> Document {
        self.inner.borrow().document.clone()
    }
    pub fn text(&self) -> String {
        let inner = self.inner.borrow();
        inner.text.clone()
    }
    pub fn set_text(&self, text: String) {
        let mut inner = self.inner.borrow_mut();
        inner.text = text;
    }
    /// Detach from the owning element (no-op when already detached).
    pub fn remove_from_parent(&self) {
        let mut inner = self.inner.borrow_mut();
        match inner.parent {
            Some(ref e) => e.remove_child(self.clone()),
            None => {}
        };
        inner.parent = None;
    }
    fn set_parent(&self, parent: Element) {
        let mut inner = self.inner.borrow_mut();
        inner.parent = Some(parent);
    }
    pub fn parent(&self) -> Option<Element> {
        let inner = self.inner.borrow();
        inner.parent.clone()
    }
}
impl PartialEq for Text {
    // Reference equality.
    fn eq(&self, other: &Text) -> bool {
        &*self.inner as *const RefCell<TextInner> == &*other.inner as *const RefCell<TextInner>
    }
}
impl fmt::Show for Text {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Text: {}", self.inner.borrow().text)
    }
}
#[deriving(Clone)]
struct AttributeInner {
    document: Document,
    name: String,
    value: String,
    // Weak back-link to the owning element; avoids an Rc cycle
    // (element -> attribute -> element).
    element: Option<Weak<RefCell<ElementInner>>>,
}
#[deriving(Clone)]
pub struct Attribute {
    inner: Rc<RefCell<AttributeInner>>,
}
impl Attribute {
    fn new(document: Document, name: String, value: String) -> Attribute {
        let inner = AttributeInner {document: document,
                                    name: name,
                                    value: value,
                                    element: None};
        Attribute {inner: Rc::new(RefCell::new(inner))}
    }
    pub fn document(&self) -> Document {
        self.inner.borrow().document.clone()
    }
    pub fn name(&self) -> String {
        self.inner.borrow().name.clone()
    }
    pub fn value(&self) -> String {
        self.inner.borrow().value.clone()
    }
    /// The owning element, or `None` when the attribute was never attached
    /// or the element has since been dropped (weak upgrade fails).
    pub fn parent(&self) -> Option<Element> {
        let a = self.inner.borrow();
        let b = &a.element;
        let c = b.as_ref().and_then(|x| x.upgrade());
        let d = c.map(|x| Element {inner: x});
        d
    }
}
impl PartialEq for Attribute {
    // Reference equality.
    fn eq(&self, other: &Attribute) -> bool {
        &*self.inner as *const RefCell<AttributeInner> == &*other.inner as *const RefCell<AttributeInner>
    }
}
impl fmt::Show for Attribute {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let inner = self.inner.borrow();
        write!(f, "@{}='{}'", inner.name, inner.value)
    }
}
/// Items that may be children of element nodes
#[deriving(Clone,PartialEq,Show)]
pub enum ElementChild {
    ElementElementChild(Element),
    TextElementChild(Text),
}
impl ElementChild {
    pub fn element(&self) -> Option<Element> {
        match self {
            &ElementElementChild(ref e) => Some(e.clone()),
            _ => None,
        }
    }
    pub fn text(&self) -> Option<Text> {
        match self {
            &TextElementChild(ref t) => Some(t.clone()),
            _ => None,
        }
    }
    pub fn remove_from_parent(&self) {
        match self {
            &ElementElementChild(ref e) => e.remove_from_parent(),
            &TextElementChild(ref t) => t.remove_from_parent(),
        }
    }
    fn set_parent(&self, parent: Element) {
        match self {
            &ElementElementChild(ref e) => e.set_parent(parent),
            &TextElementChild(ref t) => t.set_parent(parent),
        }
    }
    /// Parent viewed as an `Element`; aborts if an element child somehow has
    /// a non-element parent (broken invariant).
    pub fn parent(&self) -> Option<Element> {
        match self {
            &ElementElementChild(ref e) =>
                match e.parent() {
                    None => None,
                    Some(ElementElementParent(ref e)) => Some(e.clone()),
                    _ => fail!("An element's child's parent is not an element")
                },
            &TextElementChild(ref t) => t.parent(),
        }
    }
}
// Conversion trait so `Element::append_child` accepts elements, text nodes,
// existing children, or root children.
pub trait ToElementChild {
    fn to_element_child(&self) -> ElementChild;
}
impl ToElementChild for ElementChild {
    fn to_element_child(&self) -> ElementChild { self.clone() }
}
impl ToElementChild for Element {
    fn to_element_child(&self) -> ElementChild { ElementElementChild(self.clone()) }
}
impl ToElementChild for Text {
    fn to_element_child(&self) -> ElementChild { TextElementChild(self.clone()) }
}
impl ToElementChild for RootChild {
    fn to_element_child(&self) -> ElementChild {
        match self {
            &ElementRootChild(ref e) => ElementElementChild(e.clone()),
        }
    }
}
/// Items that may be parents of element nodes
#[deriving(PartialEq,Clone)]
pub enum ElementParent {
    ElementElementParent(Element),
    RootElementParent(Root),
}
impl ElementParent {
    pub fn element(&self) -> Option<Element> {
        match self {
            &ElementElementParent(ref e) => Some(e.clone()),
            _ => None
        }
    }
    pub fn root(&self) -> Option<Root> {
        match self {
            &RootElementParent(ref r) => Some(r.clone()),
            _ => None
        }
    }
    // Dispatch to the concrete parent kind.
    pub fn remove_child(&self, child: Element) {
        match self {
            &ElementElementParent(ref e) => e.remove_child(child),
            &RootElementParent(ref r) => r.remove_child(child),
        }
    }
    pub fn children(&self) -> Vec<ElementChild> {
        match self {
            &ElementElementParent(ref e) => e.children(),
            &RootElementParent(ref e) => e.children().iter().map(|x| x.to_element_child()).collect(),
        }
    }
}
// Conversion trait so `Element::set_parent` accepts either parent kind.
pub trait ToElementParent {
    fn to_element_parent(&self) -> ElementParent;
}
impl ToElementParent for Element {
    fn to_element_parent(&self) -> ElementParent {
        ElementElementParent(self.clone())
    }
}
impl ToElementParent for Root {
    fn to_element_parent(&self) -> ElementParent {
        RootElementParent(self.clone())
    }
}
#[deriving(Clone)]
struct ElementInner {
    document: Document,
    name: String,
    // Strong parent link (see FIXME at top of file: should be a weak ref).
    parent: Option<ElementParent>,
    children: Vec<ElementChild>,
    attributes: HashMap<String, Attribute>,
}
#[deriving(Clone)]
pub struct Element {
    inner: Rc<RefCell<ElementInner>>,
}
// TODO: See about using the attribute value reference as the key to the hash
impl Element {
    fn new(document: Document, name: String) -> Element {
        let inner = ElementInner {document: document,
                                  name: name,
                                  parent: None,
                                  children: Vec::new(),
                                  attributes: HashMap::new()};
        Element {inner: Rc::new(RefCell::new(inner))}
    }
    pub fn document(&self) -> Document {
        let inner = self.inner.borrow();
        inner.document.clone()
    }
    pub fn name(&self) -> String {
        let inner = self.inner.borrow();
        inner.name.clone()
    }
    pub fn set_name(&self, name: String) {
        let mut inner = self.inner.borrow_mut();
        inner.name = name;
    }
    pub fn parent(&self) -> Option<ElementParent> {
        let inner = self.inner.borrow();
        inner.parent.clone()
    }
    // Does not change child at all
    fn remove_child<C : ToElementChild>(&self, child: C) {
        let child = child.to_element_child();
        let mut inner = self.inner.borrow_mut();
        inner.children.retain(|c| c != &child);
    }
    fn remove_from_parent(&self) {
        let mut inner = self.inner.borrow_mut();
        match inner.parent {
            Some(ref e) => e.remove_child(self.clone()),
            None => {}
        };
        inner.parent = None;
    }
    fn set_parent<P : ToElementParent>(&self, parent: P) {
        let parent = parent.to_element_parent();
        let mut inner = self.inner.borrow_mut();
        inner.parent = Some(parent);
    }
    /// Append `child`, detaching it from any previous parent first.
    pub fn append_child<C : ToElementChild>(&self, child: C) {
        let child = child.to_element_child();
        child.remove_from_parent();
        child.set_parent(self.clone());
        let mut inner = self.inner.borrow_mut();
        inner.children.push(child.clone());
    }
    pub fn children(&self) -> Vec<ElementChild> {
        let inner = self.inner.borrow();
        inner.children.clone()
    }
    /// Set (or replace) an attribute and return the new attribute node.
    pub fn set_attribute(&self, name: String, value: String) -> Attribute {
        let attr = {
            let inner = self.inner.borrow();
            Attribute::new(inner.document.clone(), name.clone(), value)
        };
        // The attribute keeps only a weak back-link to this element.
        attr.inner.borrow_mut().element = Some(self.inner.downgrade());
        self.inner.borrow_mut().attributes.insert(name, attr.clone());
        attr
    }
    pub fn attributes(&self) -> Vec<Attribute> {
        let inner = self.inner.borrow();
        inner.attributes.values().map(|a| a.clone()).collect()
    }
    pub fn each_attribute(&self, f: |&Attribute|) {
        let inner = self.inner.borrow();
        for attr in inner.attributes.values() {
            f(attr);
        }
    }
    pub fn attribute(&self, name: &str) -> Option<Attribute> {
        let inner = self.inner.borrow();
        inner.attributes.find(&name.to_string()).map(|x| x.clone())
    }
    // TODO: look into equiv
    /// Attribute value by name, or `None` when the attribute is absent.
    pub fn get_attribute(&self, name: &str) -> Option<String> {
        let inner = self.inner.borrow();
        let attr = inner.attributes.find(&name.to_string());
        attr.map(|x| x.inner.borrow().value.clone())
    }
}
impl PartialEq for Element {
    fn eq(&self, other: &Element) -> bool {
        // Nodes have reference equality, so we just check to see if
        // we are pointing at the same thing.
        &*self.inner as *const RefCell<ElementInner> == &*other.inner as *const RefCell<ElementInner>
    }
}
impl fmt::Show for Element {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "<{}>", self.inner.borrow().name)
    }
}
/// Any node kind; used by `Nodeset` to hold heterogeneous nodes.
#[deriving(Clone,Show,PartialEq)]
pub enum Any {
    ElementAny(Element),
    AttributeAny(Attribute),
    TextAny(Text),
    RootAny(Root),
}
impl Any {
    pub fn element(&self) -> Option<Element> {
        match self {
            &ElementAny(ref e) => Some(e.clone()),
            _ => None,
        }
    }
    pub fn attribute(&self) -> Option<Attribute> {
        match self {
            &AttributeAny(ref e) => Some(e.clone()),
            _ => None,
        }
    }
    pub fn text(&self) -> Option<Text> {
        match self {
            &TextAny(ref e) => Some(e.clone()),
            _ => None,
        }
    }
    pub fn root(&self) -> Option<Root> {
        match self {
            &RootAny(ref e) => Some(e.clone()),
            _ => None,
        }
    }
    pub fn document(&self) -> Document {
        match self {
            &AttributeAny(ref a) => a.document(),
            &ElementAny(ref e) => e.document(),
            &RootAny(ref r) => r.document(),
            &TextAny(ref t) => t.document(),
        }
    }
    /// Parent node, if any; the root never has a parent.
    pub fn parent(&self) -> Option<Any> {
        match self {
            &AttributeAny(ref a) => a.parent().map(|x| x.to_any()),
            &ElementAny(ref e) => e.parent().map(|x| x.to_any()),
            &TextAny(ref t) => t.parent().map(|x| x.to_any()),
            &RootAny(_) => None,
        }
    }
    /// Child nodes; text and attribute nodes never have children.
    pub fn children(&self) -> Vec<Any> {
        match self {
            &ElementAny(ref e) => e.children().iter().map(|x| x.to_any()).collect(),
            &RootAny(ref r) => r.children().iter().map(|x| x.to_any()).collect(),
            &TextAny(_) |
            &AttributeAny(_) => Vec::new(),
        }
    }
}
// Conversion of every node wrapper (and child/parent enums) into `Any`.
pub trait ToAny {
    fn to_any(&self) -> Any;
}
impl ToAny for Any {
    fn to_any(&self) -> Any { self.clone() }
}
impl ToAny for Element {
    fn to_any(&self) -> Any { ElementAny(self.clone()) }
}
impl ToAny for Attribute {
    fn to_any(&self) -> Any { AttributeAny(self.clone()) }
}
impl ToAny for Text {
    fn to_any(&self) -> Any { TextAny(self.clone()) }
}
impl ToAny for Root {
    fn to_any(&self) -> Any { RootAny(self.clone()) }
}
impl ToAny for ElementChild {
    fn to_any(&self) -> Any {
        match self {
            &ElementElementChild(ref e) => ElementAny(e.clone()),
            &TextElementChild(ref t) => TextAny(t.clone()),
        }
    }
}
impl ToAny for ElementParent {
    fn to_any(&self) -> Any {
        match self {
            &ElementElementParent(ref e) => ElementAny(e.clone()),
            &RootElementParent(ref r) => RootAny(r.clone()),
        }
    }
}
impl ToAny for RootChild {
    fn to_any(&self) -> Any {
        match self {
            &ElementRootChild(ref r) => ElementAny(r.clone()),
        }
    }
}
/// Build a `Nodeset` from a comma-separated list of nodes, e.g.
/// `nodeset!(a, b, c)`. A trailing comma is accepted; the second arm strips
/// it and recurses into the first.
#[macro_export]
macro_rules! nodeset(
    ($($e:expr),*) => ({
        // leading _ to allow empty construction without a warning.
        let mut _temp = ::document::Nodeset::new();
        $(_temp.add($e);)*
        _temp
    });
    ($($e:expr),+,) => (nodeset!($($e),+))
)
/// An ordered collection of nodes of any type. Duplicates are not removed.
#[deriving(Clone,PartialEq,Show)]
pub struct Nodeset {
    nodes: Vec<Any>,
}
impl Nodeset {
    /// An empty set.
    pub fn new() -> Nodeset {
        Nodeset { nodes: Vec::new() }
    }
    /// Append one node, converting it to `Any` first.
    pub fn add<A : ToAny>(&mut self, node: A) {
        self.nodes.push(node.to_any());
    }
    /// Iterate over the contained nodes.
    /// (`std::slice::Items` is the pre-1.0 name of `std::slice::Iter`.)
    pub fn iter(&self) -> std::slice::Items<Any> {
        self.nodes.iter()
    }
    /// Append every node of `other` to this set (duplicates are kept).
    /// (`push_all` is the pre-1.0 precursor of `extend_from_slice`.)
    pub fn add_nodeset(&mut self, other: &Nodeset) {
        self.nodes.push_all(other.nodes.as_slice());
    }
    /// Number of nodes in the set. (`uint` is the pre-1.0 name of `usize`.)
    pub fn size(&self) -> uint {
        self.nodes.len()
    }
}
// ---------------------------------------------------------------------------
// Tests for the document model: document/element/attribute/text/root
// ownership, parent/child wiring, and Nodeset behavior.
// ---------------------------------------------------------------------------
#[test]
fn elements_belong_to_a_document() {
    let doc = Document::new();
    let element = doc.new_element("alpha".to_string());
    assert_eq!(doc, element.document());
}
#[test]
fn elements_can_have_element_children() {
    let doc = Document::new();
    let alpha = doc.new_element("alpha".to_string());
    let beta = doc.new_element("beta".to_string());
    alpha.append_child(beta.clone());
    let children = alpha.children();
    let ref child = children[0].element().unwrap();
    assert_eq!(beta, *child);
}
#[test]
fn element_children_are_ordered() {
    let doc = Document::new();
    let greek = doc.new_element("greek".to_string());
    let alpha = doc.new_element("alpha".to_string());
    let omega = doc.new_element("omega".to_string());
    greek.append_child(alpha.clone());
    greek.append_child(omega.clone());
    let children = greek.children();
    // Insertion order must be preserved.
    assert_eq!(children[0].element().unwrap(), alpha);
    assert_eq!(children[1].element().unwrap(), omega);
}
#[test]
fn element_children_know_their_parent() {
    let doc = Document::new();
    let alpha = doc.new_element("alpha".to_string());
    let beta = doc.new_element("beta".to_string());
    alpha.append_child(beta);
    let ref child = alpha.children()[0];
    let parent = child.parent().unwrap();
    assert_eq!(alpha, parent);
}
#[test]
fn replacing_parent_updates_original_parent() {
    // Re-parenting a node must detach it from its previous parent, so a
    // child never appears under two parents at once.
    let doc = Document::new();
    let parent1 = doc.new_element("parent1".to_string());
    let parent2 = doc.new_element("parent2".to_string());
    let child = doc.new_element("child".to_string());
    parent1.append_child(child.clone());
    parent2.append_child(child.clone());
    assert!(parent1.children().is_empty());
    assert_eq!(1, parent2.children().len());
}
#[test]
fn elements_can_be_renamed() {
    let doc = Document::new();
    let alpha = doc.new_element("alpha".to_string());
    alpha.set_name("beta".to_string());
    assert_eq!(alpha.name().as_slice(), "beta");
}
#[test]
fn elements_have_attributes() {
    let doc = Document::new();
    let e = doc.new_element("element".to_string());
    let a = e.set_attribute("hello".to_string(), "world".to_string());
    assert_eq!(doc, a.document());
}
#[test]
fn attributes_belong_to_a_document() {
    let doc = Document::new();
    let element = doc.new_element("alpha".to_string());
    assert_eq!(doc, element.document());
}
#[test]
fn attributes_know_their_element() {
    let doc = Document::new();
    let e = doc.new_element("element".to_string());
    let a = e.set_attribute("hello".to_string(), "world".to_string());
    assert_eq!(Some(e), a.parent());
}
#[test]
fn attributes_can_be_reset() {
    // Setting an attribute twice overwrites rather than duplicating.
    let doc = Document::new();
    let e = doc.new_element("element".to_string());
    e.set_attribute("hello".to_string(), "world".to_string());
    e.set_attribute("hello".to_string(), "galaxy".to_string());
    assert_eq!(Some("galaxy".to_string()), e.get_attribute("hello"));
}
#[test]
fn attributes_can_be_iterated() {
    let doc = Document::new();
    let e = doc.new_element("element".to_string());
    e.set_attribute("name1".to_string(), "value1".to_string());
    e.set_attribute("name2".to_string(), "value2".to_string());
    let mut attrs = e.attributes();
    // Attribute iteration order is unspecified; sort for a stable check.
    attrs.sort_by(|a, b| a.name().cmp(&b.name()));
    assert_eq!(2, attrs.len());
    assert_eq!("name1", attrs[0].name().as_slice());
    assert_eq!("value1", attrs[0].value().as_slice());
    assert_eq!("name2", attrs[1].name().as_slice());
    assert_eq!("value2", attrs[1].value().as_slice());
}
#[test]
fn elements_can_have_text_children() {
    let doc = Document::new();
    let sentence = doc.new_element("sentence".to_string());
    let text = doc.new_text("Now is the winter of our discontent.".to_string());
    sentence.append_child(text);
    let children = sentence.children();
    assert_eq!(1, children.len());
    let child_text = children[0].text().unwrap();
    assert_eq!(child_text.text().as_slice(), "Now is the winter of our discontent.");
}
#[test]
fn text_belongs_to_a_document() {
    let doc = Document::new();
    let text = doc.new_text("Now is the winter of our discontent.".to_string());
    assert_eq!(doc, text.document());
}
#[test]
fn text_knows_its_parent() {
    let doc = Document::new();
    let sentence = doc.new_element("sentence".to_string());
    let text = doc.new_text("Now is the winter of our discontent.".to_string());
    sentence.append_child(text.clone());
    assert_eq!(text.parent().unwrap(), sentence);
}
#[test]
fn text_can_be_changed() {
    let doc = Document::new();
    let text = doc.new_text("Now is the winter of our discontent.".to_string());
    text.set_text("Made glorious summer by this sun of York".to_string());
    assert_eq!(text.text().as_slice(), "Made glorious summer by this sun of York");
}
#[test]
fn the_root_belongs_to_a_document() {
    let doc = Document::new();
    let root = doc.root();
    assert_eq!(doc, root.document());
}
#[test]
fn root_can_have_element_children() {
    let doc = Document::new();
    let root = doc.root();
    let element = doc.new_element("alpha".to_string());
    root.append_child(element.clone());
    let children = root.children();
    assert_eq!(1, children.len());
    let child = children[0].element().unwrap();
    assert_eq!(child, element);
}
#[test]
fn root_has_maximum_of_one_element_child() {
    // Appending a second element to the root replaces the first — an XML
    // document has exactly one root element.
    let doc = Document::new();
    let root = doc.root();
    let alpha = doc.new_element("alpha".to_string());
    let beta = doc.new_element("beta".to_string());
    root.append_child(alpha.clone());
    root.append_child(beta.clone());
    let children = root.children();
    assert_eq!(1, children.len());
    let child = children[0].element().unwrap();
    assert_eq!(child, beta);
}
#[test]
fn element_under_a_root_knows_its_parent_root() {
    let doc = Document::new();
    let root = doc.root();
    let alpha = doc.new_element("alpha".to_string());
    root.append_child(alpha.clone());
    let parent = alpha.parent().unwrap();
    assert_eq!(root, parent.root().unwrap());
}
#[test]
fn nodeset_can_include_all_node_types() {
    let doc = Document::new();
    let mut nodes = Nodeset::new();
    let e = doc.new_element("element".to_string());
    let a = e.set_attribute("name".to_string(), "value".to_string());
    let t = doc.new_text("text".to_string());
    let r = doc.root();
    nodes.add(e.clone());
    nodes.add(a.clone());
    nodes.add(t.clone());
    nodes.add(r.clone());
    let node_vec: Vec<&Any> = nodes.iter().collect();
    assert_eq!(4, node_vec.len());
    assert_eq!(e, node_vec[0].element().unwrap());
    assert_eq!(a, node_vec[1].attribute().unwrap());
    assert_eq!(t, node_vec[2].text().unwrap());
    assert_eq!(r, node_vec[3].root().unwrap());
}
#[test]
fn nodesets_can_be_combined() {
    let doc = Document::new();
    let mut all_nodes = Nodeset::new();
    let mut nodes1 = Nodeset::new();
    let mut nodes2 = Nodeset::new();
    let e1 = doc.new_element("element1".to_string());
    let e2 = doc.new_element("element2".to_string());
    all_nodes.add(e1.clone());
    all_nodes.add(e2.clone());
    nodes1.add(e1.clone());
    nodes2.add(e2.clone());
    nodes1.add_nodeset(&nodes2);
    assert_eq!(all_nodes, nodes1);
}
|
use specs::{self, Component};
use components::InitFromBlueprint;
/// A drawable ECS component. Currently the only variant is a flat `Shape`.
#[derive(Clone, Debug, Deserialize)]
pub enum Renderable {
    Shape(Shape),
}
/// An axis-aligned rectangle with an RGB color.
/// NOTE(review): color channels are `f32`; presumably normalized 0.0..=1.0 —
/// confirm against the renderer.
#[derive(Clone, Debug, Deserialize)]
pub struct Shape {
    pub width: u32,
    pub height: u32,
    pub color: [f32; 3],
}
impl Renderable {
    /// Convenience constructor for the `Shape` variant.
    pub fn new_shape(width: u32, height: u32, color: [f32; 3]) -> Self {
        // Field-init shorthand (clippy: redundant_field_names).
        Renderable::Shape(Shape {
            width,
            height,
            color,
        })
    }
}
// Dense vector storage keyed by entity id — chosen in this file; whether most
// entities are renderable (the usual justification for VecStorage) is a
// design assumption to confirm.
impl Component for Renderable {
    type Storage = specs::VecStorage<Renderable>;
}
// Uses the trait's default blueprint-initialization behavior; no overrides.
impl InitFromBlueprint for Renderable {}
|
#[cfg(test)]
#[macro_use]
extern crate serial_test;
#[macro_use]
extern crate log;
use actix_web::{guard, middleware, web, App, HttpServer};
use std::env;
mod controller;
mod error;
mod service;
#[cfg(test)]
mod test_util;
/// Listen address taken from the `ADDRESS` environment variable,
/// falling back to "localhost" when the variable is unset or not valid
/// unicode. `unwrap_or_else` keeps the fallback allocation lazy.
fn get_address() -> String {
    env::var("ADDRESS").unwrap_or_else(|_| "localhost".into())
}
/// Listen port taken from the `PORT` environment variable, defaulting to
/// "3000" when unset or not valid unicode. Kept as a `String` because it is
/// only ever interpolated into the bind address.
fn get_port() -> String {
    env::var("PORT").unwrap_or_else(|_| "3000".into())
}
/// Socket bind string in `address:port` form, assembled from the
/// environment-driven getters above.
fn get_bind() -> String {
    let address = get_address();
    let port = get_port();
    format!("{}:{}", address, port)
}
// Build the base App with a JSON extractor whose errors go through our
// custom handler. A macro rather than a fn because the concrete `App` type
// differs between the server and the test harness.
macro_rules! create_app {
    () => {
        App::new().app_data(web::JsonConfig::default().error_handler(error::json_error_handler))
    };
}
// Attach every route/service to an App. Both POST routes share the path
// `/templates/{name}`; the `filter` guards dispatch between the multipart
// and JSON handlers based on the request.
macro_rules! bind_services {
    ($app: expr) => {
        $app.service(controller::status::handler)
            .route(
                "/templates/{name}",
                web::route()
                    .guard(guard::Post())
                    .guard(guard::fn_guard(controller::template_send_multipart::filter))
                    .to(controller::template_send_multipart::handler),
            )
            .route(
                "/templates/{name}",
                web::route()
                    .guard(guard::Post())
                    .guard(guard::fn_guard(controller::template_send_json::filter))
                    .to(controller::template_send_json::handler),
            )
    };
}
// LCOV_EXCL_START
/// Entry point: builds the template provider and SMTP pool once, then moves
/// clones into each worker's app via `.data(...)`. Fails fast (expect) if
/// either service cannot be initialized from the environment.
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    env_logger::init();
    let template_provider =
        service::template::provider::TemplateProvider::from_env().expect("template provider init");
    let smtp_pool = service::smtp::get_pool().expect("smtp service init");
    info!("starting server");
    HttpServer::new(move || {
        // Middleware wraps outermost-last: compression -> logging -> headers.
        bind_services!(create_app!()
            .data(template_provider.clone())
            .data(smtp_pool.clone())
            .wrap(middleware::DefaultHeaders::new().header("X-Version", "0.1.0"))
            .wrap(middleware::Logger::default())
            .wrap(middleware::Compress::default()))
    })
    .bind(get_bind())?
    .run()
    .await
}
#[cfg(test)]
mod tests {
    use super::*;
    use actix_http::Request;
    use actix_web::dev::ServiceResponse;
    use actix_web::{test, App};
    use reqwest;
    use serde::Deserialize;
    use uuid::Uuid;
    /// Minimal view of an email returned by the test inbox API.
    #[derive(Deserialize)]
    pub struct Email {
        pub html: String,
        pub text: String,
    }
    fn get_inbox_hostname() -> String {
        std::env::var("INBOX_HOSTNAME").unwrap_or("localhost".into())
    }
    fn get_inbox_port() -> String {
        std::env::var("INBOX_PORT").unwrap_or("1080".into())
    }
    /// Query the local test inbox for emails matching a from/to pair.
    pub async fn get_latest_inbox(from: &String, to: &String) -> Vec<Email> {
        let url = format!(
            "http://{}:{}/api/emails?from={}&to={}",
            get_inbox_hostname(),
            get_inbox_port(),
            from,
            to
        );
        reqwest::get(url.as_str())
            .await
            .unwrap()
            .json::<Vec<Email>>()
            .await
            .unwrap()
    }
    /// A unique throwaway address so concurrent tests don't collide.
    pub fn create_email() -> String {
        format!("{}@example.com", Uuid::new_v4())
    }
    /// Spin up an in-process app with real services and run one request.
    pub async fn execute_request(req: Request) -> ServiceResponse {
        let template_provider = service::template::provider::TemplateProvider::from_env()
            .expect("template provider init");
        let smtp_pool = service::smtp::get_pool().expect("smtp service init");
        let mut app = test::init_service(bind_services!(create_app!()
            .data(template_provider.clone())
            .data(smtp_pool.clone())))
            .await;
        test::call_service(&mut app, req).await
    }
    // The env-var tests below mutate process-global state, hence #[serial].
    #[test]
    #[serial]
    fn test_get_address() {
        std::env::remove_var("ADDRESS");
        assert_eq!(get_address(), "localhost");
        std::env::set_var("ADDRESS", "something");
        assert_eq!(get_address(), "something");
    }
    #[test]
    #[serial]
    fn test_get_port() {
        std::env::remove_var("PORT");
        assert_eq!(get_port(), "3000");
        std::env::set_var("PORT", "1234");
        assert_eq!(get_port(), "1234");
    }
    #[test]
    #[serial]
    fn test_bind() {
        std::env::remove_var("ADDRESS");
        std::env::remove_var("PORT");
        assert_eq!(get_bind(), "localhost:3000");
        std::env::set_var("ADDRESS", "something");
        std::env::set_var("PORT", "1234");
        assert_eq!(get_bind(), "something:1234");
    }
}
// LCOV_EXCL_END
|
//! Argument parsing and checking.
//!
use libR_sys::*;
//use crate::robj::*;
use crate::robj::Robj;
/// Push `(name, Robj)` pairs onto an argument vector. Accepts a mix of
/// named (`name = value`) and positional (`value`) tokens; positional
/// arguments get an empty-string name.
#[macro_export]
macro_rules! push_args {
    ($args: expr, $name: ident = $val : expr) => {
        $args.push((stringify!($name), Robj::from($val)));
    };
    ($args: expr, $name: ident = $val : expr, $($rest: tt)*) => {
        $args.push((stringify!($name), Robj::from($val)));
        push_args!($args, $($rest)*);
    };
    ($args: expr, $val : expr) => {
        $args.push(("", Robj::from($val)));
    };
    ($args: expr, $val : expr, $($rest: tt)*) => {
        $args.push(("", Robj::from($val)));
        push_args!($args, $($rest)*);
    };
}
/// Build a `Vec<(&str, Robj)>` argument list from mixed named/positional
/// tokens; `args!()` yields an empty vector.
#[macro_export]
macro_rules! args {
    () => {
        Vec::<(&str, Robj)>::new()
    };
    ($($rest: tt)*) => {
        {
            let mut args = Vec::<(&str, Robj)>::new();
            push_args!(args, $($rest)*);
            args
        }
    };
}
/// Append `obj` to the R pairlist whose tail cell is `tail`, tagging the new
/// cons cell with `name`. Returns the new tail cell.
///
/// # Safety
/// The R interpreter must be initialized and this must run on R's thread;
/// `tail` must be a valid pairlist cell kept alive (protected) by the caller.
/// `name` must not contain interior NUL bytes — the C string built below
/// would be silently truncated at the first one.
pub unsafe fn append_with_name(tail: SEXP, obj: Robj, name: &str) -> SEXP {
    // Build a NUL-terminated copy of `name` for Rf_install (expects a C string).
    let mut name = Vec::from(name.as_bytes());
    name.push(0);
    let cons = Rf_cons(obj.get(), R_NilValue);
    SET_TAG(
        cons,
        Rf_install(name.as_ptr() as *const std::os::raw::c_char),
    );
    SETCDR(tail, cons);
    cons
}
/// Append `obj` (untagged) to the pairlist whose tail cell is `tail`,
/// returning the new tail cell.
///
/// # Safety
/// Same contract as `append_with_name`: initialized R, R's thread, and a
/// valid, protected `tail` cell.
pub unsafe fn append(tail: SEXP, obj: Robj) -> SEXP {
    let cons = Rf_cons(obj.get(), R_NilValue);
    SETCDR(tail, cons);
    cons
}
/// Create a one-element R language call `(sym)` from a symbol name.
///
/// # Safety
/// The R interpreter must be initialized and this must run on R's thread.
/// `sym` must not contain interior NUL bytes.
pub unsafe fn make_lang(sym: &str) -> Robj {
    // NUL-terminate the symbol name for Rf_install (expects a C string).
    let mut name = Vec::from(sym.as_bytes());
    name.push(0);
    let sexp = Rf_lang1(Rf_install(name.as_ptr() as *const std::os::raw::c_char));
    Robj::from(sexp)
}
/// Append mixed named/positional arguments to a language object's pairlist,
/// advancing `$tail` as it goes. Helper for `lang!`.
#[macro_export]
macro_rules! append_lang {
    ($tail: ident, $name: ident = $val : expr) => {
        $tail = append_with_name($tail, Robj::from($val), stringify!($name));
    };
    ($tail: ident, $name: ident = $val : expr, $($rest: tt)*) => {
        $tail = append_with_name($tail, Robj::from($val), stringify!($name));
        append_lang!($tail, $($rest)*);
    };
    ($tail: ident, $val : expr) => {
        $tail = append($tail, Robj::from($val));
    };
    ($tail: ident, $val : expr, $($rest: tt)*) => {
        $tail = append($tail, Robj::from($val));
        append_lang!($tail, $($rest)*);
    };
}
/// A macro for constructing R language objects, e.g. `lang!("+", 1, 1)`
/// builds the call `1 + 1`. Wraps the unsafe pairlist builders.
#[macro_export]
macro_rules! lang {
    ($sym : expr) => {
        unsafe {
            make_lang($sym)
        }
    };
    ($sym : expr, $($rest: tt)*) => {
        unsafe {
            let res = make_lang($sym);
            let mut tail = res.get();
            append_lang!(tail, $($rest)*);
            // Silence the unused-assignment warning from the final append.
            let _ = tail;
            res
        }
    };
}
#[cfg(test)]
mod tests {
    //use crate::args;
    use super::*;
    use crate::start_r;
    /// Evaluates constructed calls against an embedded R session.
    #[test]
    fn test_args() {
        start_r();
        assert_eq!(Robj::from(1).eval().unwrap(), Robj::from(1));
        //assert_eq!(Robj::from(Lang("ls")), Robj::from(1));
        assert_eq!(lang!("+", 1, 1).eval().unwrap(), Robj::from(2));
        assert_eq!(lang!("+", x = 1, y = 1).eval().unwrap(), Robj::from(2));
        //assert_eq!(Robj::from(Lang("ls")).and(baseenv()).eval().unwrap(), Robj::from(1));
        //let plus = Robj::from(Lang("+"));
        /*assert_eq!(args!(), vec![]);
        assert_eq!(args!(1), vec![("", 1.into())]);
        assert_eq!(args!(a=1), vec![("a", 1.into())]);
        assert_eq!(args!(2, a=1), vec![("", 2.into()), ("a", 1.into())]);
        assert_eq!(args!(1+1), vec![("", Robj::from(2))]);
        assert_eq!(args!(1+1, 2), [("", Robj::from(2)), ("", Robj::from(2))]);
        assert_eq!(args!(a=1+1, b=2), [("a", Robj::from(2)), ("b", Robj::from(2))]);*/
        //end_r();
    }
}
|
use std::io::prelude::*;
use std::net::{TcpStream,TcpListener,Ipv4Addr};
/// Toy blocking server: accepts connections on 127.0.0.1:3090, reads up to
/// 10 bytes from each client, prints them, and replies "hello".
///
/// Fixes over the original: only the bytes actually read are printed
/// (previously the whole buffer was printed, including zero padding after a
/// short read), and the reply uses `write_all` so a partial `write` is not
/// silently dropped.
pub fn chat_server() {
    let ip = Ipv4Addr::new(127, 0, 0, 1);
    let port = 3090;
    let listener = TcpListener::bind((ip, port)).unwrap();
    for stream in listener.incoming() {
        let mut stream = stream.unwrap();
        let mut buffer = [0; 10];
        // `read` returns how many bytes were filled; slice to that count.
        let n = stream.read(&mut buffer).expect("read fail");
        println!(
            "you just put this {}",
            buffer[..n].iter().map(|x| x.to_string()).collect::<String>()
        );
        stream.write_all(b"hello").unwrap();
    }
}
/// Toy blocking client: connects to 127.0.0.1:3090, sends four bytes, and
/// prints whatever the server sends back.
///
/// Fixes over the original: `write_all` instead of a bare `write` (which may
/// send only part of the payload), and only the bytes actually read are
/// printed rather than the whole zero-initialized buffer.
pub fn chat_client() {
    let ip = Ipv4Addr::new(127, 0, 0, 1);
    let port = 3090;
    let input = &[1, 2, 3, 4];
    let mut stream = TcpStream::connect((ip, port)).unwrap();
    stream.write_all(input).unwrap();
    let mut result = [0; 4];
    let n = stream.read(&mut result).unwrap();
    println!(
        "oh here is result{}",
        result[..n].iter().map(|x| x.to_string()).collect::<String>()
    );
}
|
//! Create options for [parse_opts](super::parse_opts).
use super::malformed_html_handlers::{ErrorMismatchedTagHandler, MismatchedTagHandler};
/// Options for [parse_opts](super::parse_opts).
/// Options for [parse_opts](super::parse_opts).
pub struct ParseOptions {
    /// Defines the method for handling an end tag that doesn't match the currently opened tag.
    /// Boxed trait object so callers can plug in any handler implementation.
    pub mismatched_tag_handler: Box<dyn MismatchedTagHandler>,
}
impl ParseOptions {
/// Create a new [ParseOptions] with default values.
pub fn new() -> Self {
Self {
mismatched_tag_handler: Box::new(ErrorMismatchedTagHandler::new()),
}
}
}
impl Default for ParseOptions {
fn default() -> Self {
ParseOptions::new()
}
}
/// Builds [ParseOptions] for the [Parser](crate::html::parse::Parser).
///
/// See [ParseOptions] for the default values used if not set by the builder.
///
/// Example usage:
/// ```rust
/// # use std::error::Error;
/// # fn main() -> Result<(), Box<dyn Error>> {
/// use skyscraper::html::parse::{Parser, ParseOptionsBuilder, malformed_html_handlers::VoidMismatchedTagHandler};
///
/// let options = ParseOptionsBuilder::new()
///     .with_mismatched_tag_handler(Box::new(VoidMismatchedTagHandler::new(None)))
///     .build();
///
/// let parser = Parser::new(options);
/// # Ok(())
/// # }
/// ```
pub struct ParseOptionsBuilder {
    // Each `with_*` call records a closure that transforms the options;
    // `build` applies them in call order over the defaults.
    reducers: Vec<Box<dyn FnOnce(ParseOptions) -> ParseOptions>>,
}
impl ParseOptionsBuilder {
/// Creates a new [ParseOptionsBuilder].
pub fn new() -> Self {
Self {
reducers: Vec::new(),
}
}
/// Set the type of [MismatchedTagHandler] the parser should use.
pub fn with_mismatched_tag_handler(mut self, handler: Box<dyn MismatchedTagHandler>) -> Self {
let reducer = |options| ParseOptions {
mismatched_tag_handler: handler,
..options
};
self.reducers.push(Box::new(reducer));
self
}
/// Build the [ParseOptions].
pub fn build(self) -> ParseOptions {
self.reducers
.into_iter()
.fold(ParseOptions::new(), |options, f| f(options))
}
}
impl Default for ParseOptionsBuilder {
fn default() -> Self {
ParseOptionsBuilder::new()
}
}
#[cfg(test)]
mod tests {
    use crate::html::parse::{
        malformed_html_handlers::{MismatchedTagHandlerContext, MockMismatchedTagHandler},
        ParserState,
    };
    use super::*;
    #[test]
    fn with_mismatched_tag_handler_should_set_handler() {
        // arrange
        let builder = ParseOptionsBuilder::new();
        let mut handler = MockMismatchedTagHandler::new();
        // Default parser state; shadowed below by the handler context that
        // borrows it mutably.
        let mut context = ParserState {
            ..Default::default()
        };
        let context = MismatchedTagHandlerContext {
            open_tag_name: "hi",
            close_tag_name: "bye",
            parser_state: &mut context,
        };
        // The mock proves the built options hold *our* handler: it must be
        // invoked exactly once.
        handler.expect_invoke().times(1).returning(|_| Ok(()));
        // act
        let options = builder
            .with_mismatched_tag_handler(Box::new(handler))
            .build();
        // assert
        assert!(matches!(
            options.mismatched_tag_handler.invoke(context),
            Ok(())
        ));
    }
}
|
use std::f64::consts::PI;
/// Angular-frequency grid (radians per unit time) for an `n`-point FFT
/// sampled at interval `dt`, laid out to match FFT output ordering:
/// frequencies `2*pi*i/(dt*n)` for the first `(n - 1) / 2` bins, then the
/// same values shifted down by `2*pi/dt` (the negative frequencies) for the
/// remaining bins.
///
/// Returns an empty vector for `n == 0`; the original computed `n - 1` on a
/// `usize` and panicked on underflow in debug builds.
#[allow(dead_code)]
pub fn wspace(dt: f64, n: usize) -> Vec<f64> {
    if n == 0 {
        return Vec::new();
    }
    let mut w = vec![0.0_f64; n];
    for (i, w_in) in w.iter_mut().enumerate().take((n - 1) / 2) {
        *w_in = 2. * PI * (i as f64) / (dt * (n as f64));
    }
    for (i, w_in) in w.iter_mut().enumerate().take(n).skip((n - 1) / 2) {
        *w_in = 2. * PI * (i as f64) / (dt * (n as f64)) - 2. * PI / dt;
    }
    w
}
|
use proc_macro2::{Span, TokenStream};
use syn;
/// Derive-macro backend for `EnumCount`: expands to an implementation of
/// `::strum::EnumCount` whose `count()` returns the number of variants, plus
/// a `<NAME>_COUNT` const holding the same value. Panics (compile error in
/// derive context) when applied to anything but an enum.
pub(crate) fn enum_count_inner(ast: &syn::DeriveInput) -> TokenStream {
    let n = match ast.data {
        syn::Data::Enum(ref v) => v.variants.len(),
        _ => panic!("EnumCount can only be used with enums"),
    };
    // Used in the quasi-quotation below as `#name`
    let name = &ast.ident;
    let const_name = &syn::Ident::new(
        &format!("{}_COUNT", name.to_string().to_uppercase()),
        Span::call_site(),
    );
    // Helper is provided for handling complex generic types correctly and effortlessly
    let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
    quote! {
        // Implementation
        impl #impl_generics ::strum::EnumCount for #name #ty_generics #where_clause {
            fn count() -> usize {
                #n
            }
        }
        #[allow(dead_code, missing_docs)]
        pub const #const_name: usize = #n;
    }
}
|
use super::{socket::Socket, DataService, Error};
use crate::command::device_data_security::{types::*, *};
use atat::atat_derive::AtatLen;
use embedded_time::Clock;
use heapless::{ArrayLength, Bucket, Pos};
use serde::{Deserialize, Serialize};
/// Newtype for a modem security-profile slot index.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, AtatLen)]
pub struct SecurityProfileId(pub u8);
/// TLS credential management for a modem security profile: importing
/// certificates/keys into modem storage and enabling TLS on a profile.
pub trait SSL {
    /// Store a client certificate under `name` and bind it to `profile_id`.
    fn import_certificate(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        certificate: &[u8],
    ) -> Result<(), Error>;
    /// Store a trusted root CA under `name` and bind it to `profile_id`.
    fn import_root_ca(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        root_ca: &[u8],
    ) -> Result<(), Error>;
    /// Store a client private key (optionally password-protected) under
    /// `name` and bind it to `profile_id`.
    fn import_private_key(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        private_key: &[u8],
        password: Option<&str>,
    ) -> Result<(), Error>;
    /// Configure `profile_id` for TLS against `server_hostname`.
    fn enable_ssl(&self, profile_id: SecurityProfileId, server_hostname: &str)
        -> Result<(), Error>;
}
impl<'a, C, CLK, N, L> SSL for DataService<'a, C, CLK, N, L>
where
    C: atat::AtatClient,
    CLK: Clock,
    N: ArrayLength<Option<Socket<L, CLK>>>
        + ArrayLength<Bucket<u8, usize>>
        + ArrayLength<Option<Pos>>,
    L: ArrayLength<u8>,
{
    /// Import a client certificate. Each import is a two-step AT exchange
    /// (announce size/name, then send the payload), followed by binding the
    /// stored item to the security profile.
    fn import_certificate(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        certificate: &[u8],
    ) -> Result<(), Error> {
        // Modem-side limit on the internal name length.
        assert!(name.len() < 200);
        self.network.send_internal(
            &PrepareSecurityDataImport {
                data_type: SecurityDataType::ClientCertificate,
                data_size: certificate.len(),
                internal_name: name,
                password: None,
            },
            true,
        )?;
        self.network.send_internal(
            &SendSecurityDataImport {
                data: serde_at::ser::Bytes(certificate),
            },
            true,
        )?;
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::ClientCertificateInternalName(
                    name,
                )),
            },
            true,
        )?;
        Ok(())
    }
    /// Import a trusted root CA; same announce/send/bind sequence as above.
    fn import_root_ca(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        root_ca: &[u8],
    ) -> Result<(), Error> {
        assert!(name.len() < 200);
        self.network.send_internal(
            &PrepareSecurityDataImport {
                data_type: SecurityDataType::TrustedRootCA,
                data_size: root_ca.len(),
                internal_name: name,
                password: None,
            },
            true,
        )?;
        self.network.send_internal(
            &SendSecurityDataImport {
                data: serde_at::ser::Bytes(root_ca),
            },
            true,
        )?;
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::TrustedRootCertificateInternalName(name)),
            },
            true,
        )?;
        Ok(())
    }
    /// Import a client private key, forwarding an optional decryption
    /// password; same announce/send/bind sequence as above.
    fn import_private_key(
        &self,
        profile_id: SecurityProfileId,
        name: &str,
        private_key: &[u8],
        password: Option<&str>,
    ) -> Result<(), Error> {
        assert!(name.len() < 200);
        self.network.send_internal(
            &PrepareSecurityDataImport {
                data_type: SecurityDataType::ClientPrivateKey,
                data_size: private_key.len(),
                internal_name: name,
                password,
            },
            true,
        )?;
        self.network.send_internal(
            &SendSecurityDataImport {
                data: serde_at::ser::Bytes(private_key),
            },
            true,
        )?;
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::ClientPrivateKeyInternalName(name)),
            },
            true,
        )?;
        Ok(())
    }
    /// Turn on TLS for the profile: root-cert validation with validity-date
    /// checking, cipher suite 0 (modem's automatic selection — NOTE(review):
    /// confirm the meaning of 0 against the module's AT manual), and the
    /// expected server hostname for certificate matching.
    fn enable_ssl(
        &self,
        profile_id: SecurityProfileId,
        server_hostname: &str,
    ) -> Result<(), Error> {
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::CertificateValidationLevel(
                    CertificateValidationLevel::RootCertValidationWithValidityDate,
                )),
            },
            true,
        )?;
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::CipherSuite(0)),
            },
            true,
        )?;
        self.network.send_internal(
            &SecurityProfileManager {
                profile_id,
                operation: Some(SecurityProfileOperation::ExpectedServerHostname(
                    server_hostname,
                )),
            },
            true,
        )?;
        Ok(())
    }
}
|
use crate::camera::{Camera, CameraConfig};
use crate::color::color;
use crate::hittable::{
box3d::Box3D,
bvh::BvhNode,
constant_medium::ConstantMedium,
hittable_list::HittableList,
rect::{XyRect, XzRect, YzRect},
rotate::RotateY,
translate::Translate,
Hittables,
};
use crate::material::{diffuse::Diffuse, lambertian::Lambertian};
use crate::scenes::Scene;
use crate::texture::solidcolor::SolidColor;
use crate::vec::vec3;
use std::sync::Arc;
/// Build the "Cornell smoke" scene: the classic Cornell box whose two inner
/// boxes are replaced by participating media (black and white fog), lit by a
/// single overhead rectangle. `t0`/`t1` are the shutter open/close times used
/// for the camera and BVH construction.
#[allow(dead_code)]
pub fn cornell_smoke(t0: f64, t1: f64, aspect_ratio: f64) -> Scene {
    let camera = Camera::new(CameraConfig {
        lookfrom: vec3(278.0, 278.0, -800.0),
        lookat: vec3(278.0, 278.0, 0.0),
        vup: vec3(0.0, 1.0, 0.0),
        vfov: 40.0,
        aspect_ratio,
        aperture: 0.1,
        focus_dist: 10.0,
        time0: t0,
        time1: t1,
        background: color(0.0, 0.0, 0.0),
    });
    // Wall materials and fog textures.
    let red = Lambertian::new(SolidColor::new(0.65, 0.05, 0.05));
    let white = Lambertian::new(SolidColor::new(0.73, 0.73, 0.73));
    let green = Lambertian::new(SolidColor::new(0.12, 0.45, 0.15));
    let light = Diffuse::new(SolidColor::new(7.0, 7.0, 7.0));
    let blackfog = SolidColor::new(0.0, 0.0, 0.0);
    let whitefog = SolidColor::new(1.0, 1.0, 1.0);
    // The five visible walls of the box (the front face is open).
    let wall1 = YzRect::new(0.0, 555.0, 0.0, 555.0, 555.0, green.clone());
    let wall2 = YzRect::new(0.0, 555.0, 0.0, 555.0, 0.0, red.clone());
    let wall3 = XzRect::new(0.0, 555.0, 0.0, 555.0, 0.0, white.clone());
    let wall4 = XzRect::new(0.0, 555.0, 0.0, 555.0, 555.0, white.clone());
    let wall5 = XyRect::new(0.0, 555.0, 0.0, 555.0, 555.0, white.clone());
    // Tall box: rotated, translated, then filled with black smoke.
    let box1 = Box3D::new(
        vec3(0.0, 0.0, 0.0),
        vec3(165.0, 330.0, 165.0),
        white.clone(),
    );
    let box1 = RotateY::new(Arc::new(box1), 15.0);
    let box1 = Translate::new(Arc::new(box1), vec3(265.0, 0.0, 295.0));
    let box1 = ConstantMedium::new(Arc::new(box1), 0.01, blackfog);
    // Short box: rotated the other way and filled with white smoke.
    let box2 = Box3D::new(
        vec3(0.0, 0.0, 0.0),
        vec3(165.0, 165.0, 165.0),
        white.clone(),
    );
    let box2 = RotateY::new(Arc::new(box2), -18.0);
    let box2 = Translate::new(Arc::new(box2), vec3(130.0, 0.0, 65.0));
    let box2 = ConstantMedium::new(Arc::new(box2), 0.01, whitefog);
    // Ceiling light; the material is moved here (the original's `.clone()`
    // was redundant — this is its only remaining use).
    let light = XzRect::new(113.0, 443.0, 127.0, 432.0, 554.0, light);
    let mut world = HittableList {
        hittables: Vec::new(),
    };
    world.add(wall1);
    world.add(wall2);
    world.add(wall3);
    world.add(wall4);
    world.add(wall5);
    world.add(box1);
    world.add(box2);
    world.add(light);
    Scene {
        camera,
        hittables: Hittables::from(BvhNode::new(world, t0, t1)),
        lights: Hittables::from(HittableList {
            hittables: Vec::new(),
        }),
    }
}
|
use super::*;
use octocrab::models::{issues, User};
use reqwest::Url;
use serde::*;
// All timestamps in this module are UTC.
type DateTime = chrono::DateTime<chrono::Utc>;
// milestoned event should include only title
// See: https://docs.github.com/en/developers/webhooks-and-events/events/issue-event-types#milestoned
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct MilestonePartial {
    pub title: String,
}
// Copied from octocrab::models::ProjectCard to fix null value
// (all fields except id/url can be absent in the API payload, so they are
// Options here and skipped when serializing).
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ProjectCard {
    pub id: u64,
    pub url: Url,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub project_id: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub project_url: Option<Url>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub column_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub previous_column_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub column_url: Option<Url>,
}
// Copied from octocrab::models::IssueEvent
// There are more events than Event enum defined
// Detail: https://docs.github.com/en/developers/webhooks-and-events/issue-event-types
// Only `created_at` and `issue` are guaranteed present; everything else
// depends on the event type.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[non_exhaustive]
pub struct IssueEvent {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub node_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub actor: Option<User>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub assignee: Option<User>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub assigner: Option<User>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub review_requester: Option<User>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub requested_reviewer: Option<User>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub label: Option<Label>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub milestone: Option<MilestonePartial>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub project_card: Option<ProjectCard>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub event: Option<String>, // Used instead of Event
    #[serde(skip_serializing_if = "Option::is_none")]
    pub commit_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub commit_url: Option<Url>,
    pub created_at: DateTime,
    pub issue: issues::Issue,
}
/// Minimal label payload attached to labeled/unlabeled events.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[non_exhaustive]
pub struct Label {
    pub name: String,
    pub color: String,
}
/// Flattened CSV-row form of an [IssueEvent]: nested objects are reduced to
/// their id/name/url, suitable for `csv::Writer::serialize`.
#[derive(Serialize, Debug)]
pub struct EventRec {
    pub id: Option<i64>,
    pub node_id: Option<String>,
    pub url: Option<String>,
    pub actor_id: Option<i64>,
    pub assignee_id: Option<i64>,
    pub assigner_id: Option<i64>,
    pub review_requester_id: Option<i64>,
    pub requested_reviewer_id: Option<i64>,
    pub label: Option<String>,
    pub milestone_title: Option<String>,
    pub project_card_url: Option<Url>,
    pub event: Option<String>, // Used instead of Event
    pub commit_id: Option<String>,
    pub commit_url: Option<Url>,
    pub created_at: DateTime,
    pub issue_id: i64,
    // "owner/name"; filled in via RepositryAware::set_repository.
    pub sdc_repository: String,
}
// NOTE(review): the trait name `RepositryAware` (sic) is declared elsewhere
// in the crate; kept as-is here to match.
impl RepositryAware for EventRec {
    fn set_repository(&mut self, name: String) {
        self.sdc_repository = name;
    }
}
impl From<IssueEvent> for EventRec {
    /// Flatten the API payload: user fields collapse to their ids, the label
    /// to its name, the milestone to its title, the project card to its url.
    /// `sdc_repository` starts empty and is set afterwards.
    fn from(from: IssueEvent) -> Self {
        Self {
            id: from.id,
            node_id: from.node_id,
            url: from.url,
            actor_id: from.actor.map(|u| u.id),
            event: from.event,
            assignee_id: from.assignee.map(|u| u.id),
            assigner_id: from.assigner.map(|u| u.id),
            review_requester_id: from.review_requester.map(|u| u.id),
            requested_reviewer_id: from.requested_reviewer.map(|u| u.id),
            label: from.label.map(|l| l.name),
            milestone_title: from.milestone.map(|m| m.title),
            project_card_url: from.project_card.map(|p| p.url),
            commit_id: from.commit_id,
            commit_url: from.commit_url,
            created_at: from.created_at,
            issue_id: from.issue.id,
            sdc_repository: String::default(),
        }
    }
}
/// Fetches the issue-event stream of one repository, optionally stopping at
/// events older than `since`.
pub struct IssueEventFetcher {
    owner: String,
    name: String,
    // Lower cutoff: stop paging when events fall before this time.
    since: Option<DateTime>,
    octocrab: octocrab::Octocrab,
}
impl IssueEventFetcher {
    pub fn new(
        owner: String,
        name: String,
        since: Option<DateTime>,
        octocrab: octocrab::Octocrab,
    ) -> Self {
        Self {
            owner,
            name,
            since,
            octocrab,
        }
    }
}
impl UrlConstructor for IssueEventFetcher {
    /// "owner/name" slug, used to stamp each CSV record.
    fn reponame(&self) -> String {
        format!("{}/{}", self.owner, self.name)
    }
    /// First-page URL for the repository's issue-events listing; `None` if
    /// the route fails to resolve against the client's base URL.
    fn entrypoint(&self) -> Option<Url> {
        let param = Params::default();
        let route = format!(
            "repos/{owner}/{repo}/issues/events?{query}",
            owner = &self.owner,
            repo = &self.name,
            query = param.to_query(),
        );
        self.octocrab.absolute_url(route).ok()
    }
}
// Wires the fetcher into the shared paging/CSV machinery: deserialize pages
// as IssueEvent, write rows as EventRec.
impl LoopWriter for IssueEventFetcher {
    type Model = IssueEvent;
    type Record = EventRec;
}
impl IssueEventFetcher {
    /// Stream every issue event of the repository into `wtr` as CSV rows,
    /// following pagination until it is exhausted or (when `since` is set)
    /// until a page ends with an event older than the cutoff.
    pub async fn fetch<T: std::io::Write>(&self, mut wtr: csv::Writer<T>) -> octocrab::Result<()> {
        let mut next: Option<Url> = self.entrypoint();
        while let Some(mut page) = self.octocrab.get_page(&next).await? {
            let events: Vec<IssueEvent> = page.take_items();
            // Timestamp of the last event written from this page, used for
            // the `since` cutoff below.
            let mut last_update: Option<DateTime> = None;
            for event in events.into_iter() {
                let mut record: EventRec = event.into();
                record.set_repository(self.reponame());
                wtr.serialize(&record).expect("Serialize failed");
                last_update = Some(record.created_at);
            }
            // Stop once we are past the cutoff. The original called
            // `last_update.unwrap()` and panicked whenever a page carried no
            // items; an empty page now simply follows pagination.
            next = match (self.since, last_update) {
                (Some(since), Some(last)) if last < since => None,
                _ => page.next,
            };
        }
        Ok(())
    }
}
|
/// Returns true when `num` equals the sum of its digits each raised to the
/// power of the digit count (an Armstrong / narcissistic number). Zero has
/// no digits, giving an empty sum of 0, so it is correctly reported as
/// Armstrong.
///
/// The sum is accumulated in `u64`: `u32::MAX` has ten digits and summing
/// ten values near `9^10` overflows `u32` (the original panicked in debug
/// builds and silently wrapped in release builds).
pub fn is_armstrong_number(num: u32) -> bool {
    let mut n = num;
    let mut digits = Vec::new();
    while n > 0 {
        digits.push(u64::from(n % 10));
        n /= 10;
    }
    let exponent = digits.len() as u32;
    let digit_sum: u64 = digits.iter().map(|d| d.pow(exponent)).sum();
    u64::from(num) == digit_sum
}
|
use crate::common::*;
// An `else`-less conditional followed by an identifier must report the
// missing keyword with a caret spanning the stray identifier.
test! {
  name: expected_keyword,
  justfile: "foo := if '' == '' { '' } arlo { '' }",
  stderr: "
    error: Expected keyword `else` but found identifier `arlo`
      |
    1 | foo := if '' == '' { '' } arlo { '' }
      |                           ^^^^
  ",
  status: EXIT_FAILURE,
}
// `!` must be followed by `=`; anything else is a character-level error
// pointing at the `!`.
test! {
  name: unexpected_character,
  justfile: "!~",
  stderr: "
    error: Expected character `=`
      |
    1 | !~
      | ^
  ",
  status: EXIT_FAILURE,
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under both the MIT license found in the
* LICENSE-MIT file in the root directory of this source tree and the Apache
* License, Version 2.0 found in the LICENSE-APACHE file in the root directory
* of this source tree.
*/
use std::fmt;
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
use std::time::Instant;
use parking_lot::Mutex;
use rand::{
distributions::{Alphanumeric, Distribution, Uniform},
thread_rng, Rng,
};
use chrome_trace::{Args, Event, Trace};
use crate::global;
use crate::guard::TraceGuard;
/// The `TraceContext` type manages all of the state associated with tracing.
/// Typically, a `TraceContext` will be created early on in program execution
/// (such as in `main()`) and held onto for the duration of the program's lifecycle.
///
/// The context can be cheaply cloned, producing contexts which refer to the same
/// internal state. As such, these contexts can easily be tucked away into any struct
/// or module that requires tracing, and easily propagated throughout the program.
///
/// The primary API for tracing things is the `.trace()` method on the context.
/// This produces a `TraceGuard`, which is an RAII struct that emits a trace event
/// when it goes out of scope. As such, typical usage of this crate would involve
/// creating a `TraceContext` in `main()`, threading the context through the program,
/// cloning as needed, and then calling `.trace()` in each scope to be traced.
#[derive(Debug, Clone)]
pub struct TraceContext {
// Shared state; `Clone` on the context is just an `Arc` refcount bump.
pub(crate) inner: Arc<TraceContextInner>,
}
impl TraceContext {
/// Initialize a new TraceContext, which manages an individual trace.
/// The trace will be associated with the given id, and all durations logged
/// will be relative to the starting time provided as `epoch`.
pub fn new(id: TraceId, epoch: Instant) -> Self {
let inner = Arc::new(TraceContextInner::new(id, epoch));
// Randomly disable a fraction of all contexts, determined by the global sample rate.
let sample_rate = global::sample_rate();
if Uniform::new(0usize, 100).sample(&mut thread_rng()) >= sample_rate {
inner.disable();
}
Self { inner }
}
/// Start tracing a new event. Returns a TraceGuard object that will log a trace event
/// when it goes out of scope.
pub fn trace<T: ToString>(&self, name: T, args: Option<Args>) -> TraceGuard {
TraceGuard::new(&self.inner, name.to_string(), args.unwrap_or_default())
}
/// Get a copy of the current trace, including all events that have been logged to
/// this context up to this point.
pub fn snapshot(&self) -> Trace {
self.inner.data.lock().trace.clone()
}
/// Get the id of this trace. Useful for logging (e.g., to associate log messages
/// with a particular trace).
pub fn id(&self) -> TraceId {
self.inner.id.clone()
}
/// Enable this trace ignoring the [crate::global::is_enabled] and
/// [crate::global::sample_rate] values
pub fn enable(&self) {
self.inner.enable()
}
/// Disable this trace.
pub fn disable(&self) {
self.inner.disable()
}
/// Check if this trace is enabled or not
pub fn is_enabled(&self) -> bool {
self.inner.is_enabled()
}
}
impl Default for TraceContext {
/// Get a TraceContext with a random id whose start time is set to the time of creation.
/// This is the recommended way of starting a new trace.
fn default() -> Self {
Self::new(generate_trace_id(), Instant::now())
}
}
/// Opaque identifier of a trace, displayed as its underlying string.
#[derive(Clone, Debug)]
pub struct TraceId(String);
impl TraceId {
    /// Wrap any string-convertible value as a trace identifier.
    pub fn from_string<T: ToString>(s: T) -> Self {
        TraceId(s.to_string())
    }
}
impl fmt::Display for TraceId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the inner string so formatter flags (width, fill, …)
        // are honored exactly as for a plain `String`.
        fmt::Display::fmt(&self.0, f)
    }
}
/// Generate a [TraceId] randomly from a string of 16 alphanumeric characters
pub fn generate_trace_id() -> TraceId {
// NOTE(review): collecting `Alphanumeric` samples into a `String` relies on
// the pre-0.8 `rand` API where the distribution yields `char`; confirm the
// pinned `rand` version before upgrading.
TraceId(thread_rng().sample_iter(&Alphanumeric).take(16).collect())
}
/// State shared by every clone of a [TraceContext].
#[derive(Debug)]
pub(crate) struct TraceContextInner {
// Identifier of the trace this context belongs to.
pub(crate) id: TraceId,
// Reference instant; event durations are measured relative to this.
pub(crate) epoch: Instant,
// Per-context on/off switch; Relaxed ordering is used throughout since
// this is an independent flag with no data it must synchronize with.
pub(crate) enabled: AtomicBool,
// Accumulated trace events, guarded by a parking_lot mutex.
pub(crate) data: Mutex<TraceContextMutableData>,
}
impl TraceContextInner {
fn new(id: TraceId, epoch: Instant) -> Self {
Self {
id,
epoch,
enabled: AtomicBool::new(global::is_enabled()),
data: Mutex::new(TraceContextMutableData::new()),
}
}
pub(crate) fn add_event(&self, event: Event) {
if !self.is_enabled() {
return;
}
self.data.lock().trace.add_event(event);
}
pub(crate) fn enable(&self) {
self.enabled.store(true, Ordering::Relaxed)
}
pub(crate) fn disable(&self) {
self.enabled.store(false, Ordering::Relaxed)
}
pub(crate) fn is_enabled(&self) -> bool {
self.enabled.load(Ordering::Relaxed)
}
}
/// Interior state guarded by the context's mutex.
#[derive(Debug, Default)]
pub(crate) struct TraceContextMutableData {
    pub(crate) trace: Trace,
}
impl TraceContextMutableData {
    /// Fresh, empty trace data.
    fn new() -> Self {
        Self::default()
    }
}
|
use common::*;
use event::{WindowSettings, EventIterator, EventSettings};
use event_handler::handle_event;
use sdl2_window::*;
use shader_version::opengl::*;
use state::App;
/// Program entry point: open an SDL2 window with an OpenGL 3.3 context,
/// build the application state, and pump the event loop until the window
/// produces no more events.
///
/// NOTE(review): this uses a pre-1.0 Piston/SDL2 API surface
/// (`OpenGL_3_3`, `EventIterator`, struct-literal `WindowSettings`); the
/// borrow of `game_iter.window` inside the loop only compiles under the
/// old borrow checker — confirm toolchain before touching.
pub fn main() {
debug!("starting");
let mut window = Sdl2Window::new(
OpenGL_3_3,
WindowSettings {
title: "playform".to_string(),
// WINDOW_WIDTH/WINDOW_HEIGHT come from `common`; cast to the u32
// size the window API expects.
size: [WINDOW_WIDTH as u32, WINDOW_HEIGHT as u32],
fullscreen: false,
exit_on_esc: false,
samples: 0,
}
);
let mut app = App::new();
// Cap both simulation updates and rendered frames at 30 per second.
let mut game_iter =
EventIterator::new(
&mut window,
&EventSettings {
updates_per_second: 30,
max_frames_per_second: 30,
});
loop {
match game_iter.next() {
// The iterator yields None when the window is closed.
None => break,
Some(e) => handle_event(&mut app, game_iter.window, e)
}
}
debug!("finished!");
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.