text stringlengths 8 4.13M |
|---|
use crate::robotic::{parse_program, Command};
use crate::{
get_bool, get_byte, get_dword, get_null_terminated_string, get_word, load_palette, Board,
BoardId, ByteString, ColorValue, OverlayMode, Robot, World, WorldError, CHARSET_BUFFER_SIZE,
LEGACY_BOARD_NAME_SIZE, LEGACY_ROBOT_NAME_SIZE, Scroll, Sensor,
};
use itertools::Zip;
use std::io::{Cursor, Read, Seek};
use zip::ZipArchive;
/// Identifies one of the property files stored inside a zip-format world.
enum PropFile {
    World,
    Board(u8),
    BoardRobot(u8, u8),
    BoardScroll(u8, u8),
    BoardSensor(u8, u8),
}

impl PropFile {
    /// Zip entry name for this property file.
    // NOTE(review): the board prefix is uppercase hex for Board/BoardRobot but
    // lowercase for BoardScroll/BoardSensor — confirm against the on-disk format.
    fn to_string(&self) -> String {
        match *self {
            PropFile::World => String::from("world"),
            PropFile::Board(board) => format!("b{:02X}", board),
            PropFile::BoardRobot(board, robot) => format!("b{:02X}r{:02X}", board, robot),
            PropFile::BoardScroll(board, scroll) => format!("b{:02x}sc{:02X}", board, scroll),
            PropFile::BoardSensor(board, sensor) => format!("b{:02x}se{:02X}", board, sensor),
        }
    }
}
/// One of the three per-cell planes that make up a board layer on disk.
enum Plane {
    Id,
    Param,
    Color,
}

impl Plane {
    /// Suffix used in zip entry names for this plane.
    fn as_str(&self) -> &'static str {
        match *self {
            Plane::Id => "id",
            Plane::Param => "pr",
            Plane::Color => "co",
        }
    }
}
/// Identifies any file stored inside a zip-format world archive.
enum WorldFile {
    GlobalRobot,
    //CustomSfx,
    Properties(PropFile),
    CharSets,
    Palette,
    BoardLevel(u8, Plane),
    BoardUnder(u8, Plane),
    BoardOverlayChar(u8),
    BoardOverlayColor(u8),
}

impl WorldFile {
    /// Zip entry name for this file.
    fn to_string(&self) -> String {
        match self {
            WorldFile::GlobalRobot => "gr".to_owned(),
            /*WorldFile::CustomSfx => "sfx".to_owned(),*/
            WorldFile::CharSets => "chars".to_owned(),
            WorldFile::Palette => "pal".to_owned(),
            WorldFile::BoardLevel(board, plane) => format!("b{:02X}b{}", board, plane.as_str()),
            WorldFile::BoardUnder(board, plane) => format!("b{:02X}u{}", board, plane.as_str()),
            WorldFile::BoardOverlayChar(board) => format!("b{:02X}och", board),
            WorldFile::BoardOverlayColor(board) => format!("b{:02X}oco", board),
            // Property files compute their own names.
            WorldFile::Properties(props) => props.to_string(),
        }
    }
}
/// Reads a whole named entry out of a world archive.
trait ReadFile {
    /// Returns the raw bytes of `f`, or `Err(())` if the entry is absent.
    fn read_known_file(&mut self, f: WorldFile) -> Result<Vec<u8>, ()>;
}

impl<R: Read + Seek> ReadFile for ZipArchive<R> {
    fn read_known_file(&mut self, f: WorldFile) -> Result<Vec<u8>, ()> {
        println!("opening {:?}", f.to_string());
        // A missing entry becomes Err(()); an I/O error while reading still panics.
        let mut file = self.by_name(&f.to_string()).map_err(|_| ())?;
        let mut contents = vec![];
        file.read_to_end(&mut contents).unwrap();
        Ok(contents)
    }
}
/// Loads a zip-format world: world and global-robot properties, charset and
/// palette, then every board together with its planes, overlay, robots,
/// scrolls, and sensors.
pub(crate) fn load_zip_world(buffer: &[u8]) -> Result<World, WorldError> {
    let buffer = Cursor::new(buffer);
    let mut zip = ZipArchive::new(buffer).unwrap();
    // Debug dump of the archive contents.
    for name in zip.file_names() {
        println!("file: {:?}", name);
    }
    let world_props = zip
        .read_known_file(WorldFile::Properties(PropFile::World))
        .unwrap();
    let mut world = load_world_info(&world_props).unwrap();
    let global_robot_props = zip.read_known_file(WorldFile::GlobalRobot).unwrap();
    world.global_robot = load_robot(&global_robot_props).unwrap();
    let charset = zip.read_known_file(WorldFile::CharSets).unwrap();
    //XXXjdm support all the reserved charsets
    // Only the first charset-buffer's worth is used; any extra data is ignored.
    world
        .state
        .charset
        .data
        .copy_from_slice(&charset[0..CHARSET_BUFFER_SIZE]);
    world
        .state
        .initial_charset
        .data
        .copy_from_slice(&charset[0..CHARSET_BUFFER_SIZE]);
    println!("{:?}", world.state.charset);
    let palette = zip.read_known_file(WorldFile::Palette).unwrap();
    let palette = load_palette(&palette[0..16 * 3]); //XXXjdm support larger palettes
    world.state.palette = palette.clone();
    world.state.initial_palette = palette;
    // `world.boards` was already sized by the NumberOfBoards world property.
    for i in 0..world.boards.len() as u8 {
        // A board with no property file is skipped and left at its default.
        let board_props = match zip.read_known_file(WorldFile::Properties(PropFile::Board(i))) {
            Ok(props) => props,
            Err(_) => continue,
        };
        let mut board = load_board(&board_props).unwrap();
        // Each layer is stored as three parallel planes: id, param, color.
        let level_id = zip
            .read_known_file(WorldFile::BoardLevel(i, Plane::Id))
            .unwrap();
        let level_param = zip
            .read_known_file(WorldFile::BoardLevel(i, Plane::Param))
            .unwrap();
        let level_color = zip
            .read_known_file(WorldFile::BoardLevel(i, Plane::Color))
            .unwrap();
        // Recombine into (id, color, param) tuples per cell.
        board.level = Zip::new((level_id, level_color, level_param)).collect();
        let under_id = zip
            .read_known_file(WorldFile::BoardUnder(i, Plane::Id))
            .unwrap();
        let under_param = zip
            .read_known_file(WorldFile::BoardUnder(i, Plane::Param))
            .unwrap();
        let under_color = zip
            .read_known_file(WorldFile::BoardUnder(i, Plane::Color))
            .unwrap();
        board.under = Zip::new((under_id, under_color, under_param)).collect();
        // `board.overlay` was set to Some by the board's Overlay property.
        if let Some((_, ref mut overlay)) = board.overlay {
            let overlay_char = zip.read_known_file(WorldFile::BoardOverlayChar(i)).unwrap();
            let overlay_color = zip
                .read_known_file(WorldFile::BoardOverlayColor(i))
                .unwrap();
            *overlay = overlay_char.into_iter().zip(overlay_color).collect();
        }
        // Robot/scroll/sensor ids are 1-based on disk; a missing file becomes a
        // default entry so indices stay aligned with ids.
        let mut robots = vec![];
        for r in 0..board.num_robots {
            let robot_props = match zip
                .read_known_file(WorldFile::Properties(PropFile::BoardRobot(i, r as u8 + 1)))
            {
                Ok(r) => r,
                Err(_) => {
                    robots.push(Robot::default());
                    continue;
                }
            };
            let robot = load_robot(&robot_props).unwrap();
            robots.push(robot);
        }
        let mut scrolls = vec![];
        for r in 0..board.num_scrolls {
            let scroll_props = match zip
                .read_known_file(WorldFile::Properties(PropFile::BoardScroll(i, r as u8 + 1)))
            {
                Ok(r) => r,
                Err(_) => {
                    scrolls.push(Scroll::default());
                    continue;
                }
            };
            // NOTE(review): this binding holds a scroll, not a robot — rename candidate.
            let robot = load_scroll(&scroll_props).unwrap();
            scrolls.push(robot);
        }
        board.scrolls = scrolls;
        let mut sensors = vec![];
        for r in 0..board.num_sensors {
            let sensor_props = match zip
                .read_known_file(WorldFile::Properties(PropFile::BoardSensor(i, r as u8 + 1)))
            {
                Ok(r) => r,
                Err(_) => {
                    sensors.push(Sensor::default());
                    continue;
                }
            };
            let sensor = load_sensor(&sensor_props).unwrap();
            sensors.push(sensor);
        }
        board.sensors = sensors;
        board.init(&mut robots);
        world.boards[i as usize] = (board, robots);
    }
    Ok(world)
}
/// A single parsed world property. The commented-out variants exist in the
/// file format but are not loaded yet.
#[derive(Debug)]
enum WorldProp {
    Name(ByteString),
    WorldVersion(u16),
    FileVersion(u16),
    //SaveStartBoard(u8),
    //SaveHasTemp(u8),
    NumberOfBoards(u8),
    // First block of per-thing character ids (323 bytes).
    IdBlock([u8; 323]),
    /*IdMissileColor(u8),
    IdBulletColors([u8; 3]),*/
    // Second block of character ids (final 128 bytes of the idchars table).
    IdBlock2([u8; 128]),
    /*StatusCounters([ByteString; 6]),*/
    EdgeColor(u8),
    FirstBoard(u8),
    /*EndgameBoard(u8),
    DeathBoard(u8),
    EndgameTeleportX(u16),
    EndgameTeleportY(u16),
    GameOverSfx(bool),
    DeathTeleportX(u16),
    DeathTeleportY(u16),
    StartingLives(u16),
    LivesLimit(u16),
    StartingHealth(u16),
    HealthLimit(u16),
    EnemyBulletsHurtOthers(bool),
    ClearMessagesOnExit(bool),
    PlayFromSwapWorld(bool),*/
}

/// Property ids as they appear in the "world" property stream.
impl WorldProp {
    //const END: u16 = 0x0000;
    const WORLD_NAME: u16 = 0x0001;
    const WORLD_VERSION: u16 = 0x0002;
    const FILE_VERSION: u16 = 0x0003;
    //const SAVE_START: u16 = 0x0004;
    //const SAVE_HAS_TEMPORARY: u16 = 0x0005;
    const NUM_BOARDS: u16 = 0x0008;
    const CHAR_ID_BLOCK_1: u16 = 0x0010;
    /*const CHAR_ID_MISSILE: u16 = 0x0011;
    const CHAR_ID_BULLETS: u16 = 0x0012;*/
    const CHAR_ID_BLOCK_3: u16 = 0x0013;
    /*const STATUS_COUNTERS: u16 = 0x0018;*/
    const EDGE_COLOR: u16 = 0x0020;
    const FIRST_BOARD: u16 = 0x0021;
    /*const ENDGAME_BOARD: u16 = 0x0022;
    const DEATH_BOARD: u16 = 0x0023;*/
}
/// Property id that terminates every property stream.
const END_PROP: u16 = 0x0000;

/// Reads the next recognized property from `buffer`.
///
/// Each property is encoded as a u16 id, a u32 payload length, then the
/// payload. Ids that `read` rejects are skipped. Returns the parsed property
/// (or `None` on the end marker / truncated input) plus the remaining bytes.
fn next_prop<T>(
    mut buffer: &[u8],
    read: fn(u16, &[u8]) -> Result<T, ()>,
) -> Result<(Option<T>, &[u8]), ()> {
    loop {
        let (id, tmp_buffer) = get_word(buffer);
        if id == END_PROP {
            return Ok((None, tmp_buffer));
        }
        // Too few bytes left even for the length field: treat as end of stream.
        if tmp_buffer.len() < 4 {
            return Ok((None, tmp_buffer));
        }
        let (size, tmp_buffer) = get_dword(tmp_buffer);
        // NOTE(review): split_at panics if `size` exceeds the remaining bytes —
        // assumes well-formed input.
        let (data, tmp_buffer) = tmp_buffer.split_at(size as usize);
        if let Ok(prop) = read(id, data) {
            return Ok((Some(prop), tmp_buffer));
        }
        // Unknown property: advance past its payload and keep scanning.
        buffer = tmp_buffer;
    }
}
impl WorldProp {
    /// Decodes one property payload; `Err(())` means the id is unrecognized.
    fn read(id: u16, buffer: &[u8]) -> Result<WorldProp, ()> {
        Ok(match id {
            WorldProp::WORLD_NAME => {
                WorldProp::Name(get_null_terminated_string(buffer, LEGACY_BOARD_NAME_SIZE).0)
            }
            WorldProp::WORLD_VERSION => WorldProp::WorldVersion(get_word(buffer).0),
            WorldProp::FILE_VERSION => WorldProp::FileVersion(get_word(buffer).0),
            WorldProp::NUM_BOARDS => WorldProp::NumberOfBoards(get_byte(buffer).0),
            // NOTE(review): copy_from_slice panics unless the payload is exactly
            // the expected block size.
            WorldProp::CHAR_ID_BLOCK_1 => {
                let mut block = [0; 323];
                block.copy_from_slice(buffer);
                WorldProp::IdBlock(block)
            }
            WorldProp::CHAR_ID_BLOCK_3 => {
                let mut block = [0; 128];
                block.copy_from_slice(buffer);
                WorldProp::IdBlock2(block)
            }
            WorldProp::EDGE_COLOR => WorldProp::EdgeColor(get_byte(buffer).0),
            WorldProp::FIRST_BOARD => WorldProp::FirstBoard(get_byte(buffer).0),
            _ => return Err(()),
        })
    }
    /// Stores this property's value into `world`.
    fn apply(self, world: &mut World) {
        match self {
            WorldProp::Name(name) => world.title = name,
            WorldProp::WorldVersion(v) => world.version = v as u32, //XXXjdm
            WorldProp::FileVersion(_v) => {}
            WorldProp::NumberOfBoards(b) => world.boards.resize_with(b as usize, Default::default),
            // Block 1 fills the front of the idchars table...
            WorldProp::IdBlock(block) => world.state.idchars[0..323].copy_from_slice(&block),
            // ...block 2 fills the final 128 entries.
            WorldProp::IdBlock2(block) => {
                let start = world.state.idchars.len() - 128;
                let end = world.state.idchars.len();
                world.state.idchars[start..end].copy_from_slice(&block)
            }
            WorldProp::EdgeColor(c) => world.edge_border = ColorValue(c),
            WorldProp::FirstBoard(b) => world.starting_board_number = BoardId(b),
            //_ => unimplemented!(),
        }
    }
}
/// Parses the "world" property stream into a `World`.
fn load_world_info(mut buffer: &[u8]) -> Result<World, ()> {
    let mut world = World::default();
    // Consume recognized properties until the end marker.
    while let (Some(prop), rest) = next_prop(buffer, WorldProp::read).unwrap() {
        println!("{:?}", prop);
        prop.apply(&mut world);
        buffer = rest;
    }
    Ok(world)
}
/// A single parsed scroll property.
#[derive(Debug)]
enum ScrollProp {
    Lines(u16),
    Text(ByteString),
}

impl ScrollProp {
    const LINES: u16 = 0x0001;
    const TEXT: u16 = 0x0002;
    /// Decodes one property payload; `Err(())` means the id is unrecognized.
    fn read(id: u16, buffer: &[u8]) -> Result<ScrollProp, ()> {
        Ok(match id {
            ScrollProp::LINES => ScrollProp::Lines(get_word(buffer).0),
            ScrollProp::TEXT => {
                // Text runs to the first NUL; no explicit length cap.
                ScrollProp::Text(get_null_terminated_string(buffer, usize::MAX).0)
            }
            _ => return Err(()),
        })
    }
    /// Stores this property into `scroll`, marking it used.
    fn apply(self, scroll: &mut Scroll) {
        scroll.used = true;
        match self {
            ScrollProp::Lines(lines) => scroll.num_lines = lines,
            ScrollProp::Text(bytes) => scroll.text = bytes,
        }
    }
}
/// A single parsed sensor property.
#[derive(Debug)]
enum SensorProp {
    Name(ByteString),
    Char(u8),
    // Name of the robot the sensor signals.
    Robot(ByteString),
}

impl SensorProp {
    const NAME: u16 = 0x0001;
    const CHAR: u16 = 0x0002;
    const ROBOT: u16 = 0x0003;
    /// Decodes one property payload; `Err(())` means the id is unrecognized.
    fn read(id: u16, buffer: &[u8]) -> Result<SensorProp, ()> {
        Ok(match id {
            SensorProp::NAME => {
                SensorProp::Name(get_null_terminated_string(buffer, LEGACY_ROBOT_NAME_SIZE).0)
            }
            SensorProp::CHAR => SensorProp::Char(get_byte(buffer).0),
            SensorProp::ROBOT => {
                SensorProp::Robot(get_null_terminated_string(buffer, LEGACY_ROBOT_NAME_SIZE).0)
            }
            _ => return Err(()),
        })
    }
    /// Stores this property into `sensor`, marking it used.
    fn apply(self, sensor: &mut Sensor) {
        sensor.used = true;
        match self {
            SensorProp::Name(name) => sensor.name = name,
            SensorProp::Char(ch) => sensor.ch = ch,
            SensorProp::Robot(robot) => sensor.target = robot,
        }
    }
}
/// A single parsed robot property.
#[derive(Debug)]
enum RobotProp {
    Name(ByteString),
    Char(u8),
    X(i16),
    Y(i16),
    Program(Vec<Command>),
}

impl RobotProp {
    const NAME: u16 = 0x0001;
    const CHAR: u16 = 0x0002;
    const X_ID: u16 = 0x0003;
    const Y_ID: u16 = 0x0004;
    const PROGRAM: u16 = 0x00FF;
    /// Decodes one property payload; `Err(())` means the id is unrecognized.
    fn read(id: u16, buffer: &[u8]) -> Result<RobotProp, ()> {
        Ok(match id {
            RobotProp::NAME => {
                RobotProp::Name(get_null_terminated_string(buffer, LEGACY_ROBOT_NAME_SIZE).0)
            }
            RobotProp::CHAR => RobotProp::Char(get_byte(buffer).0),
            // Coordinates are stored as words and reinterpreted as signed.
            RobotProp::X_ID => RobotProp::X(get_word(buffer).0 as i16),
            RobotProp::Y_ID => RobotProp::Y(get_word(buffer).0 as i16),
            RobotProp::PROGRAM => RobotProp::Program(parse_program(buffer, true)),
            _ => return Err(()),
        })
    }
    /// Stores this property into `robot`.
    fn apply(self, robot: &mut Robot) {
        match self {
            RobotProp::Name(name) => robot.name = name,
            RobotProp::Char(ch) => robot.ch = ch,
            RobotProp::X(x) => robot.position.0 = x as u16, //XXXjdm
            RobotProp::Y(y) => robot.position.1 = y as u16, //XXXjdm
            RobotProp::Program(program) => robot.program = program,
        }
    }
}
/// Parses a scroll property stream into a `Scroll`.
fn load_scroll(mut buffer: &[u8]) -> Result<Scroll, ()> {
    let mut scroll = Scroll::default();
    // Consume recognized properties until the end marker.
    while let (Some(prop), rest) = next_prop(buffer, ScrollProp::read).unwrap() {
        println!("{:?}", prop);
        prop.apply(&mut scroll);
        buffer = rest;
    }
    Ok(scroll)
}
/// Parses a sensor property stream into a `Sensor`.
fn load_sensor(mut buffer: &[u8]) -> Result<Sensor, ()> {
    let mut sensor = Sensor::default();
    // Consume recognized properties until the end marker.
    while let (Some(prop), rest) = next_prop(buffer, SensorProp::read).unwrap() {
        println!("{:?}", prop);
        prop.apply(&mut sensor);
        buffer = rest;
    }
    Ok(sensor)
}
/// Parses a robot property stream into a `Robot`.
fn load_robot(mut buffer: &[u8]) -> Result<Robot, ()> {
    let mut robot = Robot::default();
    // Consume recognized properties until the end marker.
    while let (Some(prop), rest) = next_prop(buffer, RobotProp::read).unwrap() {
        println!("{:?}", prop);
        prop.apply(&mut robot);
        buffer = rest;
    }
    Ok(robot)
}
/// A single parsed board property. Commented-out variants exist in the file
/// format but are not loaded yet.
#[derive(Debug)]
enum BoardProp {
    Name(ByteString),
    Width(u16),
    Height(u16),
    // Overlay mode byte; zero means no overlay.
    Overlay(u8),
    Robots(u8),
    Scrolls(u8),
    Sensors(u8),
    //FileVersion(u16),
    Mod(ByteString),
    ViewX(u8),
    ViewY(u8),
    ViewW(u8),
    ViewH(u8),
    Shoot(bool),
    /*Bomb(bool),
    BurnBrown(bool),
    BurnSpace(bool),
    BurnFake(bool),
    BurnTree(bool),
    ExplosionsLeave(u8),
    SaveMode(u8),
    ForestFloor(bool),
    CollectBombs(bool),
    BurnForever(bool),*/
    // Board ids reachable through each edge exit.
    North(BoardId),
    South(BoardId),
    East(BoardId),
    West(BoardId),
    /*RestardOnZap(bool),
    TimeLimit(u16),
    LockedNS(bool),
    LockedEW(bool),
    AttackLocked(bool),
    ResetOnEntry(bool),
    Charset(ByteString),
    Palette(ByteString),*/
}
/// Property ids as they appear in a board property stream.
impl BoardProp {
    const NAME: u16 = 0x0001;
    const WIDTH: u16 = 0x0002;
    const HEIGHT: u16 = 0x0003;
    const OVERLAY: u16 = 0x0004;
    const ROBOT_COUNT: u16 = 0x0005;
    const SCROLL_COUNT: u16 = 0x006;
    const SENSOR_COUNT: u16 = 0x007;
    const MOD: u16 = 0x0010;
    const VIEWPORT_X: u16 = 0x0011;
    const VIEWPORT_Y: u16 = 0x0012;
    const VIEWPORT_W: u16 = 0x0013;
    const VIEWPORT_H: u16 = 0x0014;
    const CAN_SHOOT: u16 = 0x0015;
    const NORTH: u16 = 0x0020;
    const SOUTH: u16 = 0x0021;
    const EAST: u16 = 0x0022;
    const WEST: u16 = 0x0023;
    /// Decodes one property payload; `Err(())` means the id is unrecognized.
    fn read(id: u16, buffer: &[u8]) -> Result<BoardProp, ()> {
        Ok(match id {
            BoardProp::NAME => {
                BoardProp::Name(get_null_terminated_string(buffer, LEGACY_BOARD_NAME_SIZE).0)
            }
            BoardProp::WIDTH => BoardProp::Width(get_word(buffer).0),
            BoardProp::HEIGHT => BoardProp::Height(get_word(buffer).0),
            BoardProp::OVERLAY => BoardProp::Overlay(get_byte(buffer).0),
            BoardProp::ROBOT_COUNT => BoardProp::Robots(get_byte(buffer).0),
            BoardProp::SCROLL_COUNT => BoardProp::Scrolls(get_byte(buffer).0),
            BoardProp::SENSOR_COUNT => BoardProp::Sensors(get_byte(buffer).0),
            // The mod filename is the whole payload, not NUL-terminated.
            BoardProp::MOD => BoardProp::Mod(ByteString::from(buffer)),
            BoardProp::VIEWPORT_X => BoardProp::ViewX(get_byte(buffer).0),
            BoardProp::VIEWPORT_Y => BoardProp::ViewY(get_byte(buffer).0),
            BoardProp::VIEWPORT_W => BoardProp::ViewW(get_byte(buffer).0),
            BoardProp::VIEWPORT_H => BoardProp::ViewH(get_byte(buffer).0),
            BoardProp::CAN_SHOOT => BoardProp::Shoot(get_bool(buffer).0),
            BoardProp::NORTH => BoardProp::North(BoardId(get_byte(buffer).0)),
            BoardProp::SOUTH => BoardProp::South(BoardId(get_byte(buffer).0)),
            BoardProp::EAST => BoardProp::East(BoardId(get_byte(buffer).0)),
            BoardProp::WEST => BoardProp::West(BoardId(get_byte(buffer).0)),
            _ => return Err(()),
        })
    }
    /// Stores this property's value into `board`.
    fn apply(self, board: &mut Board) {
        match self {
            BoardProp::Name(name) => board.title = name,
            BoardProp::Width(w) => board.width = w as usize,
            BoardProp::Height(h) => board.height = h as usize,
            BoardProp::Overlay(mode) => {
                // Zero means no overlay; the data vec is filled in later from
                // the overlay plane files.
                if mode != 0 {
                    board.overlay = Some((OverlayMode::from_byte(mode).unwrap(), vec![]));
                }
            }
            BoardProp::Robots(count) => {
                board.num_robots = count as usize;
            }
            BoardProp::Scrolls(count) => {
                board.num_scrolls = count as usize;
            }
            BoardProp::Sensors(count) => {
                board.num_sensors = count as usize;
            }
            BoardProp::Mod(file) => board.mod_file = file.into_string(),
            BoardProp::ViewX(x) => board.upper_left_viewport.0 = x,
            BoardProp::ViewY(y) => board.upper_left_viewport.1 = y,
            BoardProp::ViewW(w) => board.viewport_size.0 = w,
            BoardProp::ViewH(h) => board.viewport_size.1 = h,
            BoardProp::Shoot(b) => board.can_shoot = b,
            // Exit tuple order: (north, south, east, west).
            BoardProp::North(b) => board.exits.0 = Some(b),
            BoardProp::South(b) => board.exits.1 = Some(b),
            BoardProp::East(b) => board.exits.2 = Some(b),
            BoardProp::West(b) => board.exits.3 = Some(b),
        }
    }
}
/// Parses a board property stream into a `Board`.
fn load_board(mut buffer: &[u8]) -> Result<Board, ()> {
    let mut board = Board::default();
    // Consume recognized properties until the end marker.
    while let (Some(prop), rest) = next_prop(buffer, BoardProp::read).unwrap() {
        println!("{:?}", prop);
        prop.apply(&mut board);
        buffer = rest;
    }
    Ok(board)
}
|
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use cosmwasm_std::{Binary, CanonicalAddr, Storage, Uint128};
use cosmwasm_storage::{
bucket, bucket_read, prefixed, prefixed_read, singleton, singleton_read, Bucket,
PrefixedStorage, ReadonlyBucket, ReadonlyPrefixedStorage, ReadonlySingleton, Singleton,
};
/// Storage key for the contract configuration singleton.
pub static CONFIG_KEY: &[u8] = b"config";
/// Prefix for the raw beacons key-value namespace.
const BEACONS_KEY: &[u8] = b"beacons";
/// Prefix for the `Combination` bucket.
const COMBINATION_KEY: &[u8] = b"combination";
/// Prefix for the `Winner` bucket.
const WINNER_KEY: &[u8] = b"winner";
/// Prefix for the `PollInfoState` bucket.
const POLL_KEY: &[u8] = b"poll";
/// Contract-wide configuration and mutable state, stored as a singleton.
// NOTE(review): field names are camelCase (non-idiomatic Rust) but are part of
// the serialized schema via serde — renaming would break stored data/queries.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct State {
    // Admin address, stored canonicalized.
    pub owner: CanonicalAddr,
    pub blockTimePlay: u64,
    pub everyBlockTimePlay: u64,
    pub blockClaim: u64,
    pub blockIcoTimeframe: u64,
    pub everyBlockHeight: u64,
    pub denomTicket: String,
    pub denomDelegation: String,
    pub denomDelegationDecimal: Uint128,
    pub denomShare: String,
    // Addresses that already claimed a ticket / reward this round.
    pub claimTicket: Vec<CanonicalAddr>,
    pub claimReward: Vec<CanonicalAddr>,
    pub holdersRewards: Uint128,
    pub tokenHolderSupply: Uint128,
    // drand randomness-beacon parameters.
    pub drandPublicKey: Binary,
    pub drandPeriod: u64,
    pub drandGenesisTime: u64,
    pub validatorMinAmountToAllowClaim: Uint128,
    pub delegatorMinAmountInDelegation: Uint128,
    pub combinationLen: u8,
    pub jackpotReward: Uint128,
    pub jackpotPercentageReward: u8,
    pub tokenHolderPercentageFeeReward: u8,
    pub feeForDrandWorkerInPercentage: u8,
    pub prizeRankWinnerPercentage: Vec<u8>,
    pub pollCount: u64,
    pub holdersMaxPercentageReward: u8,
    pub workerDrandMaxPercentageReward: u8,
    pub pollEndHeight: u64,
}
/// Mutable accessor for the config singleton.
pub fn config(storage: &mut dyn Storage) -> Singleton<State> {
    singleton(storage, CONFIG_KEY)
}
/// Read-only accessor for the config singleton.
pub fn config_read(storage: &dyn Storage) -> ReadonlySingleton<State> {
    singleton_read(storage, CONFIG_KEY)
}
/// Mutable raw key-value namespace for beacons.
pub fn beacons_storage(storage: &mut dyn Storage) -> PrefixedStorage {
    prefixed(storage, BEACONS_KEY)
}
/// Read-only raw key-value namespace for beacons.
pub fn beacons_storage_read(storage: &dyn Storage) -> ReadonlyPrefixedStorage {
    prefixed_read(storage, BEACONS_KEY)
}
/// The set of addresses that played a given combination.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, Default)]
pub struct Combination {
    pub addresses: Vec<CanonicalAddr>,
}
/// Mutable bucket of combinations.
pub fn combination_storage(storage: &mut dyn Storage) -> Bucket<Combination> {
    bucket(storage, COMBINATION_KEY)
}
/// Read-only bucket of combinations.
pub fn combination_storage_read(storage: &dyn Storage) -> ReadonlyBucket<Combination> {
    bucket_read(storage, COMBINATION_KEY)
}
/// One winner entry and whether its prize has been claimed.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct WinnerInfoState {
    pub claimed: bool,
    pub address: CanonicalAddr,
}
/// All winners stored under one key.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, Default)]
pub struct Winner {
    pub winners: Vec<WinnerInfoState>,
}
/// Mutable bucket of winners.
pub fn winner_storage(storage: &mut dyn Storage) -> Bucket<Winner> {
    bucket(storage, WINNER_KEY)
}
/// Read-only bucket of winners.
pub fn winner_storage_read(storage: &dyn Storage) -> ReadonlyBucket<Winner> {
    bucket_read(storage, WINNER_KEY)
}
/// Lifecycle state of a governance poll.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub enum PollStatus {
    InProgress,
    Passed,
    Rejected,
    RejectedByCreator,
}
/// Which contract parameter a poll proposes to change.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub enum Proposal {
    MinAmountDelegator,
    MinAmountValidator,
    LotteryEveryBlockTime,
    HolderFeePercentage,
    DrandWorkerFeePercentage,
    PrizePerRank,
    JackpotRewardPercentage,
    ClaimEveryBlock,
    // test purpose
    NotExist,
}
/// Full state of one governance poll.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct PollInfoState {
    pub creator: CanonicalAddr,
    pub status: PollStatus,
    pub end_height: u64,
    pub start_height: u64,
    pub description: String,
    pub yes_voters: Vec<CanonicalAddr>,
    pub no_voters: Vec<CanonicalAddr>,
    // Proposed new value (interpretation depends on `proposal`).
    pub amount: Uint128,
    pub prizeRank: Vec<u8>,
    pub proposal: Proposal,
}
/// Mutable bucket of polls.
pub fn poll_storage(storage: &mut dyn Storage) -> Bucket<PollInfoState> {
    bucket(storage, POLL_KEY)
}
/// Read-only bucket of polls.
pub fn poll_storage_read(storage: &dyn Storage) -> ReadonlyBucket<PollInfoState> {
    bucket_read(storage, POLL_KEY)
}
/*
pub fn combination_storage(storage: &mut dyn Storage) -> PrefixedStorage{
prefixed(storage, COMBINATION_KEY)
}
pub fn combination_storage_read(storage: &dyn Storage) -> ReadonlyPrefixedStorage{
prefixed_read(storage, COMBINATION_KEY)
}*/
|
use reader::*;
/// Computes the set of type imports pulled in by importing `name` from
/// `namespace`, mapped to how fully each type must be included.
///
/// Walks the global type reader's namespace tree, collecting every type whose
/// include state is set, and resets each state back to `None` so repeated
/// calls start from a clean slate.
fn get_imports(namespace: &'static str, name: &'static str) -> BTreeMap<&'static str, TypeInclude> {
    let reader = TypeReader::get_mut();
    reader.import_type(namespace, name);
    let mut map = BTreeMap::<&'static str, TypeInclude>::new();
    // Recursively harvest included types from every included subtree.
    fn walk(tree: &mut TypeTree, map: &mut BTreeMap<&'static str, TypeInclude>) {
        if tree.include {
            for (name, entry) in &mut tree.types {
                if entry.include != TypeInclude::None {
                    map.insert(name, entry.include);
                    // Reset so the shared reader is clean for the next query.
                    entry.include = TypeInclude::None;
                }
            }
            // `values_mut` already yields `&mut TypeTree`; no re-borrow needed.
            tree.namespaces.values_mut().for_each(|tree| walk(tree, map));
        }
    }
    walk(&mut reader.types, &mut map);
    map
}
/// Verifies that importing each type pulls in exactly the expected set of
/// dependencies with the expected include depth (Full vs Minimal).
#[test]
fn test_dependencies() {
    let imports = get_imports("Windows.Win32.System.Com", "CreateUri");
    assert_eq!(imports.len(), 4);
    assert!(imports["CreateUri"] == TypeInclude::Full);
    assert!(imports["PWSTR"] == TypeInclude::Minimal);
    assert!(imports["URI_CREATE_FLAGS"] == TypeInclude::Minimal);
    assert!(imports["IUri"] == TypeInclude::Minimal);
    let imports = get_imports("Windows.Win32.Graphics.Direct2D", "ID2D1ImageSource");
    assert_eq!(imports.len(), 5);
    assert!(imports["ID2D1ImageSource"] == TypeInclude::Full);
    assert!(imports["ID2D1Image"] == TypeInclude::Full); // full because ID2D1ImageSource derives from it
    assert!(imports["ID2D1Resource"] == TypeInclude::Full); // full because ID2D1Image derives from it
    assert!(imports["BOOL"] == TypeInclude::Minimal); // from ID2D1ImageSource method
    assert!(imports["ID2D1Factory"] == TypeInclude::Minimal); // from ID2D1Resource method
    let imports = get_imports("Windows.Win32.Graphics.Direct3D12", "D3D12_INDIRECT_ARGUMENT_DESC");
    assert_eq!(imports.len(), 2);
    assert!(imports["D3D12_INDIRECT_ARGUMENT_DESC"] == TypeInclude::Full);
    assert!(imports["D3D12_INDIRECT_ARGUMENT_TYPE"] == TypeInclude::Minimal);
    let imports = get_imports("Windows.Win32.Graphics.Dxgi", "DXGI_FRAME_STATISTICS_MEDIA");
    assert_eq!(imports.len(), 2);
    assert!(imports["DXGI_FRAME_STATISTICS_MEDIA"] == TypeInclude::Full);
    assert!(imports["DXGI_FRAME_PRESENTATION_MODE"] == TypeInclude::Minimal);
    // An enum imported directly is Full even though it was Minimal above.
    let imports = get_imports("Windows.Win32.Graphics.Dxgi", "DXGI_FRAME_PRESENTATION_MODE");
    assert_eq!(imports.len(), 1);
    assert!(imports["DXGI_FRAME_PRESENTATION_MODE"] == TypeInclude::Full);
    let imports = get_imports("Windows.Foundation", "IAsyncAction");
    assert_eq!(imports.len(), 4);
    assert!(imports["IAsyncAction"] == TypeInclude::Full);
    assert!(imports["IAsyncInfo"] == TypeInclude::Full);
    assert!(imports["AsyncActionCompletedHandler"] == TypeInclude::Full);
    assert!(imports["AsyncStatus"] == TypeInclude::Full);
    let imports = get_imports("Windows.Foundation", "AsyncActionCompletedHandler");
    assert_eq!(imports.len(), 4);
    assert!(imports["AsyncActionCompletedHandler"] == TypeInclude::Full);
    assert!(imports["IAsyncAction"] == TypeInclude::Full);
    assert!(imports["AsyncStatus"] == TypeInclude::Full);
    assert!(imports["IAsyncInfo"] == TypeInclude::Full);
    let imports = get_imports("Windows.Foundation.Collections", "StringMap");
    assert_eq!(imports.len(), 11);
    assert!(imports["StringMap"] == TypeInclude::Full);
    assert!(imports["IMap"] == TypeInclude::Full);
    assert!(imports["IIterable"] == TypeInclude::Full);
    assert!(imports["IIterator"] == TypeInclude::Full);
    assert!(imports["IKeyValuePair"] == TypeInclude::Full);
    assert!(imports["IObservableMap"] == TypeInclude::Full);
    assert!(imports["EventRegistrationToken"] == TypeInclude::Full);
    assert!(imports["IMapChangedEventArgs"] == TypeInclude::Full);
    assert!(imports["IMapView"] == TypeInclude::Full);
    assert!(imports["MapChangedEventHandler"] == TypeInclude::Full);
    assert!(imports["CollectionChange"] == TypeInclude::Full);
    let imports = get_imports("Windows.Win32.Graphics.Direct3D11", "D3D11_DEPTH_STENCIL_VIEW_DESC");
    assert_eq!(imports.len(), 9);
    assert!(imports["D3D11_DEPTH_STENCIL_VIEW_DESC"] == TypeInclude::Full);
    assert!(imports["DXGI_FORMAT"] == TypeInclude::Minimal);
    assert!(imports["D3D11_DSV_DIMENSION"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX1D_DSV"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX1D_ARRAY_DSV"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX2D_DSV"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX2D_ARRAY_DSV"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX2DMS_DSV"] == TypeInclude::Minimal);
    assert!(imports["D3D11_TEX2DMS_ARRAY_DSV"] == TypeInclude::Minimal);
    // NOTE(review): len() is 3 but only 2 keys are asserted here — the third
    // entry is unchecked; worth naming it explicitly.
    let imports = get_imports("Component.Interfaces", "Test");
    assert_eq!(imports.len(), 3);
    assert!(imports["Test"] == TypeInclude::Full);
    assert!(imports["IRequires"] == TypeInclude::Minimal);
    let imports = get_imports("Component.Dependencies", "ISimple");
    assert_eq!(imports.len(), 1);
    assert!(imports["ISimple"] == TypeInclude::Full);
    let imports = get_imports("Component.Dependencies", "IRequiredDependencies");
    assert_eq!(imports.len(), 5);
    assert!(imports["IRequiredDependencies"] == TypeInclude::Full);
    assert!(imports["ISimple"] == TypeInclude::Full);
    assert!(imports["IStringable"] == TypeInclude::Full);
    assert!(imports["IIterable"] == TypeInclude::Full);
    assert!(imports["IIterator"] == TypeInclude::Full);
    let imports = get_imports("Component.Dependencies", "IMethodDependencies");
    assert_eq!(imports.len(), 5);
    assert!(imports["IMethodDependencies"] == TypeInclude::Full);
    assert!(imports["ISimple"] == TypeInclude::Minimal);
    assert!(imports["IStringable"] == TypeInclude::Full);
    assert!(imports["IIterable"] == TypeInclude::Full);
    assert!(imports["IIterator"] == TypeInclude::Full);
}
|
use std::path::Path;
use crow::{Context, LoadTextureError, Texture};
use crow_anim::Sprite;
use crate::config::SpriteSheetConfig;
/// A collection of sprites cut from a single texture.
#[derive(Debug, Clone)]
pub struct SpriteSheet {
    pub sprites: Vec<Sprite>,
}

impl SpriteSheet {
    /// Loads the texture at `config.image_path` and slices out every
    /// configured sprite.
    pub fn from_config(
        ctx: &mut Context,
        config: &SpriteSheetConfig,
    ) -> Result<Self, LoadTextureError> {
        let mut builder = Self::build(ctx, &config.image_path)?;
        for sprite in &config.sprites {
            builder.add_sprite(sprite.position, sprite.size, sprite.offset);
        }
        Ok(builder.finish())
    }

    /// Starts building a sheet from the texture at `path`.
    pub fn build<P: AsRef<Path>>(
        ctx: &mut Context,
        path: P,
    ) -> Result<SpriteSheetBuilder, LoadTextureError> {
        SpriteSheetBuilder::new(ctx, path)
    }

    /// Number of sprites in the sheet.
    pub fn count(&self) -> usize {
        self.sprites.len()
    }

    /// Returns a clone of the sprite at `idx`; panics when out of bounds.
    pub fn get(&self, idx: usize) -> Sprite {
        self.sprites[idx].clone()
    }

    /// Iterates over clones of all sprites.
    // Lifetime elided with `'_` instead of an explicit `'a` parameter.
    pub fn iter(&self) -> impl Iterator<Item = Sprite> + '_ {
        self.sprites.iter().cloned()
    }
}
/// Incrementally builds a `SpriteSheet` by slicing sections out of one texture.
pub struct SpriteSheetBuilder {
    pub texture: Texture,
    sprites: Vec<Sprite>,
}
impl SpriteSheetBuilder {
    /// Loads the texture at `path` and starts an empty builder.
    pub fn new<P: AsRef<Path>>(ctx: &mut Context, path: P) -> Result<Self, LoadTextureError> {
        Texture::load(ctx, path).map(SpriteSheetBuilder::from_texture)
    }
    /// Starts an empty builder over an already-loaded texture.
    pub fn from_texture(texture: Texture) -> Self {
        SpriteSheetBuilder {
            texture,
            sprites: Vec::new(),
        }
    }
    /// Appends a sprite cut from the texture section at `position`/`size`,
    /// drawn with the given `offset`.
    pub fn add_sprite(&mut self, position: (u32, u32), size: (u32, u32), offset: (i32, i32)) {
        self.sprites.push(Sprite {
            texture: self.texture.get_section(position, size),
            offset,
        })
    }
    /// Consumes the builder, producing the finished sheet.
    pub fn finish(self) -> SpriteSheet {
        SpriteSheet {
            sprites: self.sprites,
        }
    }
}
|
use std::borrow::Cow;
use std::cell::{Cell, RefCell};
use std::collections::HashSet;
use std::default::Default;
use std::fmt;
use std::mem;
use std::rc::{Rc, Weak};
use std::str;
use html5ever::interface::tree_builder::{
ElementFlags, NoQuirks, NodeOrText, QuirksMode, TreeSink,
};
use html5ever::tendril::*;
use html5ever::{Attribute, ExpandedName, QualName};
/// The different kinds of nodes in the DOM.
#[derive(Debug)]
pub enum NodeData {
    /// The `Document` itself - the root node of a HTML document.
    Document,
    /// A doctype declaration.
    ///
    /// [dtd wiki]: https://en.wikipedia.org/wiki/Document_type_declaration
    Doctype {
        name: StrTendril,
        public_id: StrTendril,
        system_id: StrTendril,
    },
    /// A text node.
    Text { contents: RefCell<StrTendril> },
    /// A comment.
    Comment { contents: StrTendril },
    /// An element with attributes.
    Element {
        name: QualName,
        attrs: RefCell<Vec<Attribute>>,
        /// For `<template>` elements only, the separate contents document.
        ///
        /// [template contents]: https://html.spec.whatwg.org/multipage/#template-contents
        template_contents: RefCell<Option<Handle>>,
        /// Whether this element is an HTML integration point.
        ///
        /// [HTML integration point]: https://html.spec.whatwg.org/multipage/#html-integration-point
        mathml_annotation_xml_integration_point: bool,
    },
    /// A Processing instruction.
    ProcessingInstruction {
        target: StrTendril,
        contents: StrTendril,
    },
}
/// A DOM node.
pub struct Node {
    /// Weak back-pointer to the parent, if the node is attached to a tree.
    pub parent: Cell<Option<WeakHandle>>,
    pub children: RefCell<Vec<Handle>>,
    pub data: NodeData,
}
impl Node {
    /// Create a new parentless, childless node from its contents.
    pub fn new(data: NodeData) -> Rc<Self> {
        Rc::new(Node {
            // Field-init shorthand instead of `data: data`.
            data,
            parent: Cell::new(None),
            children: RefCell::new(Vec::new()),
        })
    }
}
// Manual Debug: prints only `data`, skipping parent/children to avoid
// recursing through the whole tree (and parent back-edges).
impl fmt::Debug for Node {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Node").field("data", &self.data).finish()
    }
}
/// Non-owning reference to a node (used for parent back-pointers).
pub type WeakHandle = Weak<Node>;
/// Owning, reference-counted handle to a node.
pub type Handle = Rc<Node>;
/// The DOM itself; the result of parsing.
pub struct Dom {
    pub document: Handle,
    pub errors: Vec<Cow<'static, str>>,
    pub quirks_mode: QuirksMode,
}
impl Default for Dom {
    /// An empty document with no errors, in no-quirks mode.
    fn default() -> Dom {
        Dom {
            document: Node::new(NodeData::Document),
            errors: vec![],
            quirks_mode: NoQuirks,
        }
    }
}
/// Append a parentless node to another nodes' children
pub fn append(new_parent: &Handle, child: Handle) {
    let previous_parent = child.parent.replace(Some(Rc::downgrade(new_parent)));
    // Invariant: child cannot have existing parent
    assert!(previous_parent.is_none());
    new_parent.children.borrow_mut().push(child);
}
/// If the node has a parent, get it and this node's position in its children.
///
/// Panics if the parent weak pointer dangles or the child is missing from the
/// parent's child list — both are tree-invariant violations.
fn get_parent_and_index(target: &Handle) -> Option<(Handle, usize)> {
    // `Cell` contents can't be borrowed, so take the weak pointer out and
    // put it straight back.
    let weak = target.parent.take()?;
    let parent = weak.upgrade().expect("dangling weak pointer");
    target.parent.set(Some(weak));
    let i = parent
        .children
        .borrow()
        .iter()
        .position(|child| Rc::ptr_eq(child, target))
        .expect("have parent but couldn't find in parent's children!");
    Some((parent, i))
}
/// Appends `text` to `prev` when it is a text node; returns whether it did.
fn append_to_existing_text(prev: &Handle, text: &str) -> bool {
    if let NodeData::Text { ref contents } = prev.data {
        contents.borrow_mut().push_slice(text);
        true
    } else {
        false
    }
}
/// Detaches `target` from its parent, if any: removes it from the parent's
/// child list and clears its parent pointer. No-op for parentless nodes.
pub fn remove_from_parent(target: &Handle) {
    if let Some((parent, i)) = get_parent_and_index(target) {
        parent.children.borrow_mut().remove(i);
        target.parent.set(None);
    }
}
impl TreeSink for Dom {
    type Output = Self;
    /// Hands the finished DOM back to the parser's caller.
    fn finish(self) -> Self {
        self
    }
    type Handle = Handle;
    /// Records a parse error without aborting.
    fn parse_error(&mut self, msg: Cow<'static, str>) {
        self.errors.push(msg);
    }
    /// The root document handle (cheap Rc clone).
    fn get_document(&mut self) -> Handle {
        self.document.clone()
    }
    /// The separate contents document of a `<template>` element.
    /// Panics if `target` is not an element or has no template contents.
    fn get_template_contents(&mut self, target: &Handle) -> Handle {
        if let NodeData::Element {
            ref template_contents,
            ..
        } = target.data
        {
            template_contents
                .borrow()
                .as_ref()
                .expect("not a template element!")
                .clone()
        } else {
            panic!("not a template element!")
        }
    }
    fn set_quirks_mode(&mut self, mode: QuirksMode) {
        self.quirks_mode = mode;
    }
    /// Node identity is Rc pointer identity.
    fn same_node(&self, x: &Handle, y: &Handle) -> bool {
        Rc::ptr_eq(x, y)
    }
fn elem_name<'a>(&self, target: &'a Handle) -> ExpandedName<'a> {
return match target.data {
NodeData::Element { ref name, .. } => name.expanded(),
_ => panic!("not an element!"),
};
}
    /// Creates a detached element node. For `<template>` elements a fresh
    /// contents document is allocated up front.
    fn create_element(
        &mut self,
        name: QualName,
        attrs: Vec<Attribute>,
        flags: ElementFlags,
    ) -> Handle {
        Node::new(NodeData::Element {
            name,
            attrs: RefCell::new(attrs),
            template_contents: RefCell::new(if flags.template {
                Some(Node::new(NodeData::Document))
            } else {
                None
            }),
            mathml_annotation_xml_integration_point: flags.mathml_annotation_xml_integration_point,
        })
    }
fn create_comment(&mut self, text: StrTendril) -> Handle {
Node::new(NodeData::Comment { contents: text })
}
fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> Handle {
Node::new(NodeData::ProcessingInstruction {
target: target,
contents: data,
})
}
fn append(&mut self, parent: &Handle, child: NodeOrText<Handle>) {
// Append to an existing Text node if we have one.
match child {
NodeOrText::AppendText(ref text) => match parent.children.borrow().last() {
Some(h) => {
if append_to_existing_text(h, &text) {
return;
}
}
_ => (),
},
_ => (),
}
append(
&parent,
match child {
NodeOrText::AppendText(text) => Node::new(NodeData::Text {
contents: RefCell::new(text),
}),
NodeOrText::AppendNode(node) => node,
},
);
}
fn append_before_sibling(&mut self, sibling: &Handle, child: NodeOrText<Handle>) {
let (parent, i) = get_parent_and_index(&sibling)
.expect("append_before_sibling called on node without parent");
let child = match (child, i) {
// No previous node.
(NodeOrText::AppendText(text), 0) => Node::new(NodeData::Text {
contents: RefCell::new(text),
}),
// Look for a text node before the insertion point.
(NodeOrText::AppendText(text), i) => {
let children = parent.children.borrow();
let prev = &children[i - 1];
if append_to_existing_text(prev, &text) {
return;
}
Node::new(NodeData::Text {
contents: RefCell::new(text),
})
}
// The tree builder promises we won't have a text node after
// the insertion point.
// Any other kind of node.
(NodeOrText::AppendNode(node), _) => node,
};
remove_from_parent(&child);
child.parent.set(Some(Rc::downgrade(&parent)));
parent.children.borrow_mut().insert(i, child);
}
fn append_based_on_parent_node(
&mut self,
element: &Self::Handle,
prev_element: &Self::Handle,
child: NodeOrText<Self::Handle>,
) {
let parent = element.parent.take();
let has_parent = parent.is_some();
element.parent.set(parent);
if has_parent {
self.append_before_sibling(element, child);
} else {
self.append(prev_element, child);
}
}
fn append_doctype_to_document(
&mut self,
name: StrTendril,
public_id: StrTendril,
system_id: StrTendril,
) {
append(
&self.document,
Node::new(NodeData::Doctype {
name,
public_id,
system_id,
}),
);
}
fn add_attrs_if_missing(&mut self, target: &Handle, attrs: Vec<Attribute>) {
let mut existing = if let NodeData::Element { ref attrs, .. } = target.data {
attrs.borrow_mut()
} else {
panic!("not an element")
};
let existing_names = existing
.iter()
.map(|e| e.name.clone())
.collect::<HashSet<_>>();
existing.extend(
attrs
.into_iter()
.filter(|attr| !existing_names.contains(&attr.name)),
);
}
fn remove_from_parent(&mut self, target: &Handle) {
remove_from_parent(&target);
}
fn reparent_children(&mut self, node: &Handle, new_parent: &Handle) {
let mut children = node.children.borrow_mut();
let mut new_children = new_parent.children.borrow_mut();
for child in children.iter() {
let previous_parent = child.parent.replace(Some(Rc::downgrade(&new_parent)));
assert!(Rc::ptr_eq(
&node,
&previous_parent.unwrap().upgrade().expect("dangling weak")
))
}
new_children.extend(mem::replace(&mut *children, Vec::new()));
}
fn is_mathml_annotation_xml_integration_point(&self, target: &Handle) -> bool {
if let NodeData::Element {
mathml_annotation_xml_integration_point,
..
} = target.data
{
mathml_annotation_xml_integration_point
} else {
panic!("not an element!")
}
}
}
|
//https://leetcode.com/problems/generate-a-string-with-characters-that-have-odd-counts/
impl Solution {
pub fn generate_the_string(n: i32) -> String {
let mut odd_count = String::from("");
for i in 0..n-1 {
odd_count.push('a');
}
match n%2 == 0 {
true => odd_count.push('b'),
false => odd_count.push('a')
}
odd_count
}
} |
use std::iter::FromIterator;
/// Drains `iter` and returns every produced item in a `Vec`, in order.
pub fn invoke<I>(iter: I) -> Vec<I::Item>
where
    I: Iterator,
{
    let mut items = Vec::new();
    for item in iter {
        items.push(item);
    }
    items
}
/// Collects an iterator of `Result`s into `Ok(Vec<T>)`, short-circuiting
/// with the first `Err` encountered.
pub fn try_invoke<I, T, E>(iter: I) -> Result<Vec<T>, E>
where
    I: Iterator<Item = Result<T, E>>,
{
    iter.collect()
}
/// Runs `iter` to completion purely for its side effects, discarding every
/// item it yields.
pub fn exec<I>(iter: I)
where
    I: Iterator,
{
    for item in iter {
        drop(item);
    }
}
/// Runs an iterator of `Result`s for side effects, discarding `Ok` values and
/// returning the first `Err` encountered (remaining items are not consumed).
pub fn try_exec<I, T, E>(iter: I) -> Result<(), E>
where
    I: Iterator<Item = Result<T, E>>,
{
    for res in iter {
        res?;
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    // Alias the current crate under its external name so code generated by the
    // `#[hook]` macro can refer to `brace_hook::…` from within this crate.
    extern crate self as brace_hook;
    use brace_hook::hook;
    // Hook-point declaration: the empty body is presumably replaced by the
    // `#[hook]` proc macro with dispatch machinery (it also generates the
    // `my_hook::with(…)` constructor used below) — TODO confirm against the
    // macro's implementation, which is not visible in this file.
    #[hook]
    fn my_hook(input: &str) -> Result<String, String> {}
    // Registrations: `#[hook(name, weight)]` attaches an implementation to the
    // named hook point; the integer presumably orders invocation.
    #[hook(my_hook, 1)]
    fn my_hook_1(input: &str) -> Result<String, String> {
        Ok(format!("my_hook_1: {}", input))
    }
    #[hook(my_hook, 2)]
    fn my_hook_2(input: &str) -> Result<String, String> {
        Ok(format!("my_hook_2: {}", input))
    }
    // invoke: collects every registered hook's result, in registration order.
    #[test]
    fn test_invoke() {
        let res = hook::invoke(my_hook::with("hello"));
        assert_eq!(res.len(), 2);
        assert_eq!(res[0], Ok(String::from("my_hook_1: hello")));
        assert_eq!(res[1], Ok(String::from("my_hook_2: hello")));
    }
    // try_invoke: all hooks succeed, so the unwrapped Ok values are returned.
    #[test]
    fn test_try_invoke_ok() {
        let res = hook::try_invoke(my_hook::with("hello"));
        assert!(res.is_ok());
        let res = res.unwrap();
        assert_eq!(res.len(), 2);
        assert_eq!(res[0], String::from("my_hook_1: hello"));
        assert_eq!(res[1], String::from("my_hook_2: hello"));
    }
    #[hook]
    fn my_bad_hook(input: &str) -> Result<String, String> {}
    #[hook(my_bad_hook, 1)]
    fn my_bad_hook_1(input: &str) -> Result<String, String> {
        Err(format!("my_bad_hook_1: {}", input))
    }
    #[hook(my_bad_hook, 2)]
    fn my_bad_hook_2(input: &str) -> Result<String, String> {
        Ok(format!("my_bad_hook_2: {}", input))
    }
    // try_invoke short-circuits with the first hook's error.
    #[test]
    fn test_try_invoke_err() {
        let res = hook::try_invoke(my_bad_hook::with("hello"));
        assert!(res.is_err());
        assert_eq!(res, Err(String::from("my_bad_hook_1: hello")));
    }
    // Hooks that mutate shared state through a &mut parameter.
    #[hook]
    fn my_mut_hook(output: &mut Vec<&str>) -> Result<(), String> {}
    #[hook(my_mut_hook, 1)]
    fn my_mut_hook_1(output: &mut Vec<&str>) -> Result<(), String> {
        output.push("my_mut_hook_1");
        Ok(())
    }
    #[hook(my_mut_hook, 2)]
    fn my_mut_hook_2(output: &mut Vec<&str>) -> Result<(), String> {
        output.push("my_mut_hook_2");
        Ok(())
    }
    // exec: runs all hooks for side effects, ignoring their results.
    #[test]
    fn test_exec() {
        let mut items = Vec::new();
        hook::exec(my_mut_hook::with(&mut items));
        assert_eq!(items.len(), 2);
        assert_eq!(items[0], "my_mut_hook_1");
        assert_eq!(items[1], "my_mut_hook_2");
    }
    #[test]
    fn test_try_exec_ok() {
        let mut items = Vec::new();
        let res = hook::try_exec(my_mut_hook::with(&mut items));
        assert!(res.is_ok());
        assert_eq!(items.len(), 2);
        assert_eq!(items[0], "my_mut_hook_1");
        assert_eq!(items[1], "my_mut_hook_2");
    }
    #[hook]
    fn my_bad_mut_hook(output: &mut Vec<&str>) -> Result<(), String> {}
    #[hook(my_bad_mut_hook, 1)]
    fn my_bad_mut_hook_1(output: &mut Vec<&str>) -> Result<(), String> {
        output.push("my_bad_mut_hook_1");
        Err(String::from("my_bad_mut_hook_1"))
    }
    #[hook(my_bad_mut_hook, 2)]
    fn my_bad_mut_hook_2(output: &mut Vec<&str>) -> Result<(), String> {
        output.push("my_bad_mut_hook_2");
        Ok(())
    }
    // try_exec stops at the first error: the second hook never runs, so only
    // one item is pushed.
    #[test]
    fn test_try_exec_err() {
        let mut items = Vec::new();
        let res = hook::try_exec(my_bad_mut_hook::with(&mut items));
        assert!(res.is_err());
        assert_eq!(res, Err(String::from("my_bad_mut_hook_1")));
        assert_eq!(items.len(), 1);
        assert_eq!(items[0], "my_bad_mut_hook_1");
    }
}
|
#[macro_use]
extern crate log;
extern crate cgmath;
extern crate rustc_serialize;
#[macro_use]
extern crate gfx;
extern crate gfx_device_gl;
extern crate gfx_window_glutin;
extern crate glutin;
extern crate image;
extern crate find_folder;
extern crate specs;
extern crate time;
extern crate env_logger;
extern crate rand;
mod art;
mod components;
mod core;
mod events;
mod graphics;
mod systems;
mod utils;
|
use std::env;
fn main() {
    // Build-script entry point: either link against a pre-built shared
    // `clingo` library (dynamic_linking feature) or build the bundled
    // submodule statically via CMake.
    //
    // NOTE(review): `cfg!(feature = "…")` inside build.rs relies on Cargo
    // compiling the build script with the crate's feature cfgs; the
    // documented mechanism for feature detection in build scripts is the
    // `CARGO_FEATURE_DYNAMIC_LINKING` environment variable — confirm this
    // behaves as intended for this crate.
    if cfg!(feature = "dynamic_linking") {
        // Caller must point us at an existing libclingo.
        let path = env::var("CLINGO_LIBRARY_PATH").expect("$CLINGO_LIBRARY_PATH should be defined");
        println!("cargo:rustc-link-search=native={}", path);
        println!("cargo:rustc-link-lib=dylib=clingo");
    } else {
        // update clingo submodule
        // git submodule update --init --recursive
        // create bindings
        // let bindings = bindgen::Builder::default()
        //     .header("clingo/libclingo/clingo.h")
        //     .no_copy("clingo_solve_control")
        //     .no_copy("clingo_model")
        //     .no_copy("clingo_solve_handle")
        //     .no_copy("clingo_program_builder")
        //     .no_copy("clingo_control")
        //     .no_copy("clingo_options")
        //     .no_copy("clingo_symbolic_atoms")
        //     .no_copy("clingo_theory_atoms")
        //     .no_copy("clingo_assignment")
        //     .no_copy("clingo_propagate_init")
        //     .no_copy("clingo_propagate_control")
        //     .no_copy("clingo_backend")
        //     .no_copy("clingo_configuration")
        //     .no_copy("clingo_statistic")
        //     // .no_copy("clingo_ast_term")
        //     // .no_copy("clingo_ast_function")
        //     // .no_copy("clingo_ast_pool")
        //     // .no_copy("clingo_ast_csp_product_term_t")
        //     .blacklist_type("max_align_t") // https://github.com/rust-lang/rust-bindgen/issues/550
        //     .size_t_is_usize(true)
        //     .generate()
        //     .expect("Unable to generate bindings");
        // write the bindings to the bindings.rs file.
        // bindings
        //     .write_to_file("bindings.rs")
        //     .expect("Couldn't write bindings!");
        // build clingo for static linking
        use cmake::Config;
        let dst = Config::new("clingo")
            .very_verbose(true)
            .define("CLINGO_BUILD_SHARED", "OFF")
            .define("CLINGO_BUILD_STATIC", "ON")
            .define("CLINGO_MANAGE_RPATH", "OFF")
            .define("CLINGO_BUILD_WITH_PYTHON", "OFF")
            .define("CLINGO_BUILD_WITH_LUA", "OFF")
            .define("CLINGO_INSTALL_LIB", "ON")
            .define("CLINGO_BUILD_APPS", "OFF")
            .define("CLASP_BUILD_APP", "OFF")
            .build();
        // Point the linker at the freshly built static libraries and pull in
        // every clingo component archive.
        println!(
            "cargo:rustc-link-search=native={}",
            dst.join("lib").display()
        );
        println!("cargo:rustc-link-lib=static=clingo");
        println!("cargo:rustc-link-lib=static=reify");
        println!("cargo:rustc-link-lib=static=potassco");
        println!("cargo:rustc-link-lib=static=clasp");
        println!("cargo:rustc-link-lib=static=gringo");
        // clingo is C++: link the platform's C++ runtime explicitly.
        if cfg!(target_os = "linux") {
            println!("cargo:rustc-link-lib=dylib=stdc++");
        } else if cfg!(target_os = "macos") {
            println!("cargo:rustc-link-lib=dylib=c++");
        }
    }
    // println!("cargo:rustc-link-lib=python3.6m");
    // -DWITH_PYTHON=1 -I/usr/include/python3.6m
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from ../gir-files
// DO NOT EDIT
use crate::Address;
use crate::AuthDomain;
use crate::Socket;
use glib::translate::*;
// Boxed-type wrapper around libsoup's `SoupClientContext`: the `match fn`
// arms wire the GLib boxed copy/free/get_type functions into the wrapper so
// it can be cloned and dropped through the GObject type system. Generated by
// gir — keep in sync with the .gir files rather than editing by hand.
glib::wrapper! {
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct ClientContext(Boxed<ffi::SoupClientContext>);
    match fn {
        copy => |ptr| glib::gobject_ffi::g_boxed_copy(ffi::soup_client_context_get_type(), ptr as *mut _) as *mut ffi::SoupClientContext,
        free => |ptr| glib::gobject_ffi::g_boxed_free(ffi::soup_client_context_get_type(), ptr as *mut _),
        type_ => || ffi::soup_client_context_get_type(),
    }
}
// Thin accessors over the libsoup C API. Each method converts `self` to a raw
// mutable pointer (`to_glib_none_mut`), calls the C getter, and converts the
// result back: `from_glib_none` borrows (C keeps ownership), `from_glib_full`
// transfers ownership to Rust (`steal_connection`). The `&mut self` receivers
// exist because `to_glib_none_mut` requires a mutable borrow.
impl ClientContext {
    #[doc(alias = "soup_client_context_get_address")]
    #[doc(alias = "get_address")]
    pub fn address(&mut self) -> Option<Address> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_address(self.to_glib_none_mut().0))
        }
    }
    #[doc(alias = "soup_client_context_get_auth_domain")]
    #[doc(alias = "get_auth_domain")]
    pub fn auth_domain(&mut self) -> Option<AuthDomain> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_auth_domain(self.to_glib_none_mut().0))
        }
    }
    #[doc(alias = "soup_client_context_get_auth_user")]
    #[doc(alias = "get_auth_user")]
    pub fn auth_user(&mut self) -> Option<glib::GString> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_auth_user(self.to_glib_none_mut().0))
        }
    }
    #[cfg(any(feature = "v2_48", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_48")))]
    #[doc(alias = "soup_client_context_get_gsocket")]
    #[doc(alias = "get_gsocket")]
    pub fn gsocket(&mut self) -> Option<gio::Socket> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_gsocket(self.to_glib_none_mut().0))
        }
    }
    #[doc(alias = "soup_client_context_get_host")]
    #[doc(alias = "get_host")]
    pub fn host(&mut self) -> Option<glib::GString> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_host(self.to_glib_none_mut().0))
        }
    }
    #[cfg(any(feature = "v2_48", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_48")))]
    #[doc(alias = "soup_client_context_get_local_address")]
    #[doc(alias = "get_local_address")]
    pub fn local_address(&mut self) -> Option<gio::SocketAddress> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_local_address(self.to_glib_none_mut().0))
        }
    }
    #[cfg(any(feature = "v2_48", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_48")))]
    #[doc(alias = "soup_client_context_get_remote_address")]
    #[doc(alias = "get_remote_address")]
    pub fn remote_address(&mut self) -> Option<gio::SocketAddress> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_remote_address(self.to_glib_none_mut().0))
        }
    }
    #[doc(alias = "soup_client_context_get_socket")]
    #[doc(alias = "get_socket")]
    pub fn socket(&mut self) -> Option<Socket> {
        unsafe {
            from_glib_none(ffi::soup_client_context_get_socket(self.to_glib_none_mut().0))
        }
    }
    #[cfg(any(feature = "v2_50", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_50")))]
    #[doc(alias = "soup_client_context_steal_connection")]
    // `from_glib_full`: ownership of the returned stream transfers to Rust.
    pub fn steal_connection(&mut self) -> Option<gio::IOStream> {
        unsafe {
            from_glib_full(ffi::soup_client_context_steal_connection(self.to_glib_none_mut().0))
        }
    }
}
|
//! Implementation of statistics based pruning
use crate::QueryChunk;
use arrow::{
array::{ArrayRef, UInt64Array},
datatypes::{DataType, SchemaRef},
};
use datafusion::{
physical_expr::execution_props::ExecutionProps,
physical_optimizer::pruning::PruningStatistics,
physical_plan::{ColumnStatistics, Statistics},
prelude::{col, lit_timestamp_nano, Column, Expr},
scalar::ScalarValue,
};
use datafusion_util::create_pruning_predicate;
use observability_deps::tracing::{debug, trace, warn};
use predicate::Predicate;
use query_functions::group_by::Aggregate;
use schema::{Schema, TIME_COLUMN_NAME};
use std::sync::Arc;
/// Reason why a chunk could not be pruned.
///
/// Also see [`PruningObserver::could_not_prune`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum NotPrunedReason {
    /// The predicate had no filter expression, so there was nothing to
    /// prune on.
    NoExpressionOnPredicate,
    /// A DataFusion pruning predicate could not be created from the filter
    /// expression and table schema.
    CanNotCreatePruningPredicate,
    /// DataFusion's pruning evaluation itself failed.
    DataFusionPruningFailed,
}
impl NotPrunedReason {
/// Human-readable string representation.
pub fn name(&self) -> &'static str {
match self {
Self::NoExpressionOnPredicate => "No expression on predicate",
Self::CanNotCreatePruningPredicate => "Can not create pruning predicate",
Self::DataFusionPruningFailed => "DataFusion pruning failed",
}
}
}
impl std::fmt::Display for NotPrunedReason {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate to the canonical human-readable name.
        f.write_str(self.name())
    }
}
/// Something that cares to be notified when pruning of chunks occurs
///
/// Every method has a no-op default implementation, so implementors only
/// override the notifications they care about.
pub trait PruningObserver {
    /// Called when the specified chunk was pruned
    fn was_pruned(&self, _chunk: &dyn QueryChunk) {}
    /// Called when a chunk was not pruned.
    fn was_not_pruned(&self, _chunk: &dyn QueryChunk) {}
    /// Called when no pruning can happen at all for some reason.
    ///
    /// Since pruning is optional and _only_ improves performance but its lack does not affect correctness, this will
    /// NOT lead to a query error.
    ///
    /// In this case, statistical pruning will not happen and neither [`was_pruned`](Self::was_pruned) nor
    /// [`was_not_pruned`](Self::was_not_pruned) will be called.
    fn could_not_prune(&self, _reason: NotPrunedReason, _chunk: &dyn QueryChunk) {}
}
/// Given a Vec of prunable items, returns a possibly smaller set
/// filtering those where the predicate can be proven to evaluate to
/// `false` for every single row.
///
/// The result has one entry per input chunk, in the same order; `false`
/// means the chunk can be skipped entirely.
///
/// # Errors
/// Returns a [`NotPrunedReason`] when pruning could not be attempted at all.
pub fn prune_chunks(
    table_schema: &Schema,
    chunks: &[Arc<dyn QueryChunk>],
    predicate: &Predicate,
) -> Result<Vec<bool>, NotPrunedReason> {
    let num_chunks = chunks.len();
    debug!(num_chunks, %predicate, "Pruning chunks");
    // Pruning only needs each chunk's statistics paired with its Arrow
    // schema, not the chunks themselves.
    let summaries: Vec<_> = chunks
        .iter()
        .map(|c| (c.stats(), c.schema().as_arrow()))
        .collect();
    prune_summaries(table_schema, &summaries, predicate)
}
/// Given a `Vec` of pruning summaries, return a `Vec<bool>` where `false` indicates that the
/// predicate can be proven to evaluate to `false` for every single row.
///
/// Each element of the result corresponds to the summary at the same index.
///
/// # Errors
/// - [`NotPrunedReason::NoExpressionOnPredicate`] if the predicate yields no filter expression
/// - [`NotPrunedReason::CanNotCreatePruningPredicate`] if the pruning predicate cannot be built
/// - [`NotPrunedReason::DataFusionPruningFailed`] if DataFusion's pruning evaluation errors
pub fn prune_summaries(
    table_schema: &Schema,
    summaries: &[(Arc<Statistics>, SchemaRef)],
    predicate: &Predicate,
) -> Result<Vec<bool>, NotPrunedReason> {
    let filter_expr = match predicate.filter_expr() {
        Some(expr) => expr,
        None => {
            debug!("No expression on predicate");
            return Err(NotPrunedReason::NoExpressionOnPredicate);
        }
    };
    trace!(%filter_expr, "Filter_expr of pruning chunks");
    // no information about the queries here
    let props = ExecutionProps::new();
    let pruning_predicate =
        match create_pruning_predicate(&props, &filter_expr, &table_schema.as_arrow()) {
            Ok(p) => p,
            Err(e) => {
                warn!(%e, ?filter_expr, "Can not create pruning predicate");
                return Err(NotPrunedReason::CanNotCreatePruningPredicate);
            }
        };
    // Adapter exposing the summaries through DataFusion's PruningStatistics
    // interface (defined below).
    let statistics = ChunkPruningStatistics {
        table_schema,
        summaries,
    };
    let results = match pruning_predicate.prune(&statistics) {
        Ok(results) => results,
        Err(e) => {
            warn!(%e, ?filter_expr, "DataFusion pruning failed");
            return Err(NotPrunedReason::DataFusionPruningFailed);
        }
    };
    Ok(results)
}
/// Wraps a collection of [`QueryChunk`] and implements the [`PruningStatistics`]
/// interface required for pruning
struct ChunkPruningStatistics<'a> {
    /// Table-wide schema, used to resolve a column's [`DataType`].
    table_schema: &'a Schema,
    /// Per-chunk statistics paired with each chunk's Arrow schema.
    summaries: &'a [(Arc<Statistics>, SchemaRef)],
}
impl<'a> ChunkPruningStatistics<'a> {
    /// Returns the [`DataType`] for `column`
    ///
    /// `None` when the column is not part of the table schema.
    fn column_type(&self, column: &Column) -> Option<&DataType> {
        let index = self.table_schema.find_index_of(&column.name)?;
        Some(self.table_schema.field(index).1.data_type())
    }
    /// Returns an iterator that for each chunk returns the [`Statistics`]
    /// for the provided `column` if any
    ///
    /// Yields exactly one entry per summary; an entry is `None` when the
    /// chunk has no column statistics at all or the column is absent from
    /// that chunk's schema.
    fn column_summaries<'b: 'a, 'c: 'a>(
        &'c self,
        column: &'b Column,
    ) -> impl Iterator<Item = Option<&'a ColumnStatistics>> + 'a {
        self.summaries.iter().map(|(stats, schema)| {
            let stats = stats.column_statistics.as_ref()?;
            // Columns are looked up by name in each chunk's own schema, since
            // chunk schemas may differ from the table schema.
            let idx = schema.index_of(&column.name).ok()?;
            Some(&stats[idx])
        })
    }
}
// DataFusion asks for per-container (here: per-chunk) min/max/null-count
// arrays; each array has `num_containers()` entries, one per summary.
impl<'a> PruningStatistics for ChunkPruningStatistics<'a> {
    fn min_values(&self, column: &Column) -> Option<ArrayRef> {
        let data_type = self.column_type(column)?;
        let summaries = self.column_summaries(column);
        collect_pruning_stats(data_type, summaries, Aggregate::Min)
    }
    fn max_values(&self, column: &Column) -> Option<ArrayRef> {
        let data_type = self.column_type(column)?;
        let summaries = self.column_summaries(column);
        collect_pruning_stats(data_type, summaries, Aggregate::Max)
    }
    fn num_containers(&self) -> usize {
        self.summaries.len()
    }
    fn null_counts(&self, column: &Column) -> Option<ArrayRef> {
        // Unknown null counts become NULL entries in the resulting array
        // (UInt64Array::from_iter maps None -> null).
        let null_counts = self
            .column_summaries(column)
            .map(|x| x.and_then(|s| s.null_count.map(|x| x as u64)));
        Some(Arc::new(UInt64Array::from_iter(null_counts)))
    }
}
/// Collects an [`ArrayRef`] containing the aggregate statistic corresponding to
/// `aggregate` for each of the provided [`Statistics`]
///
/// Chunks with no statistics for the column contribute a typed NULL value.
/// Returns `None` if `data_type` has no NULL representation or the array
/// cannot be built.
fn collect_pruning_stats<'a>(
    data_type: &DataType,
    statistics: impl Iterator<Item = Option<&'a ColumnStatistics>>,
    aggregate: Aggregate,
) -> Option<ArrayRef> {
    // Typed NULL used as the placeholder for missing statistics.
    let null = ScalarValue::try_from(data_type).ok()?;
    ScalarValue::iter_to_array(statistics.map(|stats| {
        stats
            .and_then(|stats| get_aggregate(stats, aggregate).cloned())
            .unwrap_or_else(|| null.clone())
    }))
    .ok()
}
/// Returns the aggregate statistic corresponding to `aggregate` from `stats`
///
/// Only `Min` and `Max` are supported here; any other aggregate yields `None`.
fn get_aggregate(stats: &ColumnStatistics, aggregate: Aggregate) -> Option<&ScalarValue> {
    match aggregate {
        Aggregate::Min => stats.min_value.as_ref(),
        Aggregate::Max => stats.max_value.as_ref(),
        _ => None,
    }
}
/// Retention time expression, "time > retention_time".
///
/// The comparison is strict: rows exactly at `retention_time` (nanoseconds)
/// are excluded.
pub fn retention_expr(retention_time: i64) -> Expr {
    col(TIME_COLUMN_NAME).gt(lit_timestamp_nano(retention_time))
}
// Unit tests: drive `prune_chunks` end-to-end against in-memory `TestChunk`
// fixtures with hand-crafted column statistics. `false` in the result means
// the chunk was pruned.
#[cfg(test)]
mod test {
    use std::{ops::Not, sync::Arc};
    use datafusion::prelude::{col, lit};
    use datafusion_util::lit_dict;
    use predicate::Predicate;
    use schema::merge::SchemaMerger;
    use crate::{test::TestChunk, QueryChunk};
    use super::*;
    #[test]
    fn test_empty() {
        test_helpers::maybe_start_logging();
        let c1 = Arc::new(TestChunk::new("chunk1"));
        let predicate = Predicate::new();
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result, Err(NotPrunedReason::NoExpressionOnPredicate));
    }
    #[test]
    fn test_pruned_f64() {
        test_helpers::maybe_start_logging();
        // column1 > 100.0 where
        //   c1: [0.0, 10.0] --> pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_f64_field_column_with_stats(
            "column1",
            Some(0.0),
            Some(10.0),
        ));
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100.0f64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false]);
    }
    #[test]
    fn test_pruned_i64() {
        test_helpers::maybe_start_logging();
        // column1 > 100 where
        //   c1: [0, 10] --> pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_i64_field_column_with_stats(
            "column1",
            Some(0),
            Some(10),
        ));
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100i64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false]);
    }
    #[test]
    fn test_pruned_u64() {
        test_helpers::maybe_start_logging();
        // column1 > 100 where
        //   c1: [0, 10] --> pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_u64_field_column_with_stats(
            "column1",
            Some(0),
            Some(10),
        ));
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100u64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false]);
    }
    #[test]
    fn test_pruned_bool() {
        test_helpers::maybe_start_logging();
        // column1 where
        //   c1: [false, false] --> pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_bool_field_column_with_stats(
            "column1",
            Some(false),
            Some(false),
        ));
        let predicate = Predicate::new().with_expr(col("column1"));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false; 1]);
    }
    #[test]
    fn test_pruned_string() {
        test_helpers::maybe_start_logging();
        // column1 > "z" where
        //   c1: ["a", "q"] --> pruned
        let c1 = Arc::new(
            TestChunk::new("chunk1").with_string_field_column_with_stats(
                "column1",
                Some("a"),
                Some("q"),
            ),
        );
        let predicate = Predicate::new().with_expr(col("column1").gt(lit("z")));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false]);
    }
    #[test]
    fn test_not_pruned_f64() {
        test_helpers::maybe_start_logging();
        // column1 < 100.0 where
        //   c1: [0.0, 10.0] --> not pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_f64_field_column_with_stats(
            "column1",
            Some(0.0),
            Some(10.0),
        ));
        let predicate = Predicate::new().with_expr(col("column1").lt(lit(100.0f64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true]);
    }
    #[test]
    fn test_not_pruned_i64() {
        test_helpers::maybe_start_logging();
        // column1 < 100 where
        //   c1: [0, 10] --> not pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_i64_field_column_with_stats(
            "column1",
            Some(0),
            Some(10),
        ));
        let predicate = Predicate::new().with_expr(col("column1").lt(lit(100i64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true]);
    }
    #[test]
    fn test_not_pruned_u64() {
        test_helpers::maybe_start_logging();
        // column1 < 100 where
        //   c1: [0, 10] --> not pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_u64_field_column_with_stats(
            "column1",
            Some(0),
            Some(10),
        ));
        let predicate = Predicate::new().with_expr(col("column1").lt(lit(100u64)));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true]);
    }
    #[test]
    fn test_not_pruned_bool() {
        test_helpers::maybe_start_logging();
        // column1
        //   c1: [false, true] --> not pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_bool_field_column_with_stats(
            "column1",
            Some(false),
            Some(true),
        ));
        let predicate = Predicate::new().with_expr(col("column1"));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true]);
    }
    #[test]
    fn test_not_pruned_string() {
        test_helpers::maybe_start_logging();
        // column1 < "z" where
        //   c1: ["a", "q"] --> not pruned
        let c1 = Arc::new(
            TestChunk::new("chunk1").with_string_field_column_with_stats(
                "column1",
                Some("a"),
                Some("q"),
            ),
        );
        let predicate = Predicate::new().with_expr(col("column1").lt(lit("z")));
        let result = prune_chunks(&c1.schema().clone(), &[c1], &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true]);
    }
    // Helper: union of all chunk schemas, used when chunks differ in columns.
    fn merge_schema(chunks: &[Arc<dyn QueryChunk>]) -> Schema {
        let mut merger = SchemaMerger::new();
        for chunk in chunks {
            merger = merger.merge(chunk.schema()).unwrap();
        }
        merger.build()
    }
    #[test]
    fn test_pruned_null() {
        test_helpers::maybe_start_logging();
        // column1 > 100 where
        //   c1: [Null, 10] --> pruned
        //   c2: [0, Null] --> not pruned
        //   c3: [Null, Null] --> not pruned (min/max are not known in chunk 3)
        //   c4: Null --> not pruned (no statistics at all)
        let c1 = Arc::new(TestChunk::new("chunk1").with_i64_field_column_with_stats(
            "column1",
            None,
            Some(10),
        )) as Arc<dyn QueryChunk>;
        let c2 = Arc::new(TestChunk::new("chunk2").with_i64_field_column_with_stats(
            "column1",
            Some(0),
            None,
        )) as Arc<dyn QueryChunk>;
        let c3 = Arc::new(
            TestChunk::new("chunk3").with_i64_field_column_with_stats("column1", None, None),
        ) as Arc<dyn QueryChunk>;
        let c4 = Arc::new(TestChunk::new("chunk4").with_i64_field_column("column1"))
            as Arc<dyn QueryChunk>;
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100i64)));
        let chunks = vec![c1, c2, c3, c4];
        let schema = merge_schema(&chunks);
        let result = prune_chunks(&schema, &chunks, &predicate);
        assert_eq!(
            result.expect("pruning succeeds"),
            vec![false, true, true, true]
        );
    }
    #[test]
    fn test_pruned_multi_chunk() {
        test_helpers::maybe_start_logging();
        // column1 > 100 where
        //   c1: [0, 10] --> pruned
        //   c2: [0, 1000] --> not pruned
        //   c3: [10, 20] --> pruned
        //   c4: [None, None] --> not pruned
        //   c5: [10, None] --> not pruned
        //   c6: [None, 10] --> pruned
        let c1 = Arc::new(TestChunk::new("chunk1").with_i64_field_column_with_stats(
            "column1",
            Some(0),
            Some(10),
        )) as Arc<dyn QueryChunk>;
        let c2 = Arc::new(TestChunk::new("chunk2").with_i64_field_column_with_stats(
            "column1",
            Some(0),
            Some(1000),
        )) as Arc<dyn QueryChunk>;
        let c3 = Arc::new(TestChunk::new("chunk3").with_i64_field_column_with_stats(
            "column1",
            Some(10),
            Some(20),
        )) as Arc<dyn QueryChunk>;
        let c4 = Arc::new(
            TestChunk::new("chunk4").with_i64_field_column_with_stats("column1", None, None),
        ) as Arc<dyn QueryChunk>;
        let c5 = Arc::new(TestChunk::new("chunk5").with_i64_field_column_with_stats(
            "column1",
            Some(10),
            None,
        )) as Arc<dyn QueryChunk>;
        let c6 = Arc::new(TestChunk::new("chunk6").with_i64_field_column_with_stats(
            "column1",
            None,
            Some(20),
        )) as Arc<dyn QueryChunk>;
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100i64)));
        let chunks = vec![c1, c2, c3, c4, c5, c6];
        let schema = merge_schema(&chunks);
        let result = prune_chunks(&schema, &chunks, &predicate);
        assert_eq!(
            result.expect("pruning succeeds"),
            vec![false, true, false, true, true, false]
        );
    }
    #[test]
    fn test_pruned_different_schema() {
        test_helpers::maybe_start_logging();
        // column1 > 100 where
        //   c1: column1 [0, 100], column2 [0, 4] --> pruned (in range, column2 ignored)
        //   c2: column1 [0, 1000], column2 [0, 4] --> not pruned (in range, column2 ignored)
        //   c3: None, column2 [0, 4] --> not pruned (no stats for column1)
        let c1 = Arc::new(
            TestChunk::new("chunk1")
                .with_i64_field_column_with_stats("column1", Some(0), Some(100))
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let c2 = Arc::new(
            TestChunk::new("chunk2")
                .with_i64_field_column_with_stats("column1", Some(0), Some(1000))
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let c3 = Arc::new(TestChunk::new("chunk3").with_i64_field_column_with_stats(
            "column2",
            Some(0),
            Some(4),
        )) as Arc<dyn QueryChunk>;
        let predicate = Predicate::new().with_expr(col("column1").gt(lit(100i64)));
        let chunks = vec![c1, c2, c3];
        let schema = merge_schema(&chunks);
        let result = prune_chunks(&schema, &chunks, &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![false, true, true]);
    }
    #[test]
    fn test_pruned_is_null() {
        test_helpers::maybe_start_logging();
        // Verify that this type of predicate prunes correctly when column1 is null
        // (this is a common predicate type created by the InfluxRPC planner)
        // (NOT column1 IS NULL) AND (column1 = 'bar')
        // No nulls, can't prune as it has values that are more and less than 'bar'
        let c1 = Arc::new(
            TestChunk::new("chunk1").with_tag_column_with_nulls_and_full_stats(
                "column1",
                Some("a"),
                Some("z"),
                100,
                None,
                0,
            ),
        ) as Arc<dyn QueryChunk>;
        // Has no nulls, can prune it out based on statistics alone
        let c2 = Arc::new(
            TestChunk::new("chunk2").with_tag_column_with_nulls_and_full_stats(
                "column1",
                Some("a"),
                Some("b"),
                100,
                None,
                0,
            ),
        ) as Arc<dyn QueryChunk>;
        // Has nulls, but can still be pruned out based on statistics alone
        let c3 = Arc::new(
            TestChunk::new("chunk3").with_tag_column_with_nulls_and_full_stats(
                "column1",
                Some("a"),
                Some("b"),
                100,
                None,
                1, // that one pesky null!
            ),
        ) as Arc<dyn QueryChunk>;
        let predicate = Predicate::new().with_expr(
            col("column1")
                .is_null()
                .not()
                .and(col("column1").eq(lit_dict("bar"))),
        );
        let chunks = vec![c1, c2, c3];
        let schema = merge_schema(&chunks);
        let result = prune_chunks(&schema, &chunks, &predicate);
        assert_eq!(result.expect("pruning succeeds"), vec![true, false, false]);
    }
    #[test]
    fn test_pruned_multi_column() {
        test_helpers::maybe_start_logging();
        // column1 > 100 AND column2 < 5 where
        //   c1: column1 [0, 1000], column2 [0, 4] --> not pruned (both in range)
        //   c2: column1 [0, 10], column2 [0, 4] --> pruned (column1 and column2 out of range)
        //   c3: column1 [0, 10], column2 [5, 10] --> pruned (column1 out of range, column2 out of range)
        //   c4: column1 [1000, 2000], column2 [0, 4] --> not pruned (column1 in range, column2 in range)
        //   c5: column1 [0, 10], column2 Null --> pruned (column1 out of range, but column2 has no stats)
        //   c6: column1 Null, column2 [0, 4] --> not pruned (column1 has no stats, column2 out of range)
        let c1 = Arc::new(
            TestChunk::new("chunk1")
                .with_i64_field_column_with_stats("column1", Some(0), Some(1000))
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let c2 = Arc::new(
            TestChunk::new("chunk2")
                .with_i64_field_column_with_stats("column1", Some(0), Some(10))
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let c3 = Arc::new(
            TestChunk::new("chunk3")
                .with_i64_field_column_with_stats("column1", Some(0), Some(10))
                .with_i64_field_column_with_stats("column2", Some(5), Some(10)),
        ) as Arc<dyn QueryChunk>;
        let c4 = Arc::new(
            TestChunk::new("chunk4")
                .with_i64_field_column_with_stats("column1", Some(1000), Some(2000))
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let c5 = Arc::new(
            TestChunk::new("chunk5")
                .with_i64_field_column_with_stats("column1", Some(0), Some(10))
                .with_i64_field_column("column2"),
        ) as Arc<dyn QueryChunk>;
        let c6 = Arc::new(
            TestChunk::new("chunk6")
                .with_i64_field_column("column1")
                .with_i64_field_column_with_stats("column2", Some(0), Some(4)),
        ) as Arc<dyn QueryChunk>;
        let predicate = Predicate::new().with_expr(
            col("column1")
                .gt(lit(100i64))
                .and(col("column2").lt(lit(5i64))),
        );
        let chunks = vec![c1, c2, c3, c4, c5, c6];
        let schema = merge_schema(&chunks);
        let result = prune_chunks(&schema, &chunks, &predicate);
        assert_eq!(
            result.expect("Pruning succeeds"),
            vec![true, false, false, true, false, true]
        );
    }
}
|
use regex::Regex;
use std::collections::HashMap;
use std::env;
use std::io;
#[macro_use]
extern crate lazy_static;
// Field definition: maps a field name to its two valid inclusive ranges,
// stored as [(lo, hi); 2] (a value is valid if it falls in either range).
type FieldDef = HashMap<String, [(usize, usize); 2]>;
// A ticket is the list of its numeric field values, in column order.
type Ticket = Vec<usize>;
/// Parses lines of the form `name: a-b or c-d` into a map from field name to
/// its two (inclusive) ranges.
fn parse_field_definitions(text: &str) -> FieldDef {
    lazy_static! {
        static ref REX: Regex = Regex::new(r"([\w ]+): (\d+)-(\d+) or (\d+)-(\d+)").unwrap();
    }
    REX.captures_iter(text)
        .map(|caps| {
            let name = String::from(caps.get(1).unwrap().as_str());
            // The four numeric capture groups; the regex guarantees digits.
            let bound = |idx: usize| caps[idx].parse::<usize>().unwrap();
            (name, [(bound(2), bound(3)), (bound(4), bound(5))])
        })
        .collect()
}
/// Splits the puzzle input into its three blank-line separated sections:
/// field definitions, "your ticket", and the nearby tickets.
fn parse_notes(text: String) -> (FieldDef, Ticket, Vec<Ticket>) {
    let sections: Vec<&str> = text.split("\n\n").collect();
    let fields = parse_field_definitions(sections[0]);
    // "your ticket:" header line followed by the ticket itself.
    let own_ticket: Ticket = sections[1]
        .lines()
        .last()
        .unwrap()
        .split(",")
        .map(|v| v.parse::<usize>().unwrap())
        .collect();
    // "nearby tickets:" header line, then one ticket per line.
    let nearby: Vec<Ticket> = sections[2]
        .lines()
        .skip(1)
        .map(|line| line.split(",").map(|v| v.parse::<usize>().unwrap()).collect())
        .collect();
    (fields, own_ticket, nearby)
}
/// Part 1: sums every ticket value that fits no field's ranges (the "ticket
/// scanning error rate") and collects the tickets with no such value.
///
/// Returns `(error_rate, valid_tickets)`.
fn part1(fields: &FieldDef, tickets: Vec<Ticket>) -> (usize, Vec<Ticket>) {
    let mut valid_tickets = Vec::new();
    let mut error_rate = 0;
    for ticket in &tickets {
        let mut invalid_ticket = false;
        for &value in ticket {
            // A value is valid if it falls inside either range of any field.
            // `any` short-circuits, unlike the original flag loop that always
            // scanned every field.
            let fits_some_field = fields
                .values()
                .any(|ranges| ranges.iter().any(|&(lo, hi)| value >= lo && value <= hi));
            if !fits_some_field {
                error_rate += value;
                invalid_ticket = true;
            }
        }
        if !invalid_ticket {
            valid_tickets.push(ticket.clone());
        }
    }
    (error_rate, valid_tickets)
}
/// Transposes tickets (rows of values) into per-column value lists.
fn get_ticket_cols(tickets: Vec<Ticket>) -> Vec<Ticket> {
    let mut columns: Vec<Ticket> = Vec::new();
    for ticket in tickets {
        for (i, value) in ticket.iter().enumerate() {
            // Grow the column list lazily as wider tickets appear.
            if columns.len() <= i {
                columns.push(Vec::new());
            }
            columns[i].push(*value);
        }
    }
    columns
}
/// For each ticket column, lists every field whose ranges cover all of the
/// column's values. Each candidate is tagged with its column index.
/// The range test assumes the first interval precedes the second.
fn get_column_possibilities(
    fields: &FieldDef,
    ticket_cols: Vec<Ticket>,
) -> Vec<Vec<(usize, String)>> {
    let mut columns = Vec::with_capacity(ticket_cols.len());
    for (i, col) in ticket_cols.iter().enumerate() {
        let mut candidates = Vec::new();
        for (fieldname, ranges) in fields {
            // A value is outside the field when it falls below the first
            // range, in the gap between the ranges, or above the second.
            let out_of_range = |v: &usize| {
                *v < ranges[0].0
                    || *v > ranges[0].1 && *v < ranges[1].0
                    || *v > ranges[1].1
            };
            if !col.iter().any(out_of_range) {
                candidates.push((i, fieldname.clone()));
            }
        }
        columns.push(candidates);
    }
    columns
}
/// Usage: `<program> <input-file>`.
/// Prints the part-1 error rate, then the part-2 product of the ticket's
/// "departure*" fields.
fn main() -> Result<(), io::Error> {
    let args: Vec<String> = env::args().collect();
    let text = std::fs::read_to_string(&args[1]).expect("read_to_string failed");
    let (fields, ticket, tickets) = parse_notes(text);
    let (error_rate, valid_tickets) = part1(&fields, tickets);
    let ticket_cols = get_ticket_cols(valid_tickets);
    let mut possibilities = get_column_possibilities(&fields, ticket_cols);
    println!("{:?}", error_rate);
    // Constraint elimination: repeatedly pin the column with exactly one
    // remaining candidate field, then strike that field from all others.
    let mut columns = HashMap::<usize, String>::new();
    while columns.len() < fields.len() {
        possibilities.sort_by_key(|candidates| candidates.len());
        let next = possibilities.remove(0);
        // Previously a non-singleton front list was silently discarded,
        // losing a column and eventually panicking on an empty remove(0).
        // Fail loudly instead: elimination cannot make further progress.
        assert_eq!(next.len(), 1, "no unambiguous column assignment remains");
        let (id, label) = &next[0];
        columns.insert(*id, label.clone());
        for list in possibilities.iter_mut() {
            list.retain(|(_, x)| label != x);
        }
    }
    let mut product = 1;
    for (id, field) in columns {
        if field.starts_with("departure") {
            product *= ticket[id];
        }
    }
    println!("{}", product);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_part1() {
        let text = String::from(
            "class: 1-3 or 5-7\n\
            row: 6-11 or 33-44\n\
            seat: 13-40 or 45-50\n\
            \n\
            your ticket:\n\
            7,1,14\n\
            \n\
            nearby tickets:\n\
            7,3,47\n\
            40,4,50\n\
            55,2,20\n\
            38,6,12\n",
        );
        let (fields, ticket, tickets) = parse_notes(text);
        assert_eq!(ticket, Ticket::from([7, 1, 14]));
        assert_eq!(
            tickets,
            vec![
                Ticket::from([7, 3, 47]),
                Ticket::from([40, 4, 50]),
                Ticket::from([55, 2, 20]),
                Ticket::from([38, 6, 12])
            ]
        );
        let (error_rate, valid_tickets) = part1(&fields, tickets);
        assert_eq!(error_rate, 71);
        assert_eq!(valid_tickets.len(), 1);
    }

    // Previously this test ended in `assert_eq!(true, false)` — a debugging
    // placeholder that made the suite fail unconditionally. It now asserts
    // real expectations for the AoC part-2 example.
    #[test]
    fn test_part2() {
        let text = String::from(
            "class: 0-1 or 4-19\n\
            row: 0-5 or 8-19\n\
            seat: 0-13 or 16-19\n\
            \n\
            your ticket:\n\
            11,12,13\n\
            \n\
            nearby tickets:\n\
            3,9,18\n\
            15,1,5\n\
            5,14,9",
        );
        let (fields, _ticket, tickets) = parse_notes(text);
        let (error_rate, valid_tickets) = part1(&fields, tickets);
        // Every nearby ticket in this example is valid.
        assert_eq!(error_rate, 0);
        assert_eq!(valid_tickets.len(), 3);
        let ticket_cols = get_ticket_cols(valid_tickets);
        let columns = get_column_possibilities(&fields, ticket_cols);
        // Column 0 is unambiguous: only "row" covers all of its values.
        assert_eq!(columns[0], vec![(0, String::from("row"))]);
        // Columns 1 and 2 still need elimination to resolve.
        assert_eq!(columns[1].len(), 2);
        assert_eq!(columns[2].len(), 3);
    }
}
|
use std::path::PathBuf;
use tauri::{InvokeMessage, Params};
/// Builds the single Tauri `invoke` dispatcher that routes every
/// `#[tauri::command]` below (plus the commands registered from other
/// crate modules) to calls coming from the webview.
pub fn invoke_handler<M>() -> impl Fn(InvokeMessage<M>) + Send + Sync + 'static
where
    M: Params,
{
    tauri::generate_handler![
        free_caches,
        check_dir,
        check_file,
        open,
        open_file_location,
        file_size,
        crate::transactions::websocket,
        crate::transactions::cancel_transaction,
        crate::appdata::update_settings,
        crate::appdata::validate_gmod,
        crate::appdata::window_resized,
        crate::game_addons::browse_installed_addons,
        crate::game_addons::get_installed_addon,
        crate::game_addons::downloader_extract_gmas,
        crate::steam::is_steam_connected,
        crate::steam::get_current_user,
        crate::steam::users::get_steam_user,
        crate::steam::workshop::fetch_workshop_items,
        crate::steam::workshop::fetch_workshop_item,
        crate::steam::workshop::browse_my_workshop,
        crate::steam::workshop::workshop_item_channel,
        crate::steam::downloads::workshop_download,
        crate::steam::publishing::verify_whitelist,
        crate::steam::publishing::publish,
        crate::steam::publishing::verify_icon,
        crate::addon_size_analyzer::addon_size_analyzer,
        crate::content_generator::get_content_generator_manifests,
        crate::content_generator::update_content_generator_manifest,
        crate::gma::preview::preview_gma,
        crate::gma::preview::extract_preview_entry,
        crate::gma::preview::extract_preview_gma,
        crate::search::search,
        crate::search::search_channel,
        crate::search::full_search,
    ]
}
/// Drops the in-memory caches held by the addon and workshop modules and
/// clears the search index (`search!()` is a project macro — presumably it
/// yields the global search state; confirm in crate::search).
#[tauri::command]
pub fn free_caches() {
    crate::game_addons::free_caches();
    crate::steam::workshop::free_caches();
    search!().clear();
}
/// Returns true when `path` is an absolute path to an existing file and,
/// if `extension` is given, the file's extension matches it
/// case-insensitively.
#[tauri::command]
pub fn check_file(path: PathBuf, extension: Option<String>) -> bool {
    // Reject relative paths and non-files outright.
    if !path.is_absolute() || !path.is_file() {
        return false;
    }
    match extension {
        None => true,
        Some(wanted) => path
            .extension()
            .map(|ext| wanted.eq_ignore_ascii_case(&ext.to_string_lossy()))
            .unwrap_or(false),
    }
}
/// True when `path` is an absolute path to an existing directory.
#[tauri::command]
pub fn check_dir(path: PathBuf) -> bool {
    path.is_absolute() && path.is_dir()
}
/// Opens `path` with the platform's default handler (delegates to
/// crate::path).
#[tauri::command]
fn open(path: PathBuf) {
    crate::path::open(path);
}
/// Reveals `path` in the platform file manager (delegates to crate::path).
#[tauri::command]
fn open_file_location(path: PathBuf) {
    crate::path::open_file_location(path);
}
/// Size of the file at `path` in bytes, or `None` when its metadata
/// cannot be read (missing file, permissions, ...).
#[tauri::command]
fn file_size(path: PathBuf) -> Option<u64> {
    match path.metadata() {
        Ok(metadata) => Some(metadata.len()),
        Err(_) => None,
    }
}
|
use std::{error::Error, path::PathBuf};
use prost_build::Config;
/// Build script: compiles proto/gossip.proto into Rust types via prost.
fn main() -> Result<(), Box<dyn Error>> {
    // Locate the proto directory relative to this crate's manifest.
    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("proto");
    let proto = root.join("gossip.proto");
    // Re-run the build script only when the proto definition changes.
    println!("cargo:rerun-if-changed={}", proto.display());
    // `.bytes(["."])` maps every proto `bytes` field to `bytes::Bytes`.
    Config::new()
        .bytes(["."])
        .compile_protos(&[proto], &[root])?;
    Ok(())
}
|
extern crate tempfile;
use tempfile::{Builder, NamedTempFile, TempPath};
extern crate yaml_rust;
use yaml_rust::yaml;
extern crate os_pipe;
use os_pipe::{pipe, IntoStdio};
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
use std::io::{Write, Read};
use std::env;
// A single command to execute: a display name, the program to run, and
// its argument list.
#[derive(Hash)]
struct ExecCommand {
    name: String,
    cmd: String,
    args: Vec<String>,
}
// Identifies one failed command by its display name.
#[derive(Hash)]
struct ExecResult {
    name: String,
}
// Aggregate outcome of a run: counts plus the failed commands.
#[derive(Hash)]
struct ExecResults {
    total_cnt: usize,
    success_cnt: usize,
    failed_cnt: usize,
    failed_cmds: Vec<ExecResult>,
}
/// Prints a run summary: total/success/failure counts, then the names of
/// any commands that failed.
fn show_result(results: &ExecResults) {
    let failed_cmds = &results.failed_cmds;
    println!("Total commands: {}", results.total_cnt);
    println!("Success commands: {}", results.success_cnt);
    println!("Failed commands: {}", results.failed_cnt);
    if results.failed_cnt > 0 {
        // Fixed typo in the user-facing message ("were faild").
        println!("Following commands failed");
        for failed in failed_cmds {
            println!("{}", failed.name);
        }
    }
}
/// Returns the tempfile's full path as a String.
///
/// The previous version re-joined `env::temp_dir()` with the file name,
/// which could double a path separator and silently assumed the file lived
/// in the default temp directory; the NamedTempFile already knows its own
/// absolute path, so use it directly.
fn filename_from_tempfile(file: &NamedTempFile) -> String {
    file.path().to_string_lossy().into_owned()
}
/// Writes `file_data` to a fresh temp file named `virtrun-<16 random>.sh`
/// and returns the open handle (the file is deleted when it is dropped).
fn create_temporay_shell_script(file_data: &String) -> NamedTempFile {
    let mut file = Builder::new()
        .prefix("virtrun-")
        .suffix(".sh")
        .rand_bytes(16)
        .tempfile().expect("failed to create tempfile");
    // write_all retries on short writes; the previous plain write() could
    // silently truncate the script.
    file.write_all(file_data.as_bytes()).unwrap();
    file
}
/// Translates the YAML `commands` array into ExecCommands.
///
/// Multi-line `command` strings are written out as temporary shell scripts
/// and run via `bash <script>`; the returned TempPaths must be kept alive
/// by the caller until the commands have run, or the scripts are deleted.
fn create_cmdline_parameters(data: &yaml::Yaml) -> (Vec<ExecCommand>, Vec<TempPath>) {
    let commands = &data["commands"];
    let mut exec_commands: Vec<ExecCommand> = Vec::new();
    let mut tempfiles: Vec<TempPath> = Vec::new();
    match *commands {
        yaml::Yaml::Array(ref v) => {
            for h in v {
                let mut args_vec = Vec::new();
                let mut cmd = h["command"].as_str().unwrap().to_string();
                // A newline marks a multi-line script: dump it to a temp
                // file and rewrite the command to `bash <script-path>`.
                if cmd.contains("\n") {
                    let file = create_temporay_shell_script(&cmd);
                    let name = format!("{}", filename_from_tempfile(&file));
                    tempfiles.push(file.into_temp_path());
                    cmd = "bash".to_string();
                    args_vec.push(name);
                }
                // `args` may be a scalar string, an array of ints/strings,
                // or a bare integer; anything else aborts.
                if !h["args"].is_badvalue() {
                    let args = &h["args"];
                    match &args {
                        yaml::Yaml::String(ref s) => {
                            args_vec.push(s.clone());
                        },
                        yaml::Yaml::Array(ref array) => {
                            for val in array {
                                if val.as_i64() != None {
                                    args_vec.push(format!("{}", val.as_i64().unwrap()));
                                } else if val.as_str() != None {
                                    args_vec.push(format!("{}", val.as_str().unwrap()));
                                }
                            }
                        },
                        yaml::Yaml::Integer(ref i) => {
                            // NOTE(review): `i` is ignored; the value is
                            // re-read via args.as_i64() — same result.
                            args_vec.push(format!("{}", args.as_i64().unwrap()));
                        },
                        _ => panic!("arg type {:?} is not supported", args)
                    }
                } else {
                    // NOTE(review): a single empty-string argument is passed
                    // when no args are given — confirm the commands tolerate
                    // an extra "" argv entry.
                    args_vec.push("".to_string());
                }
                let ec = ExecCommand {
                    name: h["name"].as_str().unwrap().to_string(),
                    cmd: cmd,
                    args: args_vec,
                };
                exec_commands.push(ec);
            }
        },
        _ => panic!("commands should be an array")
    }
    (exec_commands, tempfiles)
}
/// Runs every configured command locally, capturing merged stdout+stderr
/// through a pipe, and returns the aggregated results.
fn run_local(data: &yaml::Yaml) -> ExecResults {
    // `tmpfiles` looks unused but must stay bound: dropping the TempPaths
    // would delete the generated shell scripts before they run.
    let (commands, tmpfiles) = create_cmdline_parameters(&data);
    let total_commands = commands.len();
    let mut failed_cmds: Vec<ExecResult> = Vec::new();
    for command in commands {
        println!("run {} {:?}", command.cmd, command.args);
        let mut child = std::process::Command::new(command.cmd);
        child.args(&command.args);
        // One pipe receives both streams: fout for stdout, a clone for stderr.
        let (mut fin, fout) = pipe().unwrap();
        let fout_clone = fout.try_clone().unwrap();
        child.stdout(fout.into_stdio());
        child.stderr(fout_clone.into_stdio());
        let mut handle = child.spawn().unwrap();
        // Drop the Command so the parent's copies of the write ends close;
        // otherwise read_to_string below would never see EOF.
        drop(child);
        // Drain the pipe before wait() so a chatty child can't block on a
        // full pipe buffer.
        let mut output = String::new();
        fin.read_to_string(&mut output).unwrap();
        let status = handle.wait().unwrap();
        println!("status: {}", status);
        println!("result: {}", output);
        if !status.success() {
            let result = ExecResult {
                name: command.name,
            };
            failed_cmds.push(result);
        }
    }
    let results = ExecResults {
        total_cnt: total_commands,
        success_cnt: total_commands - failed_cmds.len(),
        failed_cnt: failed_cmds.len(),
        failed_cmds: failed_cmds,
    };
    results
}
/// Launches qemu with the configured options and extracts the pty device
/// it reports on its (merged) output. Panics if qemu exits unsuccessfully.
fn run_qemu(data: &yaml::Yaml) {
    let mut qemu_args = create_qemu_args(data);
    let qemu_bin = format!("{}", data["qemu_options"]["bin"].as_str().unwrap());
    println!("{:?}", qemu_args);
    let mut child = std::process::Command::new(qemu_bin);
    child.args(&qemu_args);
    // Merge stdout and stderr into one pipe, same pattern as run_local.
    let (mut fin, fout) = pipe().unwrap();
    let fout_clone = fout.try_clone().unwrap();
    child.stdout(fout.into_stdio());
    child.stderr(fout_clone.into_stdio());
    let mut handle = child.spawn().unwrap();
    let pid = handle.id();
    // Close the parent's write ends so read_to_string can reach EOF
    // (with -daemonize qemu forks and the foreground process exits).
    drop(child);
    let mut output = String::new();
    fin.read_to_string(&mut output).unwrap();
    let status = handle.wait().unwrap();
    let pty = get_pty_device_from_qemu_output(&output);
    // NOTE(review): "pyt" looks like a typo for "pty" in this log line.
    println!("pid: {}, pyt: {}", pid, pty);
    if !status.success() {
        panic!("failed to run qemu");
    }
}
/// Extracts the pty device path from qemu's serial-redirect message, e.g.
/// `qemu-system-x86_64: -serial pty: char device redirected to /dev/pts/3 (label serial0)`
/// — the device is the third-from-last space-separated token.
///
/// Takes `&str` instead of `&String` (callers passing `&String` still work
/// via deref coercion) and fails with a clear message on unexpectedly short
/// output instead of an opaque usize-underflow panic.
fn get_pty_device_from_qemu_output(output: &str) -> String {
    let v: Vec<&str> = output.split(' ').collect();
    assert!(v.len() >= 3, "unexpected qemu output: {:?}", output);
    v[v.len() - 3].to_string()
}
/// Reads a `{k: <name>, v: <value>}` pair from a YAML hash, stringifying
/// the value (integer or string). Panics on any other value type.
fn get_yaml_hash_value(item: &yaml::Yaml, k: &str, v: &str) -> (String, String) {
    let opt = item[k].as_str().unwrap();
    let val = &item[v];
    let actual_val = if let Some(i) = val.as_i64() {
        i.to_string()
    } else if let Some(s) = val.as_str() {
        s.to_string()
    } else {
        panic!("unknown data type for {}", opt)
    };
    (opt.to_string(), actual_val)
}
/// Flattens `qemu_options.params` ([{opt, val}, ...]) into a flat argv
/// list: ["-opt", "val", ..., "-daemonize"].
fn create_qemu_args(data: &yaml::Yaml) -> Vec<String> {
    let params = &data["qemu_options"]["params"];
    let items = match *params {
        yaml::Yaml::Array(ref v) => v,
        _ => panic!("qemu_options is not array type!"),
    };
    let mut cmdline: Vec<String> = Vec::new();
    for item in items {
        // Each entry must be a hash carrying "opt" and "val" keys.
        match *item {
            yaml::Yaml::Hash(_) => {}
            _ => panic!("qemu_options value is not hash type!"),
        }
        let (opt, val) = get_yaml_hash_value(item, "opt", "val");
        cmdline.push(format!("-{}", opt));
        cmdline.push(val);
    }
    // Always run qemu detached.
    cmdline.push("-daemonize".to_string());
    cmdline
}
fn load_yaml(path: String) -> std::io::Result<(yaml::Yaml)> {
let file = File::open(path)?;
let mut buf_reader = BufReader::new(file);
let mut contents = String::new();
buf_reader.read_to_string(&mut contents)?;
let docs = yaml::YamlLoader::load_from_str(&contents).unwrap();
let doc = docs[0].clone();
Ok(doc)
}
/// Entry point: loads the YAML config given on the command line, boots the
/// requested virtualization backend, then runs the commands over the
/// requested connection type and prints a summary.
fn main() -> std::io::Result<()> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("usage: {} [path to yaml file]", args[0]);
        std::process::exit(1);
    }
    let yaml = args[1].to_string();
    let file_data = load_yaml(yaml).unwrap();
    let virt_type = file_data["virt_type"].as_str().unwrap();
    let connect_type = file_data["connect"]["type"].as_str().unwrap();
    // Definitely assigned only in the "local" arm below; the compiler
    // guarantees it is initialized before show_result.
    let results;
    match virt_type as &str {
        "qemu" => {
            // NOTE(review): "*[*]" looks like a typo for "[*]" (cf. the
            // run_local message below) — confirm before changing output.
            println!("*[*] Start run_qemu");
            run_qemu(&file_data);
        },
        "local" => {
        },
        _ => panic!("unknown virt_type {}", virt_type)
    }
    match connect_type {
        "local" => {
            println!("[*] Start run_local");
            results = run_local(&file_data);
        },
        // "ssh" => {
        //
        // },
        // "serial" => {
        // },
        _ => panic!("type {} is not suppoted yet.", connect_type)
    }
    show_result(&results);
    // println!("{:?}", file_data);
    Ok(())
}
|
use std::collections::LinkedList;
use piston_window::{Context, G2d};
use piston_window::types::Color;
use draw::draw_pixel;
use direction::Direction;
use utils::Pixel;
// Body color as RGBA (green).
const SNAKE_COLOR: Color = [0.0, 0.8, 0.2, 1.0];
pub struct Snake {
    // Current movement direction of the head.
    direction: Direction,
    // Body cells; the front element is the head.
    body: LinkedList<Pixel>,
    // Segment removed by the last move, restorable when the snake eats.
    tail: Option<Pixel>,
}
impl Snake {
pub fn new(x_cord:i32,y_cord:i32) -> Snake{
let mut body: LinkedList<Pixel> = LinkedList::new();
body.push_back(Pixel{x_cord:x_cord+2, y_cord});
body.push_back(Pixel{x_cord:x_cord+1, y_cord});
body.push_back(Pixel{x_cord, y_cord});
Snake {
direction: Direction::Right,
body,
tail:None,
}
}
pub fn draw(&self, con: &Context, graphics: &mut G2d){
for pixel in &self.body {
draw_pixel(SNAKE_COLOR, pixel.x_cord, pixel.y_cord, con, graphics)
}
}
pub fn head_position(&self) -> (i32, i32){
let head = self.body.front().unwrap();
(head.x_cord, head.y_cord)
}
pub fn move_forward(&mut self, actual_dir: Option<Direction>) {
match actual_dir{
Some(d) => self.direction = d,
None => (),
}
let (last_x, last_y):(i32, i32) = self.head_position();
let new_pixel = match self.direction {
Direction::Up => Pixel {x_cord:last_x, y_cord:last_y-1,},
Direction::Down => Pixel {x_cord:last_x, y_cord:last_y+1},
Direction::Left => Pixel {x_cord:last_x-1, y_cord:last_y},
Direction::Right => Pixel {x_cord:last_x+1, y_cord:last_y},
};
self.body.push_front(new_pixel);
let removed = self.body.pop_back().unwrap();
self.tail = Some(removed);
}
pub fn head_direction(&self) -> Direction{
self.direction
}
pub fn next_head(&self, direction: Option<Direction>) -> (i32, i32) {
let (head_x, head_y):(i32, i32) = self.head_position();
let mut mov_dir = self.direction;
match direction{
Some(d) => mov_dir = d,
None => {}
}
match mov_dir {
Direction::Up => (head_x, head_y-1),
Direction::Down => (head_x, head_y+1),
Direction::Left => (head_x-1, head_y),
Direction::Right => (head_x+1, head_y),
}
}
pub fn restore_tail(&mut self) {
let pixel = self.tail.clone().unwrap();
self.body.push_back(pixel);
}
pub fn tail_collision(&self, x_cord:i32, y_cord:i32) -> bool {
for pixel in &self.body {
if x_cord == pixel.x_cord && y_cord== pixel.y_cord {
return true;
}
}
return false;
}
} |
// This file is Copyright its original authors, visible in version control
// history.
//
// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// You may not use this file except in accordance with one or both of these
// licenses.
use serde::{de, Deserialize, Deserializer, Serialize};
use std::{fmt, str::FromStr};
pub mod admin;
pub mod node;
/// Serde deserialization decorator to map empty Strings to None,
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromStr,
T::Err: fmt::Display,
{
let opt = Option::<String>::deserialize(de)?;
match opt.as_deref() {
None | Some("") => Ok(None),
Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
}
}
/// Page/size pagination with an optional free-text filter.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PaginationRequest {
    pub page: u32,
    pub take: u32,
    // Empty query strings deserialize to `None`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for PaginationRequest {
    // NOTE(review): pages default to 0 here but to 1 in ListPaymentsParams /
    // ListChannelsParams / ListTransactionsParams — confirm which is intended.
    fn default() -> Self {
        Self {
            page: 0,
            take: 10,
            query: None,
        }
    }
}
/// Filter criteria for listing payments.
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct PaymentsFilter {
    // Empty origin strings map to `None`; `status` has no such mapping, so
    // an empty string stays Some("") — TODO confirm that is intended.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub origin: Option<String>,
    pub status: Option<String>,
}
/// Filter criteria for listing forwarded payments by channel and time window.
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct ForwardedPaymentsFilter {
    // The empty-string mapping applies to from_channel_id only; the serde
    // attribute does not extend to the following fields.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub from_channel_id: Option<String>,
    pub to_channel_id: Option<String>,
    pub from_hours_since_epoch: Option<u64>,
    pub to_hours_since_epoch: Option<u64>,
}
/// Query parameters for listing payments: pagination plus payment filters.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ListPaymentsParams {
    pub page: u32,
    pub take: u32,
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub origin: Option<String>,
    pub status: Option<String>,
    // Consistency fix: every sibling List*Params struct — and the
    // PaginationRequest this converts into — maps empty query strings to
    // `None`; this field previously kept `Some("")`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for ListPaymentsParams {
    fn default() -> Self {
        Self {
            page: 1,
            take: 10,
            query: None,
            origin: None,
            status: None,
        }
    }
}
// Projects just the payment-filtering fields.
impl From<ListPaymentsParams> for PaymentsFilter {
    fn from(params: ListPaymentsParams) -> Self {
        Self {
            origin: params.origin,
            status: params.status,
        }
    }
}
// Projects just the pagination fields.
impl From<ListPaymentsParams> for PaginationRequest {
    fn from(params: ListPaymentsParams) -> Self {
        Self {
            page: params.page,
            take: params.take,
            query: params.query,
        }
    }
}
/// Query parameters for listing channels (pagination + optional query).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ListChannelsParams {
    pub page: u32,
    pub take: u32,
    // Empty query strings deserialize to `None`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for ListChannelsParams {
    fn default() -> Self {
        Self {
            page: 1,
            take: 10,
            query: None,
        }
    }
}
// Projects the pagination fields.
impl From<ListChannelsParams> for PaginationRequest {
    fn from(params: ListChannelsParams) -> Self {
        Self {
            page: params.page,
            take: params.take,
            query: params.query,
        }
    }
}
/// Query parameters for listing transactions (pagination + optional query).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ListTransactionsParams {
    pub page: u32,
    pub take: u32,
    // Empty query strings deserialize to `None`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for ListTransactionsParams {
    fn default() -> Self {
        Self {
            page: 1,
            take: 10,
            query: None,
        }
    }
}
// Projects the pagination fields.
impl From<ListTransactionsParams> for PaginationRequest {
    fn from(params: ListTransactionsParams) -> Self {
        Self {
            page: params.page,
            take: params.take,
            query: params.query,
        }
    }
}
/// Query parameters for listing known peers (pagination + optional query).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ListKnownPeersParams {
    pub page: u32,
    pub take: u32,
    // Empty query strings deserialize to `None`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for ListKnownPeersParams {
    // NOTE(review): defaults to page 0 while the channel/transaction params
    // default to page 1 — confirm intended.
    fn default() -> Self {
        Self {
            page: 0,
            take: 10,
            query: None,
        }
    }
}
// Projects the pagination fields.
impl From<ListKnownPeersParams> for PaginationRequest {
    fn from(params: ListKnownPeersParams) -> Self {
        Self {
            page: params.page,
            take: params.take,
            query: params.query,
        }
    }
}
/// Query parameters for listing cluster nodes (pagination + optional query).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ListClusterNodesParams {
    pub page: u32,
    pub take: u32,
    // Empty query strings deserialize to `None`.
    #[serde(default, deserialize_with = "empty_string_as_none")]
    pub query: Option<String>,
}
impl Default for ListClusterNodesParams {
    fn default() -> Self {
        Self {
            page: 0,
            take: 10,
            query: None,
        }
    }
}
// Projects the pagination fields.
impl From<ListClusterNodesParams> for PaginationRequest {
    fn from(params: ListClusterNodesParams) -> Self {
        Self {
            page: params.page,
            take: params.take,
            query: params.query,
        }
    }
}
/// Pagination metadata returned alongside a page of results.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct PaginationResponse {
    // Whether more pages exist past the one returned.
    pub has_more: bool,
    // Total number of matching items across all pages.
    pub total: u64,
}
|
use std::cell::Ref;
use std::sync::Arc;
use nalgebra::Vector2;
use rayon::prelude::*;
use smallvec::SmallVec;
use sourcerenderer_core::graphics::*;
use sourcerenderer_core::{
Matrix4,
Platform,
Vec2,
Vec2I,
Vec2UI,
Vec4,
};
use super::desktop_renderer::FrameBindings;
use crate::renderer::passes::conservative::desktop_renderer::setup_frame;
use crate::renderer::passes::light_binning;
use crate::renderer::passes::rt_shadows::RTShadowPass;
use crate::renderer::passes::ssao::SsaoPass;
use crate::renderer::render_path::{
RenderPassParameters,
};
use crate::renderer::renderer_assets::*;
use crate::renderer::renderer_resources::{
HistoryResourceEntry,
RendererResources,
};
use crate::renderer::shader_manager::{
GraphicsPipelineHandle,
GraphicsPipelineInfo,
ShaderManager,
};
// Per-frame constants for the geometry pass.
// NOTE(review): #[repr(C)] — field order and padding presumably mirror a
// shader-side uniform block; confirm against the shaders before editing.
#[repr(C)]
#[derive(Debug, Clone, Copy)]
struct FrameData {
    swapchain_transform: Matrix4,
    // Sub-pixel jitter point — presumably for temporal AA; confirm.
    halton_point: Vec2,
    z_near: f32,
    z_far: f32,
    rt_size: Vector2<u32>,
    cluster_z_bias: f32,
    cluster_z_scale: f32,
    cluster_count: nalgebra::Vector3<u32>,
    point_light_count: u32,
    directional_light_count: u32,
}
/// Forward geometry render pass: draws the scene's static drawables into
/// the "geometry" render target using the prepass depth buffer read-only.
pub struct GeometryPass<P: Platform> {
    // Single trilinear repeat sampler shared by all material bindings.
    sampler: Arc<<P::GraphicsBackend as Backend>::Sampler>,
    // Handle to the textured vertex/fragment pipeline.
    pipeline: GraphicsPipelineHandle,
}
impl<P: Platform> GeometryPass<P> {
    pub const GEOMETRY_PASS_TEXTURE_NAME: &'static str = "geometry";
    // When true, each drawable gets a debug label around its draw calls.
    const DRAWABLE_LABELS: bool = false;
    /// Creates the pass: registers the RGBA8 render target at the given
    /// resolution, creates the shared trilinear sampler, and requests the
    /// textured graphics pipeline compatible with the pass's attachments.
    pub fn new(
        device: &Arc<<P::GraphicsBackend as Backend>::Device>,
        resolution: Vec2UI,
        barriers: &mut RendererResources<P::GraphicsBackend>,
        shader_manager: &mut ShaderManager<P>,
    ) -> Self {
        let texture_info = TextureInfo {
            dimension: TextureDimension::Dim2D,
            format: Format::RGBA8UNorm,
            width: resolution.x,
            height: resolution.y,
            depth: 1,
            mip_levels: 1,
            array_length: 1,
            samples: SampleCount::Samples1,
            usage: TextureUsage::SAMPLED
                | TextureUsage::RENDER_TARGET
                | TextureUsage::COPY_SRC
                | TextureUsage::STORAGE,
            supports_srgb: false,
        };
        barriers.create_texture(Self::GEOMETRY_PASS_TEXTURE_NAME, &texture_info, false);
        // Trilinear filtering, repeat addressing, full mip range.
        let sampler = device.create_sampler(&SamplerInfo {
            mag_filter: Filter::Linear,
            min_filter: Filter::Linear,
            mip_filter: Filter::Linear,
            address_mode_u: AddressMode::Repeat,
            address_mode_v: AddressMode::Repeat,
            address_mode_w: AddressMode::Repeat,
            mip_bias: 0.0,
            max_anisotropy: 0.0,
            compare_op: None,
            min_lod: 0.0,
            max_lod: None,
        });
        // Vertex layout: one 64-byte interleaved buffer. Offsets below
        // (0/16/32/40/48) must match the mesh vertex struct and the
        // shaders/textured.vert.spv inputs.
        let pipeline_info: GraphicsPipelineInfo = GraphicsPipelineInfo {
            vs: "shaders/textured.vert.spv",
            fs: Some("shaders/textured.frag.spv"),
            primitive_type: PrimitiveType::Triangles,
            vertex_layout: VertexLayoutInfo {
                input_assembler: &[InputAssemblerElement {
                    binding: 0,
                    stride: 64,
                    input_rate: InputRate::PerVertex,
                }],
                shader_inputs: &[
                    ShaderInputElement {
                        input_assembler_binding: 0,
                        location_vk_mtl: 0,
                        semantic_name_d3d: String::from(""),
                        semantic_index_d3d: 0,
                        offset: 0,
                        format: Format::RGB32Float,
                    },
                    ShaderInputElement {
                        input_assembler_binding: 0,
                        location_vk_mtl: 1,
                        semantic_name_d3d: String::from(""),
                        semantic_index_d3d: 0,
                        offset: 16,
                        format: Format::RGB32Float,
                    },
                    ShaderInputElement {
                        input_assembler_binding: 0,
                        location_vk_mtl: 2,
                        semantic_name_d3d: String::from(""),
                        semantic_index_d3d: 0,
                        offset: 32,
                        format: Format::RG32Float,
                    },
                    ShaderInputElement {
                        input_assembler_binding: 0,
                        location_vk_mtl: 3,
                        semantic_name_d3d: String::from(""),
                        semantic_index_d3d: 0,
                        offset: 40,
                        format: Format::RG32Float,
                    },
                    ShaderInputElement {
                        input_assembler_binding: 0,
                        location_vk_mtl: 4,
                        semantic_name_d3d: String::from(""),
                        semantic_index_d3d: 0,
                        offset: 48,
                        format: Format::R32Float,
                    },
                ],
            },
            rasterizer: RasterizerInfo {
                fill_mode: FillMode::Fill,
                cull_mode: CullMode::Back,
                front_face: FrontFace::Clockwise,
                sample_count: SampleCount::Samples1,
            },
            // Depth test against the prepass result without writing:
            // LessEqual lets fragments at prepass depth through.
            depth_stencil: DepthStencilInfo {
                depth_test_enabled: true,
                depth_write_enabled: false,
                depth_func: CompareFunc::LessEqual,
                stencil_enable: false,
                stencil_read_mask: 0u8,
                stencil_write_mask: 0u8,
                stencil_front: StencilInfo::default(),
                stencil_back: StencilInfo::default(),
            },
            blend: BlendInfo {
                alpha_to_coverage_enabled: false,
                logic_op_enabled: false,
                logic_op: LogicOp::And,
                constants: [0f32, 0f32, 0f32, 0f32],
                attachments: &[AttachmentBlendInfo::default()],
            },
        };
        // Attachment layout must match the render pass begun in execute():
        // color 0 = geometry target, attachment 1 = read-only depth.
        let pipeline = shader_manager.request_graphics_pipeline(
            &pipeline_info,
            &RenderPassInfo {
                attachments: &[
                    AttachmentInfo {
                        format: texture_info.format,
                        samples: texture_info.samples,
                    },
                    AttachmentInfo {
                        format: Format::D24,
                        samples: SampleCount::Samples1,
                    },
                ],
                subpasses: &[SubpassInfo {
                    input_attachments: &[],
                    output_color_attachments: &[OutputAttachmentRef {
                        index: 0,
                        resolve_attachment_index: None,
                    }],
                    depth_stencil_attachment: Some(DepthStencilAttachmentRef {
                        index: 1,
                        read_only: true,
                    }),
                }],
            },
            0,
        );
        Self { sampler, pipeline }
    }
    #[profiling::function]
    /// Records the geometry pass: transitions the required resources,
    /// begins the render pass, then records per-drawable draws on parallel
    /// inner command buffers (one chunk of drawables per buffer).
    pub(super) fn execute(
        &mut self,
        cmd_buffer: &mut <P::GraphicsBackend as Backend>::CommandBuffer,
        pass_params: &RenderPassParameters<'_, P>,
        depth_name: &str,
        bindings: &FrameBindings<P::GraphicsBackend>,
    ) {
        cmd_buffer.begin_label("Geometry pass");
        let static_drawables = pass_params.scene.scene.static_drawables();
        let (width, height) = {
            let info = pass_params.resources.texture_info(Self::GEOMETRY_PASS_TEXTURE_NAME);
            (info.width, info.height)
        };
        // Transition the color target for rendering.
        let rtv_ref = pass_params.resources.access_view(
            cmd_buffer,
            Self::GEOMETRY_PASS_TEXTURE_NAME,
            BarrierSync::RENDER_TARGET,
            BarrierAccess::RENDER_TARGET_READ | BarrierAccess::RENDER_TARGET_WRITE,
            TextureLayout::RenderTarget,
            true,
            &TextureViewInfo::default(),
            HistoryResourceEntry::Current,
        );
        let rtv = &*rtv_ref;
        // Prepass depth is consumed read-only (depth writes are disabled
        // in the pipeline).
        let prepass_depth_ref = pass_params.resources.access_view(
            cmd_buffer,
            depth_name,
            BarrierSync::EARLY_DEPTH | BarrierSync::LATE_DEPTH,
            BarrierAccess::DEPTH_STENCIL_READ,
            TextureLayout::DepthStencilRead,
            false,
            &TextureViewInfo::default(),
            HistoryResourceEntry::Current,
        );
        let prepass_depth = &*prepass_depth_ref;
        // Ambient occlusion result, sampled in the fragment shader.
        let ssao_ref = pass_params.resources.access_view(
            cmd_buffer,
            SsaoPass::<P>::SSAO_TEXTURE_NAME,
            BarrierSync::FRAGMENT_SHADER | BarrierSync::COMPUTE_SHADER,
            BarrierAccess::SAMPLING_READ,
            TextureLayout::Sampled,
            false,
            &TextureViewInfo::default(),
            HistoryResourceEntry::Current,
        );
        let ssao = &*ssao_ref;
        // Per-cluster light bitmask produced by the light binning pass.
        let light_bitmask_buffer_ref = pass_params.resources.access_buffer(
            cmd_buffer,
            light_binning::LightBinningPass::LIGHT_BINNING_BUFFER_NAME,
            BarrierSync::FRAGMENT_SHADER,
            BarrierAccess::STORAGE_READ,
            HistoryResourceEntry::Current,
        );
        let light_bitmask_buffer = &*light_bitmask_buffer_ref;
        // RT shadows when the device supports ray tracing, otherwise a
        // zero texture. `rt_shadows` is declared outside the branch so the
        // Ref it borrows from outlives `shadows`.
        let rt_shadows: Ref<Arc<<P::GraphicsBackend as Backend>::TextureView>>;
        let shadows = if pass_params.device.supports_ray_tracing() {
            rt_shadows = pass_params.resources.access_view(
                cmd_buffer,
                RTShadowPass::SHADOWS_TEXTURE_NAME,
                BarrierSync::FRAGMENT_SHADER,
                BarrierAccess::SAMPLING_READ,
                TextureLayout::Sampled,
                false,
                &TextureViewInfo::default(),
                HistoryResourceEntry::Current,
            );
            &*rt_shadows
        } else {
            pass_params.zero_textures.zero_texture_view
        };
        /*let clusters = barriers.access_buffer(
          cmd_buffer,
          ClusteringPass::<P::GraphicsBackend>::CLUSTERS_BUFFER_NAME,
          BarrierSync::FRAGMENT_SHADER,
          BarrierAccess::STORAGE_READ,
          HistoryResourceEntry::Current
        ).clone();*/
        // Color is cleared; depth is loaded from the prepass and kept
        // read-only, matching the pipeline's render pass layout.
        cmd_buffer.begin_render_pass(
            &RenderPassBeginInfo {
                attachments: &[
                    RenderPassAttachment {
                        view: RenderPassAttachmentView::RenderTarget(&rtv),
                        load_op: LoadOp::Clear,
                        store_op: StoreOp::Store,
                    },
                    RenderPassAttachment {
                        view: RenderPassAttachmentView::DepthStencil(&prepass_depth),
                        load_op: LoadOp::Load,
                        store_op: StoreOp::Store,
                    },
                ],
                subpasses: &[SubpassInfo {
                    input_attachments: &[],
                    output_color_attachments: &[OutputAttachmentRef {
                        index: 0,
                        resolve_attachment_index: None,
                    }],
                    depth_stencil_attachment: Some(DepthStencilAttachmentRef {
                        index: 1,
                        read_only: true,
                    }),
                }],
            },
            RenderpassRecordingMode::CommandBuffers,
        );
        let device = pass_params.device;
        let assets = pass_params.assets;
        let zero_textures = pass_params.zero_textures;
        let lightmap = pass_params.scene.lightmap;
        let inheritance = cmd_buffer.inheritance();
        // Drawables are recorded in parallel, CHUNK_SIZE per inner buffer.
        const CHUNK_SIZE: usize = 128;
        let view = &pass_params.scene.views[pass_params.scene.active_view_index];
        let chunks = view.drawable_parts.par_chunks(CHUNK_SIZE);
        let pipeline = pass_params.shader_manager.get_graphics_pipeline(self.pipeline);
        let inner_cmd_buffers: Vec<
            <P::GraphicsBackend as Backend>::CommandBufferSubmission,
        > = chunks
            .map(|chunk| {
                let mut command_buffer = device
                    .graphics_queue()
                    .create_inner_command_buffer(inheritance);
                command_buffer.set_pipeline(PipelineBinding::Graphics(&pipeline));
                command_buffer.set_viewports(&[Viewport {
                    position: Vec2::new(0.0f32, 0.0f32),
                    extent: Vec2::new(width as f32, height as f32),
                    min_depth: 0.0f32,
                    max_depth: 1.0f32,
                }]);
                // Oversized scissor effectively disables scissoring.
                command_buffer.set_scissors(&[Scissor {
                    position: Vec2I::new(0, 0),
                    extent: Vec2UI::new(9999, 9999),
                }]);
                // Frequent-frequency bindings shared by the whole chunk:
                // lightmap (or zero), sampler, shadows, light bitmask, SSAO.
                command_buffer.bind_sampling_view_and_sampler(
                    BindingFrequency::Frequent,
                    0,
                    if let Some(lightmap) = lightmap { &lightmap.view } else { zero_textures.zero_texture_view },
                    &self.sampler,
                );
                command_buffer.bind_sampler(BindingFrequency::Frequent, 1, &self.sampler);
                command_buffer.bind_sampling_view_and_sampler(
                    BindingFrequency::Frequent,
                    2,
                    &shadows,
                    &self.sampler,
                );
                command_buffer.bind_storage_buffer(
                    BindingFrequency::Frequent,
                    3,
                    &light_bitmask_buffer,
                    0,
                    WHOLE_BUFFER,
                );
                command_buffer.bind_sampling_view_and_sampler(
                    BindingFrequency::Frequent,
                    4,
                    &ssao,
                    &self.sampler,
                );
                // command_buffer.bind_storage_buffer(BindingFrequency::Frequent, 5, &clusters, 0, WHOLE_BUFFER);
                command_buffer.track_texture_view(zero_textures.zero_texture_view);
                command_buffer.track_texture_view(zero_textures.zero_texture_view_black);
                // Material bindings are re-uploaded only when the material
                // changes between consecutive parts.
                let mut last_material = Option::<&RendererMaterial>::None;
                for part in chunk.iter() {
                    let drawable = &static_drawables[part.drawable_index];
                    if Self::DRAWABLE_LABELS {
                        command_buffer.begin_label(&format!("Drawable {}", part.drawable_index));
                    }
                    setup_frame::<P::GraphicsBackend>(&mut command_buffer, bindings);
                    // Per-drawable model transform pushed inline.
                    command_buffer.upload_dynamic_data_inline(
                        &[drawable.transform],
                        ShaderType::VertexShader,
                    );
                    // Assets may still be streaming in; skip gracefully.
                    let model = assets.get_model(drawable.model);
                    if model.is_none() {
                        log::info!("Skipping draw because of missing model");
                        continue;
                    }
                    let model = model.unwrap();
                    let mesh = assets.get_mesh(model.mesh_handle());
                    if mesh.is_none() {
                        log::info!("Skipping draw because of missing mesh");
                        continue;
                    }
                    let mesh = mesh.unwrap();
                    let materials: SmallVec<[&RendererMaterial; 8]> = model
                        .material_handles()
                        .iter()
                        .map(|handle| assets.get_material(*handle))
                        .collect();
                    command_buffer
                        .set_vertex_buffer(mesh.vertices.buffer(), mesh.vertices.offset() as usize);
                    if let Some(indices) = mesh.indices.as_ref() {
                        command_buffer.set_index_buffer(
                            indices.buffer(),
                            indices.offset() as usize,
                            IndexFormat::U32,
                        );
                    }
                    let range = &mesh.parts[part.part_index];
                    let material = &materials[part.part_index];
                    if last_material.as_ref() != Some(material) {
                        // Mirrors the fragment shader's material uniform
                        // block — keep layout in sync with the shader.
                        #[repr(C)]
                        #[derive(Clone, Copy)]
                        struct MaterialInfo {
                            albedo: Vec4,
                            roughness_factor: f32,
                            metalness_factor: f32,
                            albedo_texture_index: u32,
                        }
                        let mut material_info = MaterialInfo {
                            albedo: Vec4::new(1f32, 1f32, 1f32, 1f32),
                            roughness_factor: 0f32,
                            metalness_factor: 0f32,
                            albedo_texture_index: 0u32,
                        };
                        // Default all three material slots to the zero
                        // texture, then override per material value below.
                        command_buffer.bind_sampling_view_and_sampler(
                            BindingFrequency::VeryFrequent,
                            0,
                            zero_textures.zero_texture_view,
                            &self.sampler,
                        );
                        command_buffer.bind_sampling_view_and_sampler(
                            BindingFrequency::VeryFrequent,
                            1,
                            zero_textures.zero_texture_view,
                            &self.sampler,
                        );
                        command_buffer.bind_sampling_view_and_sampler(
                            BindingFrequency::VeryFrequent,
                            2,
                            zero_textures.zero_texture_view,
                            &self.sampler,
                        );
                        let albedo_value = material.get("albedo").unwrap();
                        match albedo_value {
                            RendererMaterialValue::Texture(handle) => {
                                let albedo_view = &assets.get_texture(*handle).view;
                                command_buffer.bind_sampling_view_and_sampler(
                                    BindingFrequency::VeryFrequent,
                                    0,
                                    albedo_view,
                                    &self.sampler,
                                );
                                command_buffer.track_texture_view(albedo_view);
                                material_info.albedo_texture_index = 0;
                            }
                            RendererMaterialValue::Vec4(val) => material_info.albedo = *val,
                            RendererMaterialValue::Float(_) => unimplemented!(),
                        }
                        let roughness_value = material.get("roughness");
                        match roughness_value {
                            Some(RendererMaterialValue::Texture(handle)) => {
                                let roughness_view = &assets.get_texture(*handle).view;
                                command_buffer.bind_sampling_view_and_sampler(
                                    BindingFrequency::VeryFrequent,
                                    1,
                                    roughness_view,
                                    &self.sampler,
                                );
                            }
                            Some(RendererMaterialValue::Vec4(_)) => unimplemented!(),
                            Some(RendererMaterialValue::Float(val)) => {
                                material_info.roughness_factor = *val;
                            }
                            None => {}
                        }
                        let metalness_value = material.get("metalness");
                        match metalness_value {
                            Some(RendererMaterialValue::Texture(handle)) => {
                                let metalness_view = &assets.get_texture(*handle).view;
                                command_buffer.bind_sampling_view_and_sampler(
                                    BindingFrequency::VeryFrequent,
                                    2,
                                    metalness_view,
                                    &self.sampler,
                                );
                            }
                            Some(RendererMaterialValue::Vec4(_)) => unimplemented!(),
                            Some(RendererMaterialValue::Float(val)) => {
                                material_info.metalness_factor = *val;
                            }
                            None => {}
                        }
                        let material_info_buffer = command_buffer
                            .upload_dynamic_data(&[material_info], BufferUsage::CONSTANT);
                        command_buffer.bind_uniform_buffer(
                            BindingFrequency::VeryFrequent,
                            3,
                            &material_info_buffer,
                            0,
                            WHOLE_BUFFER,
                        );
                        last_material = Some(material.clone());
                    }
                    command_buffer.finish_binding();
                    if mesh.indices.is_some() {
                        command_buffer.draw_indexed(1, 0, range.count, range.start, 0);
                    } else {
                        command_buffer.draw(range.count, range.start);
                    }
                    if Self::DRAWABLE_LABELS {
                        command_buffer.end_label();
                    }
                }
                command_buffer.finish()
            })
            .collect();
        cmd_buffer.execute_inner(inner_cmd_buffers);
        cmd_buffer.end_render_pass();
        cmd_buffer.end_label();
    }
}
|
use extra::json::{Json, List, String, Boolean, Number, Null};
use super::{error, Error};
/// A trait for getting values from json
/// A trait for getting values from json
pub trait FromJson {
    /// Convert a JSON object into a Rust type
    /// # Arguments
    ///
    /// * `j` - JSON object to be converted
    ///
    /// # Return Value
    /// Either the desired object, or an Error struct describing the problem.
    /// Implementations below report failures via `error(super::Conversion, ...)`.
    fn from_json(j: &Json) -> Result<Self, Error>;
}
// Identity conversion: any JSON value converts to itself by cloning.
impl FromJson for Json {
    fn from_json(j: &Json) -> Result<Json, Error> {
        Ok(j.clone())
    }
}
// Accepts only JSON numbers; anything else yields a NotNumber conversion error.
impl FromJson for f64 {
    fn from_json(j: & Json) -> Result<f64, Error> {
        match *j {
            Number(n) => Ok(n),
            _ => error(super::Conversion, super::NotNumber)
        }
    }
}
// Same as the f64 impl but narrows the value; precision may be lost silently.
impl FromJson for f32 {
    fn from_json(j: & Json) -> Result<f32, Error> {
        match *j {
            Number(n) => Ok(n as f32),
            _ => error(super::Conversion, super::NotNumber)
        }
    }
}
// Converts a JSON number to an integer. A number that has no exact integer
// representation (fractional or out of range) yields NotInteger rather
// than being truncated.
impl FromJson for int {
    fn from_json(j: & Json) -> Result<int, Error> {
        match *j {
            Number(n) => {
                match n.to_int() {
                    Some(i) => Ok(i),
                    None => error(super::Conversion, super::NotInteger)
                }
            }
            _ => error(super::Conversion, super::NotNumber)
        }
    }
}
// Extracts an owned string from a JSON string value.
impl FromJson for ~str {
    fn from_json(j: & Json) -> Result<~str, Error> {
        match *j {
            String(ref s) => Ok(s.clone()),
            _ => error(super::Conversion, super::NotString)
        }
    }
}
// Extracts a bool from a JSON boolean value.
impl FromJson for bool {
    fn from_json(j: & Json) -> Result<bool, Error> {
        match *j {
            Boolean(b) => Ok(b),
            _ => error(super::Conversion, super::NotBoolean)
        }
    }
}
// Converts a JSON list element-wise.
// NOTE(review): elements that fail conversion are logged at debug level and
// then silently dropped from the result instead of propagating the error —
// confirm this lossy behavior is intended by callers.
impl<T: FromJson> FromJson for ~[T] {
    fn from_json(j: & Json) -> Result<~[T], Error> {
        match *j {
            List(ref l) => {
                let list = l.iter()
                    .map(|json| {
                        let dummy: Result<T, Error> = FromJson::from_json(json);
                        // Log failed conversions before they are filtered out.
                        match dummy {
                            Err(ref e) => debug!("{}", *e),
                            _ => {}
                        }
                        dummy
                    })
                    .filter(|result| result.is_ok())
                    .map(|result| result.unwrap())
                    .collect();
                Ok(list)
            }
            _ => error(super::Conversion, super::NotList)
        }
    }
}
// JSON null maps to None; any other value is converted to Some(T),
// propagating the inner conversion error unchanged.
impl<T: FromJson> FromJson for Option<T> {
    fn from_json(j: & Json) -> Result<Option<T>, Error> {
        match *j {
            Null => Ok(None),
            ref otherwise => {
                let t = FromJson::from_json(otherwise);
                match t {
                    Ok(thing) => Ok(Some(thing)),
                    Err(v) => Err(v)
                }
            }
        }
    }
}
// Tries the Ok-side conversion first, then the Err-side; if neither type
// accepts the value the result is a NotEither conversion error.
impl<T: FromJson, E: FromJson> FromJson for Result<T, E> {
    fn from_json(j: & Json) -> Result<Result<T, E>, Error> {
        let t: Result<T, Error> = FromJson::from_json(j);
        match t {
            Ok(t_res) => { return Ok(Ok(t_res)); }
            _ => {
                let e: Result<E, Error> = FromJson::from_json(j);
                match e {
                    Ok(e_res) => { return Ok(Err(e_res)); }
                    _ => {
                        return error(super::Conversion, super::NotEither);
                    }
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use extra::json::{Json, from_str};
    use jsonutil::{JsonLike};
    // Sample payload mimicking a paginated API response.
    static DATA : &'static str = r###"{"count": 2, "results":[{"Name": "regalia", "Author": "madjar", "ID": 42}]}"###;
    // Derives a FromJson impl for `Y`, mapping each JSON key to a field.
    json_struct!(Y,
                 "Name" -> name: ~str,
                 "Author" -> author: ~str,
                 "ID" -> id: int)
    #[test]
    fn test_convert_and_macro() {
        struct X {
            count: int,
            results: ~[Y]
        }
        // Derives FromJson for `X`, using the field names as JSON keys.
        from_json!(X, count, results)
        let json: Json = from_str(DATA).unwrap();
        // Get value from the json without having to match the result
        let x: X = super::FromJson::from_json(&json).unwrap();
        assert!(x.count == 2i);
        assert!(x.results[0].name == ~"regalia");
        assert!(x.results[0].author == ~"madjar");
        assert!(x.results[0].id == 42i);
    }
}
/*!
This is currently a thin abstraction wrapper around libsodium/sodiumoxide.
It sacrifices some minor memory efficiency to provide a more straightforward, interoperable API.
*/
// error_chain recursion limit
#![recursion_limit = "1024"]
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate lazy_static;
extern crate sodiumoxide;
pub mod errors;
pub mod hash;
pub mod init;
pub mod kx;
pub mod rand;
pub mod sign;
pub mod sym;
|
fn main() {
let input = include_str!("day9.txt");
let mut v: Vec<i64> = Vec::new();
let split = input.split("\n");
for s in split {
v.push(s.parse().unwrap());
}
for i in 25..v.len() {
let mut works = false;
for j in (i-25)..i {
for k in (i-25)..i {
if v[j] + v[k] == v[i] {
works = true;
break;
}
}
}
if !works {
println!("{}", v[i]);
}
}
} |
use super::{Fraction, NFrac, PFrac, UFrac, UFraction};
use crate::{common::*, numeric::Gcd};
typ! {
    // Swaps numerator and denominator; the PFrac/NFrac sign wrapper is kept
    // unchanged. Both parts are bounded NonZero so the flipped fraction
    // still has a non-zero denominator.
    pub fn Reciprocal<frac>(frac: _) {
        match frac {
            #[generics(num: Unsigned + NonZero, deno: Unsigned + NonZero)]
            UFrac::<num, deno> => UFrac::<deno, num>,
            #[generics(num: Unsigned + NonZero, deno: Unsigned + NonZero)]
            NFrac::<UFrac<num, deno>> => NFrac::<UFrac<deno, num>>,
            #[generics(num: Unsigned + NonZero, deno: Unsigned + NonZero)]
            PFrac::<UFrac<num, deno>> => PFrac::<UFrac<deno, num>>,
        }
    }
    // Puts a fraction in lowest terms by dividing numerator and denominator
    // by their GCD; the sign wrapper, if any, is preserved.
    pub fn Reduce<frac>(frac: _) {
        match frac {
            #[generics(num: Unsigned, deno: Unsigned + NonZero)]
            UFrac::<num, deno> => {
                let gcd = Gcd(num, deno);
                let num: Unsigned = num / gcd;
                let deno: Unsigned + NonZero = deno / gcd;
                UFrac::<num, deno>
            }
            #[generics(num: Unsigned, deno: Unsigned + NonZero)]
            NFrac::<UFrac<num, deno>> => {
                let gcd = Gcd(num, deno);
                let num: Unsigned = num / gcd;
                let deno: Unsigned + NonZero = deno / gcd;
                NFrac::<UFrac<num, deno>>
            }
            #[generics(num: Unsigned, deno: Unsigned + NonZero)]
            PFrac::<UFrac<num, deno>> => {
                let gcd = Gcd(num, deno);
                let num: Unsigned = num / gcd;
                let deno: Unsigned + NonZero = deno / gcd;
                PFrac::<UFrac<num, deno>>
            }
        }
    }
    // Unsigned addition via cross-multiplication:
    // ln/ld + rn/rd = (ln*rd + rn*ld) / (ld*rd), reduced to lowest terms.
    pub fn UFracAdd<ln, ld, rn, rd>(UFrac::<ln, ld>: UFraction, UFrac::<rn, rd>: UFraction) -> UFraction
    where
        ln: Unsigned,
        ld: Unsigned + NonZero,
        rn: Unsigned,
        rd: Unsigned + NonZero,
    {
        let num: Unsigned = ln * rd + rn * ld;
        let deno: Unsigned + NonZero = ld * rd;
        let frac = UFrac::<num, deno>;
        let frac: UFraction = Reduce(frac);
        frac
    }
    // Signed addition. Same-sign operands add magnitudes and keep the sign;
    // opposite-sign operands subtract the smaller magnitude from the larger
    // (compared via cross-multiplication, ties yield +0) and take the sign
    // of the larger. All results are reduced.
    pub fn FracAdd<lhs, rhs>(lhs: Fraction, rhs: Fraction) -> Fraction
    {
        match lhs {
            #[generics(ln: Unsigned, ld: Unsigned + NonZero)]
            PFrac::<UFrac<ln, ld>> => {
                match rhs {
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    PFrac::<UFrac<rn, rd>> => {
                        // (+) + (+): add magnitudes, stay positive.
                        let num: Unsigned = ln * rd + rn * ld;
                        let deno: Unsigned + NonZero = ld * rd;
                        Reduce(PFrac::<UFrac<num, deno>>)
                    }
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    NFrac::<UFrac<rn, rd>> => {
                        // (+) + (-): sign follows the larger magnitude.
                        if ln * rd >= rn * ld {
                            let num: Unsigned = ln * rd - rn * ld;
                            let deno: Unsigned + NonZero = ld * rd;
                            Reduce(PFrac::<UFrac<num, deno>>)
                        } else {
                            let num: Unsigned = rn * ld - ln * rd;
                            let deno: Unsigned + NonZero = ld * rd;
                            Reduce(NFrac::<UFrac::<num, deno>>)
                        }
                    }
                }
            }
            #[generics(ln: Unsigned, ld: Unsigned + NonZero)]
            NFrac::<UFrac<ln, ld>> => {
                match rhs {
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    PFrac::<UFrac<rn, rd>> => {
                        // (-) + (+): mirror of the case above.
                        if ln * rd >= rn * ld {
                            let num: Unsigned = ln * rd - rn * ld;
                            let deno: Unsigned + NonZero = ld * rd;
                            Reduce(NFrac::<UFrac<num, deno>>)
                        } else {
                            let num: Unsigned = rn * ld - ln * rd;
                            let deno: Unsigned + NonZero = ld * rd;
                            Reduce(PFrac::<UFrac::<num, deno>>)
                        }
                    }
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    NFrac::<UFrac<rn, rd>> => {
                        // (-) + (-): add magnitudes, stay negative.
                        let num: Unsigned = ln * rd + rn * ld;
                        let deno: Unsigned + NonZero = ld * rd;
                        Reduce(NFrac::<UFrac<num, deno>>)
                    }
                }
            }
        }
    }
    // Unsigned subtraction: ln/ld - rn/rd = (ln*rd - rn*ld) / (ld*rd).
    // NOTE(review): the numerator subtraction is unsigned — this only
    // type-checks when lhs >= rhs; confirm callers guarantee that.
    pub fn UFracSub<ln, ld, rn, rd>(UFrac::<ln, ld>: UFraction, UFrac::<rn, rd>: UFraction) -> UFraction
    where
        ln: Unsigned,
        ld: Unsigned + NonZero,
        rn: Unsigned,
        rd: Unsigned + NonZero,
    {
        let num: Unsigned = ln * rd - rn * ld;
        let deno: Unsigned + NonZero = ld * rd;
        let frac = UFrac::<num, deno>;
        Reduce(frac)
    }
    // Signed subtraction, implemented as lhs + (-rhs).
    pub fn FracSub<lhs, rhs>(lhs: Fraction, rhs: Fraction) -> Fraction
    {
        let neg_rhs: Fraction = -rhs;
        FracAdd(lhs, neg_rhs)
    }
    // Unsigned multiplication: (ln*rn)/(ld*rd), reduced inline by the GCD
    // rather than calling Reduce.
    pub fn UFracMul<ln, ld, rn, rd>(UFrac::<ln, ld>: UFraction, UFrac::<rn, rd>: UFraction) -> UFraction
    where
        ln: Unsigned,
        ld: Unsigned + NonZero,
        rn: Unsigned,
        rd: Unsigned + NonZero,
    {
        let num: Unsigned = ln * rn;
        let deno: Unsigned = ld * rd;
        let gcd = Gcd(num, deno);
        let num: Unsigned = num / gcd;
        let deno: Unsigned + NonZero = deno / gcd;
        UFrac::<num, deno>
    }
    // Signed multiplication: magnitudes multiply, and the usual sign rule
    // applies (same signs -> positive, mixed signs -> negative).
    pub fn FracMul<lhs, rhs>(lhs: Fraction, rhs: Fraction) -> Fraction {
        match lhs {
            #[generics(ln: Unsigned, ld: Unsigned + NonZero)]
            PFrac::<UFrac<ln, ld>> => {
                match rhs {
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    PFrac::<UFrac<rn, rd>> => {
                        let num: Unsigned = ln * rn;
                        let deno: Unsigned + NonZero = ld * rd;
                        let frac = PFrac::<UFrac<num, deno>>;
                        Reduce(frac)
                    }
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    NFrac::<UFrac<rn, rd>> => {
                        let num: Unsigned = ln * rn;
                        let deno: Unsigned + NonZero = ld * rd;
                        let frac = NFrac::<UFrac<num, deno>>;
                        Reduce(frac)
                    }
                }
            }
            #[generics(ln: Unsigned, ld: Unsigned + NonZero)]
            NFrac::<UFrac<ln, ld>> => {
                match rhs {
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    PFrac::<UFrac<rn, rd>> => {
                        let num: Unsigned = ln * rn;
                        let deno: Unsigned + NonZero = ld * rd;
                        let frac = NFrac::<UFrac<num, deno>>;
                        Reduce(frac)
                    }
                    #[generics(rn: Unsigned, rd: Unsigned + NonZero)]
                    NFrac::<UFrac<rn, rd>> => {
                        // Negative times negative is positive.
                        let num: Unsigned = ln * rn;
                        let deno: Unsigned + NonZero = ld * rd;
                        let frac = PFrac::<UFrac<num, deno>>;
                        Reduce(frac)
                    }
                }
            }
        }
    }
    // Unsigned division: multiply by the reciprocal.
    pub fn UFracDiv<lhs, rhs>(lhs: UFraction, rhs: UFraction) -> UFraction
    {
        let reciprocal: UFraction = Reciprocal(rhs);
        UFracMul(lhs, reciprocal)
    }
    // Signed division: multiply by the reciprocal (sign handled by FracMul).
    pub fn FracDiv<lhs, rhs>(lhs: Fraction, rhs: Fraction) -> Fraction
    {
        let reciprocal: Fraction = Reciprocal(rhs);
        FracMul(lhs, reciprocal)
    }
    // Compares two unsigned fractions by cross-multiplying, avoiding any
    // need to reduce first: ln/ld <=> rn/rd iff ln*rd <=> rn*ld.
    pub fn UFracCmp<ln, ld, rn, rd>(UFrac::<ln, ld>: UFraction, UFrac::<rn, rd>: UFraction)
    where
        ln: Unsigned,
        ld: Unsigned + NonZero,
        rn: Unsigned,
        rd: Unsigned + NonZero,
    {
        let lhs = ln * rd;
        let rhs = rn * ld;
        if lhs > rhs {
            Greater
        } else if lhs < rhs {
            Less
        } else {
            Equal
        }
    }
    // Signed comparison: any positive beats any negative; two positives
    // compare by magnitude; two negatives compare by magnitude with the
    // ordering inverted.
    // NOTE(review): +0 and -0 compare as Greater/Less here, not Equal —
    // confirm zero is always represented as PFrac.
    pub fn FracCmp<lhs, rhs>(lhs: Fraction, rhs: Fraction)
    {
        match (lhs, rhs) {
            #[generics(lfrac: UFraction, rfrac: UFraction)]
            (PFrac::<lfrac>, PFrac::<rfrac>) => {
                UFracCmp(lfrac, rfrac)
            }
            #[generics(lfrac: UFraction, rfrac: UFraction)]
            (PFrac::<lfrac>, NFrac::<rfrac>) => {
                Greater
            }
            #[generics(lfrac: UFraction, rfrac: UFraction)]
            (NFrac::<lfrac>, PFrac::<rfrac>) => {
                Less
            }
            #[generics(lfrac: UFraction, rfrac: UFraction)]
            (NFrac::<lfrac>, NFrac::<rfrac>) => {
                // Larger magnitude means smaller value for negatives.
                let cmp = UFracCmp(lfrac, rfrac);
                match cmp {
                    Greater => Less,
                    Equal => Equal,
                    Less => Greater,
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{control::SameOp, Frac, UFrac};
    // Compile-time tests: each line type-checks only when the computed
    // type-level fraction equals the expected one (via SameOp). The `~`
    // inside the Frac! macro denotes a negative fraction.
    #[test]
    fn frac_test() {
        let _: SameOp<ReduceOp<UFrac!(2 / 4)>, UFrac!(1 / 2)> = ();
        let _: SameOp<ReduceOp<UFrac!(0 / 4)>, UFrac!(0 / 1)> = ();
        let _: SameOp<ReduceOp<Frac!(3 / 9)>, Frac!(1 / 3)> = ();
        let _: SameOp<ReduceOp<Frac!(3 / ~9)>, Frac!(~1 / 3)> = ();
        let _: SameOp<ReciprocalOp<UFrac!(3 / 2)>, UFrac!(2 / 3)> = ();
        let _: SameOp<ReciprocalOp<Frac!(3 / 2)>, Frac!(2 / 3)> = ();
        let _: SameOp<ReciprocalOp<Frac!(~3 / 2)>, Frac!(~2 / 3)> = ();
        let _: SameOp<UFracAddOp<UFrac!(1 / 2), UFrac!(1 / 3)>, UFrac!(5 / 6)> = ();
        let _: SameOp<FracAddOp<Frac!(1 / 2), Frac!(1 / 3)>, Frac!(5 / 6)> = ();
        let _: SameOp<FracAddOp<Frac!(1 / 2), Frac!(~1 / 3)>, Frac!(1 / 6)> = ();
        let _: SameOp<FracAddOp<Frac!(~1 / 2), Frac!(1 / 3)>, Frac!(~1 / 6)> = ();
        let _: SameOp<FracAddOp<Frac!(~1 / 2), Frac!(~1 / 3)>, Frac!(~5 / 6)> = ();
        let _: SameOp<UFracSubOp<UFrac!(1 / 2), UFrac!(1 / 3)>, UFrac!(1 / 6)> = ();
        let _: SameOp<FracSubOp<Frac!(1 / 2), Frac!(1 / 3)>, Frac!(1 / 6)> = ();
        let _: SameOp<FracSubOp<Frac!(1 / 2), Frac!(~1 / 3)>, Frac!(5 / 6)> = ();
        let _: SameOp<FracSubOp<Frac!(~1 / 2), Frac!(1 / 3)>, Frac!(~5 / 6)> = ();
        let _: SameOp<FracSubOp<Frac!(~1 / 2), Frac!(~1 / 3)>, Frac!(~1 / 6)> = ();
        let _: SameOp<UFracMulOp<UFrac!(2 / 3), UFrac!(9 / 4)>, UFrac!(3 / 2)> = ();
        let _: SameOp<FracMulOp<Frac!(2 / 3), Frac!(9 / 4)>, Frac!(3 / 2)> = ();
        let _: SameOp<FracMulOp<Frac!(~2 / 3), Frac!(9 / 4)>, Frac!(~3 / 2)> = ();
        let _: SameOp<FracMulOp<Frac!(2 / 3), Frac!(~9 / 4)>, Frac!(~3 / 2)> = ();
        let _: SameOp<FracMulOp<Frac!(~2 / 3), Frac!(~9 / 4)>, Frac!(3 / 2)> = ();
        let _: SameOp<UFracDivOp<UFrac!(2 / 3), UFrac!(4 / 9)>, UFrac!(3 / 2)> = ();
        let _: SameOp<FracDivOp<Frac!(2 / 3), Frac!(4 / 9)>, Frac!(3 / 2)> = ();
        let _: SameOp<FracDivOp<Frac!(~2 / 3), Frac!(4 / 9)>, Frac!(~3 / 2)> = ();
        let _: SameOp<FracDivOp<Frac!(2 / 3), Frac!(~4 / 9)>, Frac!(~3 / 2)> = ();
        let _: SameOp<FracDivOp<Frac!(~2 / 3), Frac!(~4 / 9)>, Frac!(3 / 2)> = ();
        let _: SameOp<UFracCmpOp<UFrac!(1 / 3), UFrac!(1 / 2)>, Less> = ();
        let _: SameOp<UFracCmpOp<UFrac!(1 / 2), UFrac!(1 / 3)>, Greater> = ();
        let _: SameOp<UFracCmpOp<UFrac!(3 / 7), UFrac!(3 / 7)>, Equal> = ();
        let _: SameOp<UFracCmpOp<UFrac!(3 / 7), UFrac!(6 / 14)>, Equal> = ();
        let _: SameOp<FracCmpOp<Frac!(1 / 3), Frac!(1 / 2)>, Less> = ();
        let _: SameOp<FracCmpOp<Frac!(1 / 3), Frac!(~1 / 2)>, Greater> = ();
        let _: SameOp<FracCmpOp<Frac!(~1 / 3), Frac!(1 / 2)>, Less> = ();
        let _: SameOp<FracCmpOp<Frac!(~1 / 3), Frac!(~1 / 2)>, Greater> = ();
    }
}
|
#![allow(clippy::all)]
/// Parses the "x,y" lines that make up the first (blank-line-delimited)
/// section of the puzzle input into coordinate pairs.
fn parse_coordinates(input: &str) -> Vec<(usize, usize)> {
    let coord_section = input.split("\n\n").next().unwrap();
    let mut coords = Vec::new();
    for line in coord_section.split("\n") {
        let mut fields = line.split(",");
        let x = fields.next().unwrap().parse::<usize>().unwrap();
        let y = fields.next().unwrap().parse::<usize>().unwrap();
        coords.push((x, y));
    }
    coords
}
/// A single fold instruction: fold along a vertical line (`X`) or a
/// horizontal line (`Y`) at the given coordinate.
#[derive(Debug)]
enum Fold {
    X(usize),
    Y(usize),
}
/// Parses the second section of the input ("fold along x=5" / "fold along
/// y=7" lines) into an ordered list of fold instructions.
fn parse_fold_sequence(input: &str) -> Vec<Fold> {
    let mut sections = input.trim().split("\n\n");
    // Discard the coordinate section; only the fold lines matter here.
    sections.next();
    sections
        .next()
        .unwrap()
        .split("\n")
        .map(|line| {
            let value = line.split("=").nth(1).unwrap().parse::<usize>().unwrap();
            if line.contains("fold along x") {
                Fold::X(value)
            } else {
                Fold::Y(value)
            }
        })
        .collect()
}
/// Solves part 1: builds the dot grid from the coordinate section, applies
/// every fold in order, printing the visible-dot count after each fold, and
/// finally renders the folded paper.
pub fn part1(input: String) {
    let coords = parse_coordinates(&input);
    // Grid dimensions are determined by the largest coordinates seen.
    let (max_x, max_y) = coords
        .iter()
        .fold((0, 0), |(mx, my), &(x, y)| (mx.max(x), my.max(y)));
    let mut original = TransparentPaper(vec![vec![false; max_x + 1]; max_y + 1]);
    for &(x, y) in &coords {
        original[y][x] = true;
    }
    let fold_sequence = parse_fold_sequence(&input);
    println!("Original shape: {} x {}", original[0].len(), original.len());
    println!("Fold sequence: {:?}", fold_sequence);
    let final_ = fold_sequence.iter().fold(original, |paper, fold| {
        let folded = match fold {
            Fold::X(x) => fold_along_x(paper, *x),
            Fold::Y(y) => fold_along_y(paper, *y),
        };
        println!("Total visible: {}", folded.total_visible());
        folded
    });
    println!("Final:\n{}", final_);
}
/// Grid of dots indexed as `[row][column]`; `true` marks a visible dot.
#[derive(Debug, PartialEq, Eq)]
struct TransparentPaper(Vec<Vec<bool>>);
impl TransparentPaper {
    /// Number of rows on the paper.
    pub fn len(&self) -> usize {
        self.0.len()
    }
    /// Counts every visible dot on the paper.
    pub fn total_visible(&self) -> usize {
        self.0
            .iter()
            .map(|row| row.iter().filter(|&&dot| dot).count())
            .sum()
    }
}
// Allows read access to a whole row via `paper[row]`.
impl std::ops::Index<usize> for TransparentPaper {
    type Output = Vec<bool>;
    fn index(&self, index: usize) -> &Self::Output {
        &self.0[index]
    }
}
// Allows in-place mutation of a row via `paper[row][col] = ...`.
impl std::ops::IndexMut<usize> for TransparentPaper {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.0[index]
    }
}
impl std::fmt::Display for TransparentPaper {
    /// Renders the grid as text: `#` for a dot, `.` for a blank, one row
    /// per line (column count taken from the first row).
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        for i in 0..self.0.len() {
            for j in 0..self.0[0].len() {
                let mark = if self.0[i][j] { "#" } else { "." };
                write!(f, "{}", mark)?;
            }
            write!(f, "\n")?;
        }
        Ok(())
    }
}
/// Folds the paper leftwards along the vertical line at column `x`; dots
/// right of the line are mirrored onto the left half. Handles a right half
/// wider than the left ("hanging" columns that fold past column 0).
/// NOTE(review): the index arithmetic here is intricate (e.g. `after_width`
/// of `x - 1`, and the merge reading `before_fold[i][overlap_idx]` for the
/// left-hand dot) — verify against known-good fold output before changing.
fn fold_along_x(before_fold: TransparentPaper, x: usize) -> TransparentPaper {
    let before_width = before_fold[0].len();
    // Width after the fold: keep the left half when it is the wider side,
    // otherwise the mirrored right half determines the width.
    let after_width = if x > before_width / 2 {
        x - 1
    } else {
        before_fold[0].len() - x - 1
    };
    // Number of right-half columns that extend past column 0 when mirrored.
    let n_hanging = if x > before_width / 2 {
        0
    } else {
        (before_width - x) - x - 1
    };
    let mut after_fold = TransparentPaper(vec![vec![false; after_width]; before_fold.len()]);
    for i in 0..before_fold.len() {
        // Merge each folded right-half column with its mirror on the left.
        for j in x + 1..before_width - n_hanging {
            let overlap_idx = (x - 1) - (j - (x + 1));
            after_fold[i][n_hanging + overlap_idx] =
                before_fold[i][overlap_idx] || before_fold[i][j];
        }
        if n_hanging > 0 {
            // Hanging columns land, reversed, at the new left edge.
            for j in 0..n_hanging {
                after_fold[i][j] = before_fold[i][before_width - 1 - j];
            }
        } else {
            // Left-edge columns nothing folds onto carry over unchanged.
            for j in 0..x - (before_width - (x + 1)) {
                after_fold[i][j] = before_fold[i][j];
            }
        }
    }
    after_fold
}
/// Folds the paper upwards along the horizontal line at row `y`; the exact
/// row-wise analogue of `fold_along_x` (see the caveats noted there).
fn fold_along_y(before_fold: TransparentPaper, y: usize) -> TransparentPaper {
    let before_height = before_fold.len();
    // Height after the fold: top half if it is the taller side, otherwise
    // the mirrored bottom half determines the height.
    let after_height = if y > before_height / 2 {
        y - 1
    } else {
        before_height - y - 1
    };
    // Bottom-half rows that extend past row 0 when mirrored.
    let n_hanging = if y > before_height / 2 {
        0
    } else {
        (before_height - y) - y - 1
    };
    let mut after_fold = TransparentPaper(vec![vec![false; before_fold[0].len()]; after_height]);
    for j in 0..before_fold[0].len() {
        // Merge each folded bottom-half row with its mirror on top.
        for i in y + 1..before_height - n_hanging {
            let overlap_idx = (y - 1) - (i - (y + 1));
            after_fold[n_hanging + overlap_idx][j] =
                before_fold[overlap_idx][j] || before_fold[i][j];
        }
        if n_hanging > 0 {
            // Hanging rows land, reversed, at the new top edge.
            for i in 0..n_hanging {
                after_fold[i][j] = before_fold[before_height - 1 - i][j];
            }
        } else {
            // Top rows nothing folds onto carry over unchanged.
            for i in 0..y - (before_height - (y + 1)) {
                after_fold[i][j] = before_fold[i][j];
            }
        }
    }
    after_fold
}
|
use crate::floodfill::FloodFill;
use crate::game::Game;
use crate::search::{self, Heuristic};
/// The new floodfill agent for royale games
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(default, deny_unknown_fields)]
pub struct SoloHeuristic {
    /// Weight for having enough health to reach the nearest food.
    saturated: f64,
    /// Weight for the fraction of the board reachable by flood fill.
    space: f64,
    /// Weight for the inverse-body-length term.
    size: f64,
}
impl Default for SoloHeuristic {
    /// Hand-tuned default weights for the three evaluation terms.
    fn default() -> Self {
        Self {
            saturated: 0.1,
            space: 1.0,
            size: 0.5,
        }
    }
}
impl Heuristic for SoloHeuristic {
    /// Scores `game` from the perspective of snake 0; once that snake is
    /// dead the fixed `search::LOSS` score is returned.
    fn eval(&self, game: &Game) -> f64 {
        if game.snake_is_alive(0) {
            let you = &game.snakes[0];
            let area = (game.grid.width * game.grid.height) as f64;
            let mut flood_fill = FloodFill::new(game.grid.width, game.grid.height);
            let food_distances = flood_fill.flood_snakes(&game.grid, &game.snakes);
            // Assumes food_distances[0] is snake 0's distance to the
            // nearest food — TODO confirm against FloodFill::flood_snakes.
            let food_distance = food_distances[0] as f64;
            // 1.0 if the nearest food is reachable before health runs out.
            let saturated = if food_distance < you.health as f64 {
                1.0
            } else {
                0.0
            };
            // Fraction of the board snake 0 controls per the flood fill.
            let space = flood_fill.count_space(0) as f64 / area;
            // Diminishing reward for staying short (length 3 scores 1.0).
            let size = (3.0 / you.body.len() as f64).sqrt();
            self.saturated * saturated + self.space * space + self.size * size
        } else {
            search::LOSS
        }
    }
}
|
/// Prints the fixed greeting "Osko foo" to stdout.
pub fn foo() {
    let target = "foo";
    println!("Osko {}", target);
}
|
// svd2rust-generated register access types for the UART interrupt-mask
// (IM) register.
#[doc = "Reader of register IM"]
pub type R = crate::R<u32, super::IM>;
#[doc = "Writer for register IM"]
pub type W = crate::W<u32, super::IM>;
#[doc = "Register IM `reset()`'s with value 0"]
impl crate::ResetValue for super::IM {
    type Type = u32;
    // All interrupt-mask bits start cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `RIMIM`"]
pub type RIMIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RIMIM`"]
pub struct RIMIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 0 of the register value.
impl<'a> RIMIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `CTSMIM`"]
pub type CTSMIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CTSMIM`"]
pub struct CTSMIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 1 of the register value.
impl<'a> CTSMIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `DCDMIM`"]
pub type DCDMIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCDMIM`"]
pub struct DCDMIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 2 of the register value.
impl<'a> DCDMIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `DSRMIM`"]
pub type DSRMIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DSRMIM`"]
pub struct DSRMIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 3 of the register value.
impl<'a> DSRMIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `RXIM`"]
pub type RXIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXIM`"]
pub struct RXIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 4 of the register value.
impl<'a> RXIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `TXIM`"]
pub type TXIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TXIM`"]
pub struct TXIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 5 of the register value.
impl<'a> TXIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `RTIM`"]
pub type RTIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RTIM`"]
pub struct RTIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 6 of the register value.
impl<'a> RTIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `FEIM`"]
pub type FEIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FEIM`"]
pub struct FEIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 7 of the register value.
impl<'a> FEIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `PEIM`"]
pub type PEIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PEIM`"]
pub struct PEIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 8 of the register value.
impl<'a> PEIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `BEIM`"]
pub type BEIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BEIM`"]
pub struct BEIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 9 of the register value.
impl<'a> BEIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
#[doc = "Reader of field `OEIM`"]
pub type OEIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OEIM`"]
pub struct OEIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 10 of the register value.
impl<'a> OEIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `EOTIM`"]
pub type EOTIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EOTIM`"]
pub struct EOTIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 11 of the register value.
impl<'a> EOTIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
#[doc = "Reader of field `_9BITIM`"]
pub type _9BITIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `_9BITIM`"]
pub struct _9BITIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 12 of the register value.
impl<'a> _9BITIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `DMARXIM`"]
pub type DMARXIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DMARXIM`"]
pub struct DMARXIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 16 of the register value (bits 13-15 are unused).
impl<'a> DMARXIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `DMATXIM`"]
pub type DMATXIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DMATXIM`"]
pub struct DMATXIM_W<'a> {
    w: &'a mut W,
}
// Writes land in bit 17 of the register value.
impl<'a> DMATXIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
// Typed read accessors for each IM field; each getter extracts the bit
// named in its doc attribute from the raw register value.
impl R {
    #[doc = "Bit 0 - UART Ring Indicator Modem Interrupt Mask"]
    #[inline(always)]
    pub fn rimim(&self) -> RIMIM_R {
        RIMIM_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - UART Clear to Send Modem Interrupt Mask"]
    #[inline(always)]
    pub fn ctsmim(&self) -> CTSMIM_R {
        CTSMIM_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - UART Data Carrier Detect Modem Interrupt Mask"]
    #[inline(always)]
    pub fn dcdmim(&self) -> DCDMIM_R {
        DCDMIM_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - UART Data Set Ready Modem Interrupt Mask"]
    #[inline(always)]
    pub fn dsrmim(&self) -> DSRMIM_R {
        DSRMIM_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - UART Receive Interrupt Mask"]
    #[inline(always)]
    pub fn rxim(&self) -> RXIM_R {
        RXIM_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - UART Transmit Interrupt Mask"]
    #[inline(always)]
    pub fn txim(&self) -> TXIM_R {
        TXIM_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - UART Receive Time-Out Interrupt Mask"]
    #[inline(always)]
    pub fn rtim(&self) -> RTIM_R {
        RTIM_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - UART Framing Error Interrupt Mask"]
    #[inline(always)]
    pub fn feim(&self) -> FEIM_R {
        FEIM_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - UART Parity Error Interrupt Mask"]
    #[inline(always)]
    pub fn peim(&self) -> PEIM_R {
        PEIM_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - UART Break Error Interrupt Mask"]
    #[inline(always)]
    pub fn beim(&self) -> BEIM_R {
        BEIM_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - UART Overrun Error Interrupt Mask"]
    #[inline(always)]
    pub fn oeim(&self) -> OEIM_R {
        OEIM_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - End of Transmission Interrupt Mask"]
    #[inline(always)]
    pub fn eotim(&self) -> EOTIM_R {
        EOTIM_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - 9-Bit Mode Interrupt Mask"]
    #[inline(always)]
    pub fn _9bitim(&self) -> _9BITIM_R {
        _9BITIM_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 16 - Receive DMA Interrupt Mask"]
    #[inline(always)]
    pub fn dmarxim(&self) -> DMARXIM_R {
        DMARXIM_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Transmit DMA Interrupt Mask"]
    #[inline(always)]
    pub fn dmatxim(&self) -> DMATXIM_R {
        DMATXIM_R::new(((self.bits >> 17) & 0x01) != 0)
    }
}
// Write-proxy constructors; each returns a proxy whose `bit`/`set_bit`/
// `clear_bit` methods modify the corresponding field of the pending value.
impl W {
    #[doc = "Bit 0 - UART Ring Indicator Modem Interrupt Mask"]
    #[inline(always)]
    pub fn rimim(&mut self) -> RIMIM_W {
        RIMIM_W { w: self }
    }
    #[doc = "Bit 1 - UART Clear to Send Modem Interrupt Mask"]
    #[inline(always)]
    pub fn ctsmim(&mut self) -> CTSMIM_W {
        CTSMIM_W { w: self }
    }
    #[doc = "Bit 2 - UART Data Carrier Detect Modem Interrupt Mask"]
    #[inline(always)]
    pub fn dcdmim(&mut self) -> DCDMIM_W {
        DCDMIM_W { w: self }
    }
    #[doc = "Bit 3 - UART Data Set Ready Modem Interrupt Mask"]
    #[inline(always)]
    pub fn dsrmim(&mut self) -> DSRMIM_W {
        DSRMIM_W { w: self }
    }
    #[doc = "Bit 4 - UART Receive Interrupt Mask"]
    #[inline(always)]
    pub fn rxim(&mut self) -> RXIM_W {
        RXIM_W { w: self }
    }
    #[doc = "Bit 5 - UART Transmit Interrupt Mask"]
    #[inline(always)]
    pub fn txim(&mut self) -> TXIM_W {
        TXIM_W { w: self }
    }
    #[doc = "Bit 6 - UART Receive Time-Out Interrupt Mask"]
    #[inline(always)]
    pub fn rtim(&mut self) -> RTIM_W {
        RTIM_W { w: self }
    }
    #[doc = "Bit 7 - UART Framing Error Interrupt Mask"]
    #[inline(always)]
    pub fn feim(&mut self) -> FEIM_W {
        FEIM_W { w: self }
    }
    #[doc = "Bit 8 - UART Parity Error Interrupt Mask"]
    #[inline(always)]
    pub fn peim(&mut self) -> PEIM_W {
        PEIM_W { w: self }
    }
    #[doc = "Bit 9 - UART Break Error Interrupt Mask"]
    #[inline(always)]
    pub fn beim(&mut self) -> BEIM_W {
        BEIM_W { w: self }
    }
    #[doc = "Bit 10 - UART Overrun Error Interrupt Mask"]
    #[inline(always)]
    pub fn oeim(&mut self) -> OEIM_W {
        OEIM_W { w: self }
    }
    #[doc = "Bit 11 - End of Transmission Interrupt Mask"]
    #[inline(always)]
    pub fn eotim(&mut self) -> EOTIM_W {
        EOTIM_W { w: self }
    }
    #[doc = "Bit 12 - 9-Bit Mode Interrupt Mask"]
    #[inline(always)]
    pub fn _9bitim(&mut self) -> _9BITIM_W {
        _9BITIM_W { w: self }
    }
    #[doc = "Bit 16 - Receive DMA Interrupt Mask"]
    #[inline(always)]
    pub fn dmarxim(&mut self) -> DMARXIM_W {
        DMARXIM_W { w: self }
    }
    #[doc = "Bit 17 - Transmit DMA Interrupt Mask"]
    #[inline(always)]
    pub fn dmatxim(&mut self) -> DMATXIM_W {
        DMATXIM_W { w: self }
    }
}
|
use core::fmt;
use core::hash::{Hash, Hasher};
use core::ops::Deref;
use super::BigInteger;
/// Transparent newtype over `f64`.
///
/// Comparison impls elsewhere in this file mention that NaN "cannot occur
/// due to our encoding scheme" — NOTE(review): the inner field is public, so
/// that invariant is not enforced by this type; confirm upstream guarantees.
#[derive(Copy, Clone, PartialEq, PartialOrd)]
#[repr(transparent)]
pub struct Float(pub f64);
impl Float {
    /// 2^53: from this magnitude upward an `f64` mantissa
    /// (`f64::MANTISSA_DIGITS` == 53 bits) can no longer represent every
    /// integer exactly, so integer comparisons switch to the i64 domain.
    const I64_UPPER_BOUNDARY: f64 = (1i64 << f64::MANTISSA_DIGITS) as f64;
    /// -2^53: negative counterpart of `I64_UPPER_BOUNDARY`.
    const I64_LOWER_BOUNDARY: f64 = (-1i64 << f64::MANTISSA_DIGITS) as f64;
    /// Returns the raw `f64` value.
    #[inline]
    pub fn as_f64(&self) -> f64 {
        self.0
    }
}
impl fmt::Debug for Float {
    /// Delegates to the inner `f64`'s `Debug`, preserving any formatter
    /// flags (width, precision) the caller supplied.
    fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result {
        <f64 as fmt::Debug>::fmt(&self.0, fmtr)
    }
}
impl fmt::Display for Float {
    /// Delegates to the inner `f64`'s `Display`, preserving any formatter
    /// flags the caller supplied.
    fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result {
        <f64 as fmt::Display>::fmt(&self.0, fmtr)
    }
}
impl Into<f64> for Float {
#[inline(always)]
fn into(self) -> f64 {
self.0
}
}
impl From<f64> for Float {
#[inline(always)]
fn from(f: f64) -> Self {
Self(f)
}
}
impl TryInto<i64> for Float {
type Error = ();
fn try_into(self) -> Result<i64, Self::Error> {
if self.is_nan() || self.is_infinite() {
return Err(());
}
Ok(unsafe { self.0.to_int_unchecked() })
}
}
impl Hash for Float {
    /// Hashes the IEEE-754 bit pattern, since `f64` itself is not `Hash`.
    ///
    /// NOTE(review): `0.0` and `-0.0` compare equal via the derived
    /// `PartialEq` but have different bit patterns, so they hash differently
    /// — confirm no map/set keying relies on that pair colliding.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.to_bits().hash(state)
    }
}
impl Deref for Float {
    type Target = f64;
    /// Lets `f64` methods (e.g. `is_nan`, `is_infinite`) be called directly
    /// on a `Float`.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl PartialEq<i64> for Float {
fn eq(&self, y: &i64) -> bool {
match self.0 {
x if x.is_infinite() => false,
x if x >= Self::I64_UPPER_BOUNDARY || x <= Self::I64_LOWER_BOUNDARY => {
// We're out of the range where f64 is more precise than an i64,
// so cast the float to integer and comapre.
//
// # Safety
//
// We've guarded against infinite values, the float cannot be NaN
// due to our encoding scheme, and we know the value can be represented
// in i64, so this is guaranteed safe.
let x: i64 = unsafe { x.to_int_unchecked() };
x.eq(y)
}
x => x.eq(&(*y as f64)),
}
}
}
impl PartialEq<BigInteger> for Float {
    /// Equal iff `y` fits in an `i64` and equals this float.
    ///
    /// NOTE(review): a finite float >= 2^63 can exactly equal a `BigInteger`
    /// that does not fit in `i64`; this impl reports such pairs as unequal —
    /// confirm that is the intended semantics.
    fn eq(&self, y: &BigInteger) -> bool {
        let Some(y) = y.as_i64() else { return false; };
        self.eq(&y)
    }
}
impl PartialOrd<i64> for Float {
fn partial_cmp(&self, y: &i64) -> Option<core::cmp::Ordering> {
use core::cmp::Ordering;
match self.0 {
x if x.is_infinite() => {
if x.is_sign_negative() {
Some(Ordering::Less)
} else {
Some(Ordering::Greater)
}
}
x if x >= Self::I64_UPPER_BOUNDARY || x <= Self::I64_LOWER_BOUNDARY => {
// We're out of the range where f64 is more precise than an i64,
// so cast the float to integer and comapre.
//
// # Safety
//
// We've guarded against infinite values, the float cannot be NaN
// due to our encoding scheme, and we know the value can be represented
// in i64, so this is guaranteed safe.
let x: i64 = unsafe { x.to_int_unchecked() };
Some(x.cmp(y))
}
x => x.partial_cmp(&(*y as f64)),
}
}
}
impl PartialOrd<BigInteger> for Float {
    /// Orders a float against an arbitrary-precision integer.
    fn partial_cmp(&self, y: &BigInteger) -> Option<core::cmp::Ordering> {
        use core::cmp::Ordering;
        match self.0 {
            // Infinities order strictly by sign against any integer.
            x if x.is_infinite() => {
                if x.is_sign_negative() {
                    Some(Ordering::Less)
                } else {
                    Some(Ordering::Greater)
                }
            }
            _ => {
                // If `y` overflows i64, its sign alone decides the ordering.
                // NOTE(review): a finite float beyond 2^63 compared to a
                // same-signed huge BigInteger is also decided solely by
                // `y`'s sign here — confirm that matches intent.
                let too_large = if y.is_negative() {
                    Ordering::Greater
                } else {
                    Ordering::Less
                };
                let Some(y) = y.as_i64() else { return Some(too_large); };
                self.partial_cmp(&y)
            }
        }
    }
}
|
// Copyright 2021 Tomba technology web service LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Tomba Email Finder data structures.
use serde::{Deserialize, Serialize};
/// Top-level envelope of a Tomba Email Finder response.
///
/// The redundant `#[serde(rename = "data")]` was removed: serde already
/// (de)serializes a field under its own name by default.
#[derive(Serialize, Deserialize)]
pub struct Finder {
    pub data: FinderData,
}
/// Payload of a Finder lookup: the best-guess email plus person/company
/// metadata, a confidence score, and verification info.
///
/// All `#[serde(rename = "...")]` attributes were removed: every rename
/// matched the field's own name, which is already serde's default.
#[derive(Serialize, Deserialize)]
pub struct FinderData {
    pub email: String,
    pub first_name: String,
    pub last_name: String,
    pub full_name: String,
    pub country: String,
    pub position: Option<serde_json::Value>,
    pub twitter: Option<serde_json::Value>,
    pub linkedin: String,
    pub phone_number: Option<serde_json::Value>,
    pub accept_all: Option<serde_json::Value>,
    pub website_url: String,
    pub company: String,
    pub score: i64,
    pub verification: FinderVerification,
    pub sources: Vec<Option<serde_json::Value>>,
}
/// Verification metadata attached to a found email address.
///
/// Redundant `#[serde(rename)]` attributes removed (they matched the field
/// names, serde's default).
#[derive(Serialize, Deserialize)]
pub struct FinderVerification {
    pub date: Option<serde_json::Value>,
    pub status: Option<serde_json::Value>,
}
|
use std::collections::BTreeMap;
use char_case::{Case, CharCaseT};
use char_case::CharCaseT::*;
use value::Value;
use value::Value::*;
/// One character position: its (possibly unknown) value plus the
/// replacement and purge rules accumulated for it.
#[derive(Debug, PartialEq, Clone, Hash, Eq)]
pub struct Slot {
    pub value: Value, // needs to enforce that only Unknown values can have replaces & purges fields
    pub replaces: Replaces,
    pub purges: Purges,
}
impl Slot {
    /// Creates a slot holding an unknown value with the given id.
    pub fn new(id: usize) -> Slot {
        Slot {
            value: Value::new_unknown(id),
            replaces: Replaces::new(),
            purges: Purges::new(),
        }
    }
    /// Creates a slot holding the concrete character `c`.
    pub fn new_value(c: char) -> Slot {
        Slot {
            value: Value(c),
            replaces: Replaces::new(),
            purges: Purges::new(),
        }
    }
    /// Uppercases the slot: forces an Unknown's case to UPPER, or
    /// uppercases a concrete value; replacement targets follow suit.
    pub fn u(&mut self) {
        self.value = match self.value.clone() {
            Unknown { id, .. } => Unknown { case: UPPER, id },
            Value(c) => Value(c.uc()),
        };
        self.replaces.urest();
    }
    /// Lowercases the slot; mirror image of `u`.
    pub fn l(&mut self) {
        self.value = match self.value.clone() {
            Unknown { id, .. } => Unknown { case: LOWER, id },
            Value(c) => Value(c.lc()),
        };
        self.replaces.lrest();
    }
    /// Toggles the slot's case: inverts an Unknown's case constraint, or
    /// switches a concrete value's case; replacement targets follow suit.
    pub fn t(&mut self) {
        self.value = match self.value.clone() {
            Unknown { id, case } => Unknown {
                case: case.invert(),
                id,
            },
            Value(c) => Value(c.switch_case()),
        };
        self.replaces.trest();
    }
    /// Records (or applies) a `from -> to` substitution.
    ///
    /// For a concrete value this rewrites it in place when it matches.
    /// For an Unknown it maintains the replace/purge rule sets; the numeric
    /// tags (1.2, 1.3, ...) refer to the rule cases of the surrounding
    /// algorithm. NOTE(review): the statement order below is load-bearing
    /// (later steps rewrite entries added by earlier ones) — do not reorder.
    pub fn replace(&mut self, from: char, to: char) {
        match self.value {
            Unknown { ref case, .. } => {
                let mut slot_from = From {
                    from,
                    case: case.clone(),
                };
                // 1.2: a rule for this key already exists (or was purged) —
                // first rule wins, so bail out.
                if self.replaces.contains(&slot_from) || self.purges.contains(&slot_from) {
                    return;
                }
                // 1.4: same! sAbsabl, sAblsab
                if self.replaces.update_invert_replaces_case_and_add_new_from(
                    from,
                    case.clone(),
                    to,
                ) {
                    self.purges.add(From { from, case: ANY });
                    self.purges.update_invert_from_case(from, case.clone());
                } else {
                    if !case.default() && from.case() == *case {
                        /*1.6*/
                        self.replaces.add(
                            From {
                                from: from.switch_case(),
                                case: ANY,
                            },
                            to,
                        );
                        self.replaces.add(From { from, case: ANY }, to);
                        self.purges.add(From {
                            from: from.switch_case(),
                            case: ANY,
                        });
                        self.purges.add(From { from, case: ANY });
                    } else if case.default() || from.case() == *case {
                        /*1.5*/
                        if from.is_numeric() {
                            slot_from = From { from, case: ANY };
                        }
                        /*1.3*/
                        self.replaces.add(slot_from.clone(), to);
                        self.purges.add(slot_from.clone());
                    }
                    /*2.1, 2.2: chain older rules that produced `from` so they
                    now produce `to`; rules that would map to themselves are
                    dropped instead.*/
                    let mut keys_to_remove: Vec<From> = Vec::new();
                    for (old_from, old_to) in &mut self.replaces.0 {
                        if *old_to == from {
                            if old_from.from == to {
                                keys_to_remove.push(old_from.clone());
                                // or use retain method
                            } else {
                                *old_to = to;
                            }
                        }
                    }
                    self.replaces.remove(keys_to_remove.clone());
                    self.purges.remove(keys_to_remove);
                }
            }
            Value(c) => if c == from {
                self.value = Value(to);
            },
        }
    }
}
/// Key of a replacement/purge rule: the source character plus the case
/// context the rule applies in.
///
/// NOTE(review): the name `From` shadows `std::convert::From` in this
/// module; a rename (e.g. `RuleKey`) would avoid confusion — left unchanged
/// here to preserve the public interface.
#[derive(Debug, PartialEq, Hash, Eq, Ord, PartialOrd, Clone)]
pub struct From {
    pub from: char,
    pub case: CharCaseT,
}
/// Set of rule keys marked as purged.
///
/// NOTE(review): `BTreeMap<From, ()>` is effectively a `BTreeSet<From>`;
/// switching would simplify, but the layout is kept to avoid a wider change.
#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Clone)]
pub struct Purges(BTreeMap<From, ()>);
impl Purges {
    /// Creates an empty purge set.
    pub fn new() -> Purges {
        Purges(BTreeMap::new())
    }
    /// Re-keys the entry for `(from, case)` under the ANY case.
    pub fn update_invert_from_case(&mut self, from: char, case: CharCaseT) {
        self.update_key(from, case, from, ANY);
    }
    /// Drops the entry keyed by the old `(char, case)` pair and installs
    /// one keyed by the new pair.
    pub fn update_key(
        &mut self,
        old_from: char,
        old_case: CharCaseT,
        new_from: char,
        new_case: CharCaseT,
    ) {
        let old_key = From {
            from: old_from,
            case: old_case,
        };
        self.0.remove(&old_key);
        let new_key = From {
            from: new_from,
            case: new_case,
        };
        self.0.insert(new_key, ());
    }
    /// Whether `from` is present in the set.
    pub fn contains(&self, from: &From) -> bool {
        self.0.contains_key(from)
    }
    /// Inserts `from` into the set.
    pub fn add(&mut self, from: From) {
        self.0.insert(from, ());
    }
    /// Removes every key listed in `keys`.
    pub fn remove(&mut self, keys: Vec<From>) {
        keys.iter().for_each(|key| {
            self.0.remove(key);
        });
    }
}
/// Map of rule keys to the replacement character each one produces.
#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Clone)]
pub struct Replaces(pub BTreeMap<From, char>);
impl Replaces {
    /// Creates an empty replacement map.
    pub fn new() -> Replaces {
        Replaces(BTreeMap::new())
    }
    /// Re-keys the rule for `(from, case)` under the ANY case, mapping to `to`.
    pub fn update_invert_from_case(&mut self, from: char, case: CharCaseT, to: char) {
        self.update_key(from, case, from, ANY, to);
    }
    /// Drops the rule keyed by the old `(char, case)` pair and installs one
    /// keyed by the new pair that maps to `new_to`.
    pub fn update_key(
        &mut self,
        old_from: char,
        old_case: CharCaseT,
        new_from: char,
        new_case: CharCaseT,
        new_to: char,
    ) {
        self.0.remove(&From {
            from: old_from,
            case: old_case,
        });
        self.0.insert(
            From {
                from: new_from,
                case: new_case,
            },
            new_to,
        );
    }
    /// If a rule exists for the case-switched `from` under `case`, re-keys
    /// it to ANY, adds an ANY rule for `from` itself, and returns `true`;
    /// otherwise leaves the map untouched and returns `false`.
    pub fn update_invert_replaces_case_and_add_new_from(
        &mut self,
        from: char,
        case: CharCaseT,
        to: char,
    ) -> bool {
        // Idiom fix: `contains_key` instead of `get(..).is_some()`.
        if self.0.contains_key(&From {
            from: from.switch_case(),
            case: case.clone(),
        }) {
            self.update_invert_from_case(from.switch_case(), case, to);
            self.0.insert(From { from, case: ANY }, to);
            return true;
        }
        false
    }
    /// Inserts (or overwrites) a single rule.
    pub fn add(&mut self, from: From, to: char) {
        self.0.insert(from, to);
    }
    /// Lowercases every replacement target.
    pub fn lrest(&mut self) {
        self.update_case_to(LOWER);
    }
    /// Uppercases every replacement target.
    pub fn urest(&mut self) {
        self.update_case_to(UPPER);
    }
    /// Applies `case` to every replacement target; other cases are a no-op.
    pub fn update_case_to(&mut self, case: CharCaseT) {
        // Hoisted the case dispatch out of the loop — it is loop-invariant.
        match case {
            LOWER => {
                for val in self.0.values_mut() {
                    *val = val.lc()
                }
            }
            UPPER => {
                for val in self.0.values_mut() {
                    *val = val.uc()
                }
            }
            _ => {}
        }
    }
    /// Toggles the case of every replacement target.
    pub fn trest(&mut self) {
        for val in self.0.values_mut() {
            *val = val.switch_case()
        }
    }
    /// Whether a rule exists for this key.
    pub fn contains(&self, from: &From) -> bool {
        self.0.contains_key(from)
    }
    /// Removes every rule keyed by an element of `keys`.
    pub fn remove(&mut self, keys: Vec<From>) {
        for key in keys {
            self.0.remove(&key);
        }
    }
}
|
use async_std::sync::{channel, Receiver, Sender};
use async_std::task;
use async_trait::async_trait;
use super::{Entry, QResult, Query};
use crate::error::*;
/// Query results consumable either asynchronously or synchronously.
#[async_trait]
pub trait AsyncResults {
    /// The query these results belong to.
    fn query(&self) -> &Query;
    /// Next result; `None` once the stream is exhausted.
    async fn next(&self) -> Option<QResult>;
    /// Blocking variant of `next` for synchronous callers.
    fn next_sync(&self) -> Option<QResult>;
    /// Drains all remaining results into entries, failing on the first error.
    fn rest(&self) -> Result<Vec<Entry>>;
}
/// Query results backed by the receiving half of a result channel.
pub struct AsyncResult {
    query: Query,
    res: Receiver<QResult>,
}
#[async_trait]
impl AsyncResults for AsyncResult {
    /// The query that produced this result stream.
    fn query(&self) -> &Query {
        &self.query
    }
    /// Receives the next result from the channel; `None` once the sending
    /// side is dropped and the buffer is drained.
    /// NOTE(review): written as `Option<Result<Entry>>` while the trait says
    /// `Option<QResult>` — presumably `QResult = Result<Entry>`; confirm.
    async fn next(&self) -> Option<Result<Entry>> {
        self.res.recv().await
    }
    /// Blocking wrapper over `next`.
    fn next_sync(&self) -> Option<Result<Entry>> {
        task::block_on(self.next())
    }
    /// Collects every remaining entry, stopping at the first error.
    fn rest(&self) -> Result<Vec<Entry>> {
        let mut es = vec![];
        while let Some(r) = self.next_sync() {
            let e = r?;
            es.push(e);
        }
        Ok(es)
    }
}
/// Sender/receiver pair carrying query results.
pub type ResultChannel = (Sender<Result<Entry>>, Receiver<QResult>);
/// Builds an `AsyncResult` around a bounded result channel.
pub struct AsyncResultBuilder {
    query: Query,
    output: ResultChannel,
}
/// Bounded channel capacity used for results.
const NORMAL_BUF_SIZE: usize = 1;
impl AsyncResultBuilder {
    /// Creates a builder with a fresh bounded result channel.
    pub fn new(q: Query) -> Self {
        AsyncResultBuilder {
            query: q,
            output: channel(NORMAL_BUF_SIZE),
        }
    }
    /// Consumes the builder, keeping only the receiving half.
    /// NOTE(review): the sender half is dropped here, which closes the
    /// channel — confirm a clone of it is handed out before this call.
    pub fn results(self) -> AsyncResult {
        AsyncResult {
            query: self.query,
            res: self.output.1,
        }
    }
}
// TODO need more info to complete the rest
|
pub mod memory;
mod operations;
pub mod program;
mod values;
|
pub mod futures;
pub mod executor; |
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// NOTE(review): the items below appear machine-generated (windows-rs style
// bindings for Windows.Media.Playlists) — prefer regenerating over hand-edits.
#[repr(transparent)]
#[doc(hidden)]
pub struct IPlaylist(pub ::windows::core::IInspectable);
// Ties IPlaylist to its vtable layout and COM interface id.
unsafe impl ::windows::core::Interface for IPlaylist {
    type Vtable = IPlaylist_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x803736f5_cf44_4d97_83b3_7a089e9ab663);
}
// Raw vtable; feature-gated slots collapse to a `usize` placeholder so the
// struct layout stays identical regardless of enabled features.
#[repr(C)]
#[doc(hidden)]
pub struct IPlaylist_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(all(feature = "Foundation_Collections", feature = "Storage"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation_Collections", feature = "Storage")))] usize,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    #[cfg(all(feature = "Foundation", feature = "Storage"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, savelocation: ::windows::core::RawPtr, desiredname: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, option: super::super::Storage::NameCollisionOption, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Storage")))] usize,
    #[cfg(all(feature = "Foundation", feature = "Storage"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, savelocation: ::windows::core::RawPtr, desiredname: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, option: super::super::Storage::NameCollisionOption, playlistformat: PlaylistFormat, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Storage")))] usize,
);
// Generated binding for the Playlist activation-factory (static) interface.
#[repr(transparent)]
#[doc(hidden)]
pub struct IPlaylistStatics(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPlaylistStatics {
    type Vtable = IPlaylistStatics_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xc5c331cd_81f9_4ff3_95b9_70b6ff046b68);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPlaylistStatics_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(all(feature = "Foundation", feature = "Storage"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, file: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Storage")))] usize,
);
// Projected runtime class; each method forwards through a numbered vtable slot.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct Playlist(pub ::windows::core::IInspectable);
impl Playlist {
    /// Activates a new, empty Playlist instance via the activation factory.
    pub fn new() -> ::windows::core::Result<Self> {
        Self::IActivationFactory(|f| f.activate_instance::<Self>())
    }
    // Cached activation factory; `static mut` + unsafe is the generator's
    // pattern for the process-wide factory cache.
    fn IActivationFactory<R, F: FnOnce(&::windows::core::IActivationFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<Playlist, ::windows::core::IActivationFactory> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
    /// Vtable slot 6: the playlist's backing files collection.
    #[cfg(all(feature = "Foundation_Collections", feature = "Storage"))]
    pub fn Files(&self) -> ::windows::core::Result<super::super::Foundation::Collections::IVector<super::super::Storage::StorageFile>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Foundation::Collections::IVector<super::super::Storage::StorageFile>>(result__)
        }
    }
    /// Vtable slot 7: saves the playlist asynchronously.
    #[cfg(feature = "Foundation")]
    pub fn SaveAsync(&self) -> ::windows::core::Result<super::super::Foundation::IAsyncAction> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Foundation::IAsyncAction>(result__)
        }
    }
    /// Vtable slot 8: saves under a new name/location.
    #[cfg(all(feature = "Foundation", feature = "Storage"))]
    pub fn SaveAsAsync<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::IStorageFolder>, Param1: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, savelocation: Param0, desiredname: Param1, option: super::super::Storage::NameCollisionOption) -> ::windows::core::Result<super::super::Foundation::IAsyncOperation<super::super::Storage::StorageFile>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), savelocation.into_param().abi(), desiredname.into_param().abi(), option, &mut result__).from_abi::<super::super::Foundation::IAsyncOperation<super::super::Storage::StorageFile>>(result__)
        }
    }
    /// Vtable slot 9: saves under a new name/location in an explicit format.
    #[cfg(all(feature = "Foundation", feature = "Storage"))]
    pub fn SaveAsWithFormatAsync<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::IStorageFolder>, Param1: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, savelocation: Param0, desiredname: Param1, option: super::super::Storage::NameCollisionOption, playlistformat: PlaylistFormat) -> ::windows::core::Result<super::super::Foundation::IAsyncOperation<super::super::Storage::StorageFile>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), savelocation.into_param().abi(), desiredname.into_param().abi(), option, playlistformat, &mut result__).from_abi::<super::super::Foundation::IAsyncOperation<super::super::Storage::StorageFile>>(result__)
        }
    }
    /// Static method (IPlaylistStatics slot 6): loads a playlist from a file.
    #[cfg(all(feature = "Foundation", feature = "Storage"))]
    pub fn LoadAsync<'a, Param0: ::windows::core::IntoParam<'a, super::super::Storage::IStorageFile>>(file: Param0) -> ::windows::core::Result<super::super::Foundation::IAsyncOperation<Playlist>> {
        Self::IPlaylistStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), file.into_param().abi(), &mut result__).from_abi::<super::super::Foundation::IAsyncOperation<Playlist>>(result__)
        })
    }
    // Cached statics factory, same pattern as IActivationFactory above.
    pub fn IPlaylistStatics<R, F: FnOnce(&IPlaylistStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<Playlist, IPlaylistStatics> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
// Generated trait plumbing: runtime signature/name, interface identity, and
// the owned/borrowed conversions into IUnknown / IInspectable parameters.
unsafe impl ::windows::core::RuntimeType for Playlist {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.Media.Playlists.Playlist;{803736f5-cf44-4d97-83b3-7a089e9ab663})");
}
unsafe impl ::windows::core::Interface for Playlist {
    type Vtable = IPlaylist_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x803736f5_cf44_4d97_83b3_7a089e9ab663);
}
impl ::windows::core::RuntimeName for Playlist {
    const NAME: &'static str = "Windows.Media.Playlists.Playlist";
}
impl ::core::convert::From<Playlist> for ::windows::core::IUnknown {
    fn from(value: Playlist) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&Playlist> for ::windows::core::IUnknown {
    fn from(value: &Playlist) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for Playlist {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a Playlist {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<Playlist> for ::windows::core::IInspectable {
    fn from(value: Playlist) -> Self {
        value.0
    }
}
impl ::core::convert::From<&Playlist> for ::windows::core::IInspectable {
    fn from(value: &Playlist) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for Playlist {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a Playlist {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// WinRT enum projected as a transparent i32 newtype with named constants.
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct PlaylistFormat(pub i32);
impl PlaylistFormat {
    pub const WindowsMedia: PlaylistFormat = PlaylistFormat(0i32);
    pub const Zune: PlaylistFormat = PlaylistFormat(1i32);
    pub const M3u: PlaylistFormat = PlaylistFormat(2i32);
}
impl ::core::convert::From<i32> for PlaylistFormat {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
unsafe impl ::windows::core::Abi for PlaylistFormat {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for PlaylistFormat {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.Media.Playlists.PlaylistFormat;i4)");
}
impl ::windows::core::DefaultType for PlaylistFormat {
    type DefaultType = Self;
}
|
//! # OpenTelemetry Distributed Context API
//!
//! OpenTelemetry uses `Propagators` to serialize and deserialize `SpanContext`
//! into a binary or text format. Currently there are two types of propagators:
//!
//! - `BinaryFormat` which is used to serialize and deserialize a value into
//! a binary representation.
//! - `HTTPTextFormat` which is used to inject and extract a value as text into
//! `Carrier`s that travel in-band across process boundaries.
pub mod b3_propagator;
pub mod binary_propagator;
pub mod trace_context_propagator;
|
impl Solution {
pub fn kids_with_candies(candies: Vec<i32>, extra_candies: i32) -> Vec<bool> {
let ans: Vec<bool> = Vec::new();
let mut max_amount = 0;
for i in 0..candies.len() {
if max_amount < candies[i] {
max_amount = candies[i];
}
}
for i in 0..candies.len() {
ans.push(candies[i] + extra_candies > max_amount);
}
ans
}
} |
use proconio::input;
/// Reads `n`, `k`, and `a[0..n]`; deals the values round-robin into `k`
/// piles, sorts each pile, re-interleaves them in place, and prints "Yes"
/// iff the reassembled sequence is non-decreasing.
fn main() {
    input! {
        n: usize,
        k: usize,
        a: [u32; n],
    };
    // b[j] collects the elements whose index i satisfies i % k == j.
    let mut b = vec![vec![]; k];
    for i in 0..n {
        b[i % k].push(a[i]);
    }
    for i in 0..k {
        b[i].sort();
    }
    // Write each sorted pile back into its residue-class positions.
    let mut c = vec![0; n];
    for i in 0..k {
        for (j, &x) in b[i].iter().enumerate() {
            c[i + j * k] = x;
        }
    }
    // Adjacent-pair scan: any descent means the target order is unreachable.
    for w in c.windows(2) {
        if w[0] > w[1] {
            println!("No");
            return;
        }
    }
    println!("Yes");
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(specialization)]
// Blanket impl: every Copy + ToString type is Foo.
trait Foo: Copy + ToString {}
impl<T: Copy + ToString> Foo for T {}
// Returns an opaque `impl Foo`, hiding the concrete type from callers —
// the error annotations in `main` below exercise exactly that opacity.
fn hide<T: Foo>(x: T) -> impl Foo {
    x
}
// Specialization fixture: the default impl "leaks" `()` (and panics if
// called); the i32 impl leaks the value itself.
trait Leak: Sized {
    type T;
    fn leak(self) -> Self::T;
}
impl<T> Leak for T {
    default type T = ();
    default fn leak(self) -> Self::T { panic!() }
}
impl Leak for i32 {
    type T = i32;
    fn leak(self) -> i32 { self }
}
fn main() {
    // NOTE(review): this is a compile-fail test — each `//~` annotation
    // checks the line above it and must stay adjacent to that line.
    let _: u32 = hide(0_u32);
    //~^ ERROR mismatched types
    //~| expected type `u32`
    //~| found type `impl Foo`
    //~| expected u32, found anonymized type
    let _: i32 = Leak::leak(hide(0_i32));
    //~^ ERROR mismatched types
    //~| expected type `i32`
    //~| found type `<impl Foo as Leak>::T`
    //~| expected i32, found associated type
    let mut x = (hide(0_u32), hide(0_i32));
    x = (x.1,
    //~^ ERROR mismatched types
    //~| expected u32, found i32
    x.0);
    //~^ ERROR mismatched types
    //~| expected i32, found u32
}
|
use std::error;
/// Boxed dynamic error shared by `main` and the parsing pipeline below.
type Error = Box<dyn error::Error>;
/*
macro_rules! error {
    ($fmt:literal $(, $arg:expr)*) => { Error::from(format!($fmt $(, $arg)* )) };
}
*/
// NOTE(review): the commented-out macro above is dead code — consider removing.
/// Advent-of-Code-style "adapter joltage" puzzle: reads one joltage per line
/// from each file named on the command line and prints both part answers.
fn main() -> Result<(), Error> {
    for filename in std::env::args().skip(1) {
        print!("{}", filename);
        // Parse every line; the first malformed line aborts with an error.
        let mut adaptors: Vec<Joltage> = std::fs::read_to_string(filename)?
            .lines()
            .map(|line| line.parse().map_err(Error::from))
            .collect::<Result<_, Error>>()?;
        // Both part1 and part2 assume ascending order.
        adaptors.sort();
        println!("\tpart 1: product of 1-joltage differences and 3-joltage differences");
        println!("\t{}", part1(&adaptors));
        println!("\tpart 2: number of different adaptor chains");
        println!("\t{}", part2(&adaptors));
    }
    Ok(())
}
/// An adaptor's joltage rating.
type Joltage = usize;
/// Part 1: given an ascending adaptor list, returns the number of 1-jolt
/// gaps multiplied by the number of 3-jolt gaps (the device's built-in
/// adapter always contributes one extra 3-jolt gap).
fn part1(adaptors: &[Joltage]) -> u64 {
    // diff_counts[d] = number of adjacent gaps of size d; the wall outlet
    // starts the chain at joltage 0.
    let mut diff_counts = [0u64; 4];
    adaptors.iter().fold(0, |prev, &jolt| {
        diff_counts[jolt - prev] += 1;
        jolt
    });
    // Built-in adapter: always 3 jolts above the highest adaptor.
    diff_counts[3] += 1;
    diff_counts[1] * diff_counts[3]
}
/// Part 2: counts the distinct valid adaptor chains with a rolling
/// three-slot DP — `ways[i]` is the number of chains reaching joltage
/// `current - 2 + i`.
fn part2(adaptors: &[Joltage]) -> u64 {
    // Exactly one chain reaches the outlet (joltage 0); none reach -1/-2.
    let mut ways = [0u64, 0, 1];
    let mut current = 0;
    for &jolt in adaptors {
        ways = match jolt - current {
            // Slide the window by the gap size; a new chain can extend any
            // predecessor within 3 jolts.
            1 => [ways[1], ways[2], ways[0] + ways[1] + ways[2]],
            2 => [ways[2], 0, ways[1] + ways[2]],
            3 => [0, 0, ways[2]],
            // A gap of 0 or more than 3 jolts: no chain can continue.
            _ => [0, 0, 0],
        };
        current = jolt;
    }
    ways[2]
}
|
//! Time helpers
//!
//! These are copied from stm32f30x-hal's time helpers (from
//! <https://github.com/japaric/stm32f30x-hal/blob/master/src/time.rs>) on demand.
/// Frequency newtype: the wrapped `u32` is a rate in hertz.
#[derive(Clone, Copy)]
pub struct Hertz(pub u32);
|
use std::collections::BTreeMap;
use crate::handlers::ansible::AnsibleCommand;
use crate::handlers::models::hosts::HostError;
use super::models::hosts::NewHost;
use super::rpc::client::Client;
use super::*;
use axum::extract::{Json, Path};
use models::hosts as host_model;
use models::hosts::Host;
use models::hosts::Status as HostStatus;
use sqlx::PgPool;
use tokio::task::JoinHandle;
/// Internal error type for the host handlers; converted into `ServerError`
/// responses via `From`.
#[derive(Debug)]
enum HostHandlerError {
    // Insert failed; carries the host's name/id string.
    CannotAddHost(String),
    // Status/row update failed for this host id.
    CannotUpdateHost(Uuid),
    // No host row exists for this id.
    HostNotFound(Uuid),
    // A host with this name already exists.
    NameAlreadyExists(String),
    // A host with this address already exists.
    AddressAlreadyExists(String),
    // Any other, unexpected failure (already logged at the source).
    Other,
}
impl From<HostHandlerError> for ServerError {
fn from(err: HostHandlerError) -> Self {
match err {
HostHandlerError::CannotAddHost(name) => {
ServerError::Internal(format!("Cannot add host {}", name))
}
HostHandlerError::CannotUpdateHost(host_id) => {
ServerError::Internal(format!("Cannot update host {}", host_id))
}
HostHandlerError::AddressAlreadyExists(address) => {
ServerError::Validation(format!("host address '{}' already exists", address))
}
HostHandlerError::HostNotFound(id) => {
ServerError::EntityNotFound(format!("host id '{}' not found", id))
}
HostHandlerError::NameAlreadyExists(name) => {
ServerError::Validation(format!("address {} already exists", name))
}
HostHandlerError::Other => ServerError::Internal(format!("Internal error")),
}
}
}
impl From<HostError> for HostHandlerError {
    /// Logs each model-layer error and collapses it into a handler error.
    fn from(err: HostError) -> Self {
        match err {
            HostError::Add(host_id, e) => {
                tracing::error!("cannot add host '{}': {}", host_id, e);
                HostHandlerError::CannotAddHost(host_id)
            }
            // Distinguish a genuinely missing row from other query failures.
            HostError::Find(host_id, e @ sqlx::Error::RowNotFound) => {
                tracing::error!("cannot find host '{}': {}", host_id, e);
                HostHandlerError::HostNotFound(host_id)
            }
            HostError::Find(_, e) => {
                tracing::error!("Unexpected error: {}", e);
                HostHandlerError::Other
            }
            HostError::List(e) => {
                tracing::error!("cannot list hosts: {}", e);
                HostHandlerError::Other
            }
            HostError::Update(host_id, e) => {
                tracing::error!("cannot update host '{}': {}", host_id, e);
                HostHandlerError::CannotUpdateHost(host_id)
            }
            HostError::Other(e) => {
                tracing::error!("Unexpected error: {}", e);
                HostHandlerError::Other
            }
        }
    }
}
/// `GET` handler: returns every known host with `200 OK`.
pub async fn list(
    Extension(env): Extension<Environment>,
) -> Result<ApiResponse<Vec<Host>>, ServerError> {
    let all_hosts = host_model::list(env.db()).await.map_err(HostHandlerError::from)?;
    Ok(ApiResponse {
        code: StatusCode::OK,
        data: all_hosts,
    })
}
/// `POST` handler: validates uniqueness of name and address, inserts the
/// host, and returns its new id with `201 CREATED`.
///
/// NOTE(review): the two existence checks and the insert are not atomic —
/// concurrent adds can race; a unique DB constraint would close the gap.
pub async fn add(
    Extension(env): Extension<Environment>,
    Json(host): Json<NewHost>,
) -> Result<ApiResponse<Uuid>, ServerError> {
    // `is_ok` means a host with that name was found.
    if host_model::by_name(env.db(), &host.name).await.is_ok() {
        Err(HostHandlerError::NameAlreadyExists(host.name.to_string()))?
    }
    if host_model::by_address(env.db(), &host.address)
        .await
        .is_ok()
    {
        Err(HostHandlerError::AddressAlreadyExists(
            host.address.to_string(),
        ))?
    }
    let host_id = host_model::add(env.db(), &host)
        .await
        .map_err(HostHandlerError::from)?;
    Ok(ApiResponse {
        data: host_id,
        code: StatusCode::CREATED,
    })
}
/// `GET` handler: returns a single host by id.
///
/// # Errors
/// `EntityNotFound` (via `HostHandlerError::HostNotFound`) when no host row
/// exists for the id.
pub async fn get(
    Extension(env): Extension<Environment>,
    Path(host_id): Path<Uuid>,
) -> Result<ApiResponse<Host>, ServerError> {
    let host = host_model::by_id(env.db(), &host_id)
        .await
        .map_err(HostHandlerError::from)?;
    Ok(ApiResponse {
        data: host,
        // Fix: this read previously returned 201 CREATED; 200 OK is the
        // correct status for a fetch (and matches `list`).
        code: StatusCode::OK,
    })
}
/// `POST` handler: marks the host `Installing`, launches the Ansible install
/// playbook in a detached background task, and immediately returns
/// `202 ACCEPTED` with the literal body "started".
pub async fn install(
    Extension(env): Extension<Environment>,
    Path(host_id): Path<Uuid>,
) -> Result<ApiResponse<String>, ServerError> {
    let host = host_model::by_id(env.db(), &host_id)
        .await
        .map_err(HostHandlerError::from)?;
    host_model::update_status(env.db(), host_id, HostStatus::Installing)
        .await
        .map_err(HostHandlerError::from)?;
    // Extra variables forwarded to the playbook.
    let mut extra_params = BTreeMap::new();
    extra_params.insert(String::from("ansible_password"), host.password.to_owned());
    extra_params.insert(
        String::from("fcversion"),
        // Panics at request time if FC_VERSION is unset.
        dotenv::var("FC_VERSION").expect("FC_VERSION is not set!"),
    );
    extra_params.insert(
        String::from("local_node_path"),
        dotenv::var("LOCAL_NODE_PATH").unwrap_or_else(|_| String::from("")),
    );
    // Fire-and-forget: the HTTP response does not wait for the playbook.
    tokio::spawn(async move {
        let playbook = AnsibleCommand::new(
            ansible::INSTALL_HOST_PLAYBOOK,
            &host.host_user,
            &host.address,
            extra_params,
        );
        match playbook.run_playbook().await {
            Ok(_) => {
                tracing::info!("Installation successful");
                // NOTE(review): `unwrap` in a detached task aborts only that
                // task on a DB failure, leaving the host stuck in
                // `Installing` — confirm this is acceptable.
                host_model::update_status(env.db(), host_id, HostStatus::Up)
                    .await
                    .unwrap();
            }
            Err(e) => {
                tracing::error!("Installation failed: {}", e);
                host_model::update_status(env.db(), host_id, HostStatus::InstallationFailed)
                    .await
                    .unwrap();
            }
        }
    });
    Ok(ApiResponse {
        code: StatusCode::ACCEPTED,
        data: String::from("started"),
    })
}
/// Returns the first host whose status is `Up`.
///
/// # Errors
/// `ServerError::EntityNotFound` when no host is currently up.
pub async fn find_running_host(pool: &PgPool) -> Result<Host, ServerError> {
    // `into_iter().next()` moves the first element out of the query result,
    // avoiding the needless `hosts[0].clone()` of the original.
    host_model::by_status(pool, HostStatus::Up)
        .await
        .map_err(HostHandlerError::from)?
        .into_iter()
        .next()
        .ok_or_else(|| ServerError::EntityNotFound(String::from("host")))
}
/// Startup task: re-checks every host last seen as `Up` or `Unknown`,
/// initializing each in its own spawned task and waiting for all to finish.
///
/// NOTE(review): the name has a typo ("initalize") — kept because renaming
/// would break callers.
pub async fn initalize_hosts(env: Environment) -> Result<(), ServerError> {
    // TODO: add lookup method that can search multiple statuses
    let running_hosts = host_model::by_status(env.db(), HostStatus::Up)
        .await
        .map_err(HostHandlerError::from)?;
    let unknown_hosts = host_model::by_status(env.db(), HostStatus::Unknown)
        .await
        .map_err(HostHandlerError::from)?;
    if running_hosts.is_empty() && unknown_hosts.is_empty() {
        tracing::info!("No hosts were running or unknown, skipping initialization...");
        return Ok(());
    }
    let mut join_handles = vec![];
    for host in [running_hosts, unknown_hosts].concat() {
        let env = env.clone();
        let join_handle: JoinHandle<()> = tokio::spawn(async move {
            initialize_host(&host, env).await;
        });
        join_handles.push(join_handle);
    }
    // Join errors (panicked tasks) are deliberately ignored here.
    for handle in join_handles {
        let _ = handle.await;
    }
    Ok(())
}
/// `GET` handler: probes a single host's RPC health endpoint and returns
/// "ok" with `200 OK` on success.
pub async fn health_check(
    Extension(env): Extension<Environment>,
    Path(host_id): Path<Uuid>,
) -> Result<ApiResponse<String>, ServerError> {
    let host = if let Ok(host) = host_model::by_id(env.db(), &host_id).await {
        host
    } else {
        tracing::error!("Failed to find host: {}", host_id);
        // NOTE(review): a missing id is reported as Validation rather than
        // EntityNotFound (unlike `get`) — confirm intended.
        return Err(ServerError::Validation(host_id.to_string()));
    };
    if health_check_internal(&host).await.is_err() {
        tracing::error!("Healthcheck for host: {} failed", host_id);
        return Err(ServerError::Validation(host_id.to_string()));
    }
    Ok(ApiResponse {
        code: StatusCode::OK,
        data: String::from("ok"),
    })
}
async fn health_check_internal(host: &Host) -> Result<(), String> {
match Client::connect(format!("{}:{}", host.address, host.port).parse().unwrap()).await {
Ok(client) => {
if let Err(e) = client.clone().health_check().await {
tracing::error!("Healthcheck failed: {}", e);
return Err(e.to_string());
}
Ok(())
}
Err(e) => {
tracing::error!("Could not connect to host {}, error: {}", host.id, e);
return Err(String::from("Could not connect to host"));
}
}
}
async fn initialize_host(host: &Host, env: Environment) {
host_model::update_status(env.db(), host.id, HostStatus::Initializing)
.await
.unwrap();
tracing::info!("Initializing host: {}...", host.id);
if let Err(e) = health_check_internal(&host).await {
let _ = host_model::update_status(env.db(), host.id, HostStatus::Unknown).await;
tracing::error!("Failed to initialize host: {}, error: {}", host.id, e);
return;
}
host_model::update_status(env.db(), host.id, HostStatus::Up)
.await
.unwrap();
tracing::info!("Host {} initialized...", host.id);
}
#[cfg(test)]
mod tests {
    use super::*;
    use dotenv::dotenv;
    use http::Request;
    use hyper::Body;
    use sqlx::{migrate::MigrateDatabase, postgres, PgPool};
    use tower::ServiceExt;
    use crate::database;
    /// Loads `.env`, runs migrations against the database named by
    /// `TEST_DATABASE_URL`, and returns a pool connected to it.
    async fn setup() -> anyhow::Result<PgPool> {
        dotenv().ok();
        // NOTE(review): the expect message mentions DATABASE_URL but the
        // variable actually read is TEST_DATABASE_URL.
        let db_url = &dotenv::var("TEST_DATABASE_URL").expect("DATABASE_URL is not set!");
        database::run_migrations(db_url).await.unwrap();
        let pool = database::connect(&db_url).await?;
        Ok(pool)
    }
    /// Closes the pool and drops the test database entirely.
    async fn teardown(pool: &PgPool) {
        pool.close().await;
        let db_url = &dotenv::var("TEST_DATABASE_URL").expect("DATABASE_URL is not set!");
        postgres::Postgres::drop_database(db_url).await.unwrap();
    }
    /// POST /hosts with a valid JSON payload should answer 201 Created.
    #[tokio::test]
    async fn test_add() {
        let pool = setup().await.unwrap();
        let env = Environment::new(pool.clone()).await.unwrap();
        let app = app(env.clone()).await;
        let host = NewHost {
            name: String::from("test_host"),
            address: String::from("127.0.0.1"),
            port: 8080,
            host_user: String::from("root"),
            password: String::from("pass"),
        };
        let response = app
            .oneshot(
                Request::builder()
                    .method(http::Method::POST)
                    .uri("/hosts")
                    .header(http::header::CONTENT_TYPE, "application/json")
                    .body(Body::from(json!(host).to_string()))
                    .unwrap(),
            )
            .await
            .unwrap();
        assert_eq!(StatusCode::CREATED, response.status());
        teardown(env.db()).await;
    }
}
|
use proconio::input;
// Counts the grid cells covered by at least one of `n` axis-aligned
// rectangles; rectangle i spans x in [a, b) and y in [c, d), with all
// coordinates in 0..=100.
fn main() {
    input! {
        n: usize,
        rects: [(usize, usize, usize, usize); n],
    };
    // covered[y][x] is true once any rectangle touches the cell.
    let mut covered = vec![vec![false; 101]; 101];
    for (a, b, c, d) in rects {
        for y in c..d {
            for x in a..b {
                covered[y][x] = true;
            }
        }
    }
    let total = covered.iter().flatten().filter(|&&cell| cell).count();
    println!("{}", total);
}
|
// Day 8
use std::error::Error;
use std::fs;
use std::process;
fn main() {
let input_filename = "input.txt";
if let Err(e) = run(input_filename) {
println!("Application error: {}", e);
process::exit(1);
}
}
/// Solves AoC 2019 Day 8 for the Space Image Format data in `filename`.
///
/// Part 1: finds the layer with the fewest 0 digits and prints the count of
/// 1 digits times the count of 2 digits on that layer.
/// Part 2: flattens the layers (0 = black, 1 = white, 2 = transparent) and
/// prints the decoded image.
///
/// # Errors
/// Returns any I/O error from reading the file.
///
/// # Panics
/// Panics on non-digit input, on data that is not a whole number of layers,
/// or when a pixel is transparent in every layer.
fn run(filename: &str) -> Result<(), Box<dyn Error>> {
    let contents = fs::read_to_string(filename)?;
    let img_width = 25;
    let img_height = 6;
    let layer_size = img_width * img_height;
    let raw_image_data: Vec<u32> = contents
        .trim()
        .chars()
        .map(|d| d.to_digit(10).unwrap())
        .collect();
    // Split into layers (requires the image data to be a whole number of layers).
    assert_eq!(raw_image_data.len() % layer_size, 0);
    // One owned Vec per layer; the old code called to_vec() AND clone().
    let layers: Vec<Vec<u32>> = raw_image_data
        .chunks(layer_size)
        .map(|layer| layer.to_vec())
        .collect();
    // Part 1: locate the layer with the fewest zeros. Ties keep the earliest
    // layer (strict `<`), matching the original scan. usize::MAX replaces the
    // old magic 100000 sentinel, which would break for layers > 100000 pixels.
    let mut min_zeros_index = 0;
    let mut min_num_zeros = usize::MAX;
    for (i, layer) in layers.iter().enumerate() {
        let num_zeros = layer.iter().filter(|&&n| n == 0).count();
        if num_zeros < min_num_zeros {
            min_zeros_index = i;
            min_num_zeros = num_zeros;
        }
    }
    let min_zeros_layer = &layers[min_zeros_index];
    // We want the number of 1 digits multiplied by the number of 2 digits.
    let num_ones = min_zeros_layer.iter().filter(|&&n| n == 1).count();
    let num_twos = min_zeros_layer.iter().filter(|&&n| n == 2).count();
    println!("num ones * num twos: {}", num_ones * num_twos);
    // Part 2: a pixel's visible color is the first non-transparent (non-2)
    // value looking from the top layer down.
    let mut processed_image = vec![3; layer_size];
    for (i, pixel) in processed_image.iter_mut().enumerate() {
        for layer in &layers {
            if layer[i] != 2 {
                *pixel = layer[i];
                break;
            }
        }
        // 3 is the "unset" marker; a fully transparent pixel is invalid input.
        if *pixel == 3 {
            panic!("Something has gone wrong.");
        }
    }
    // Render the image row by row: white pixels as '*', black as ' '.
    for chunk in processed_image.chunks(img_width) {
        let row: Vec<String> = chunk.iter().map(process_digit).collect();
        println!("{}", row.join(" "));
    }
    Ok(())
}

/// Renders one pixel: white (1) becomes "*", everything else a space.
fn process_digit(&d: &u32) -> String {
    if d == 1 {
        "*".to_string()
    } else {
        " ".to_string()
    }
}
|
use std::io;
use std::process::{ChildStdin, ChildStdout, Command, Stdio};
use crate::runtime::Int;
/// Builds a `Command` that runs `bs` through the platform shell:
/// `cmd /C` on Windows, `sh -c` everywhere else.
///
/// Returns an `InvalidInput` error when `bs` is not valid UTF-8.
fn prepare_command(bs: &[u8]) -> io::Result<Command> {
    let prog = std::str::from_utf8(bs)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
    let (shell, flag) = if cfg!(target_os = "windows") {
        ("cmd", "/C")
    } else {
        ("sh", "-c")
    };
    let mut cmd = Command::new(shell);
    cmd.args(&[flag, prog]);
    Ok(cmd)
}
pub fn run_command(bs: &[u8]) -> Int {
fn wrap_err(e: Option<i32>) -> Int {
e.map(Int::from).unwrap_or(1)
}
fn run_command_inner(bs: &[u8]) -> io::Result<Int> {
let status = prepare_command(bs)?.status()?;
Ok(wrap_err(status.code()))
}
match run_command_inner(bs) {
Ok(i) => i,
Err(e) => wrap_err(e.raw_os_error()),
}
}
/// Spawns `bs` via the shell with a piped stdin (stdout inherited) and hands
/// the caller the child's stdin handle for writing.
pub fn command_for_write(bs: &[u8]) -> io::Result<ChildStdin> {
    let child = prepare_command(bs)?
        .stdin(Stdio::piped())
        .stdout(Stdio::inherit())
        .spawn()?;
    Ok(child.stdin.expect("stdin was configured as piped"))
}
/// Spawns `bs` via the shell with a piped stdout (stdin inherited) and hands
/// the caller the child's stdout handle for reading.
pub fn command_for_read(bs: &[u8]) -> io::Result<ChildStdout> {
    let child = prepare_command(bs)?
        .stdin(Stdio::inherit())
        .stdout(Stdio::piped())
        .spawn()?;
    Ok(child.stdout.expect("stdout was configured as piped"))
}
|
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;
/// C-layout mirror of the Vulkan `VkSpecializationInfo` struct, describing
/// the specialization constants handed to a shader stage.
///
/// NOTE(review): the pointer fields borrow caller-owned memory; the caller
/// must keep that memory alive for as long as Vulkan may read this struct.
#[repr(C)]
#[derive(Debug)]
pub struct VkSpecializationInfo {
    /// Number of entries in the array behind `pMapEntries`.
    pub mapEntryCount: u32,
    /// Pointer to the first map entry (null when `mapEntryCount` is 0).
    pub pMapEntries: *const VkSpecializationMapEntry,
    /// Size in bytes of the blob behind `pData`.
    pub dataSize: usize,
    /// Pointer to the raw specialization data (null when unset).
    pub pData: *const c_void,
}
impl VkSpecializationInfo {
    /// An info struct with no map entries and no data: count/size zero,
    /// both pointers null.
    pub fn empty() -> Self {
        VkSpecializationInfo {
            mapEntryCount: 0,
            pMapEntries: ptr::null(),
            dataSize: 0,
            pData: ptr::null(),
        }
    }
    /// Builds an info struct over `data` and `map_entries`.
    /// Both references must outlive every use of the returned struct.
    pub fn new<T>(data: &T, map_entries: &[VkSpecializationMapEntry]) -> Self {
        let mut info = Self::empty();
        info.set_map_entries(map_entries);
        info.set_data(data);
        info
    }
    /// Points this struct at a new slice of map entries.
    pub fn set_map_entries(&mut self, map_entries: &[VkSpecializationMapEntry]) {
        self.mapEntryCount = map_entries.len() as u32;
        self.pMapEntries = map_entries.as_ptr();
    }
    /// Points this struct at `data`, recording its size in bytes.
    pub fn set_data<T>(&mut self, data: &T) {
        self.dataSize = std::mem::size_of::<T>();
        self.pData = data as *const T as *const c_void;
    }
}
|
/// LeetCode 50 — Pow(x, n).
pub struct Solution;

impl Solution {
    /// Computes `x` raised to the integer power `n`.
    ///
    /// The recursion keeps `n` in the non-positive range (positive exponents
    /// immediately go through the reciprocal), so `i32::MIN` never has to be
    /// negated — which would overflow.
    pub fn my_pow(x: f64, n: i32) -> f64 {
        if n > 0 {
            return 1.0 / Solution::my_pow(x, -n);
        }
        if n == 0 {
            return 1.0;
        }
        if n % 2 == 0 {
            // Even exponent: square the half power.
            square(Solution::my_pow(x, n / 2))
        } else {
            // Odd (negative) exponent: peel one factor of x off.
            Solution::my_pow(x, n + 1) / x
        }
    }
}

/// Returns `x * x`.
fn square(x: f64) -> f64 {
    x * x
}
/// Sanity checks for `Solution::my_pow`: positive, fractional-base, and
/// negative exponents.
#[test]
fn test0050() {
    let pow = Solution::my_pow;
    assert_eq!(pow(2.0, 10), 1024.0);
    assert_eq!((pow(2.1, 3) * 1e6).round(), 9.261 * 1e6);
    assert_eq!(pow(2.0, -2), 0.25);
}
|
/* Python code is pre-scanned for symbols in the ast.
This ensures that global and nonlocal keywords are picked up.
Then the compiler can use the symbol table to generate proper
load and store instructions for names.
Inspirational file: https://github.com/python/cpython/blob/main/Python/symtable.c
*/
use crate::{
error::{CodegenError, CodegenErrorType},
IndexMap,
};
use bitflags::bitflags;
use rustpython_ast::{self as ast, located::Located};
use rustpython_parser_core::source_code::{LineNumber, SourceLocation};
use std::{borrow::Cow, fmt};
/// Captures all symbols in the current scope, and has a list of sub-scopes in this scope.
#[derive(Clone)]
pub struct SymbolTable {
    /// The name of this symbol table. Often the name of the class or function.
    pub name: String,
    /// The type of symbol table (module, class, function, comprehension).
    pub typ: SymbolTableType,
    /// The line number in the source code where this symboltable begins.
    pub line_number: u32,
    /// True if the block is a nested class or function.
    pub is_nested: bool,
    /// A set of symbols present on this scope level.
    pub symbols: IndexMap<String, Symbol>,
    /// A list of sub-scopes in the order as found in the
    /// AST nodes.
    pub sub_tables: Vec<SymbolTable>,
}
impl SymbolTable {
    /// Creates an empty table with no symbols and no sub-scopes.
    fn new(name: String, typ: SymbolTableType, line_number: u32, is_nested: bool) -> Self {
        SymbolTable {
            name,
            typ,
            line_number,
            is_nested,
            symbols: IndexMap::default(),
            sub_tables: Vec::new(),
        }
    }
    /// Builds and analyzes the symbol table for a whole program.
    pub fn scan_program(program: &[ast::located::Stmt]) -> SymbolTableResult<Self> {
        let mut builder = SymbolTableBuilder::new();
        builder.scan_statements(program)?;
        builder.finish()
    }
    /// Builds and analyzes the symbol table for a single expression.
    pub fn scan_expr(expr: &ast::located::Expr) -> SymbolTableResult<Self> {
        let mut builder = SymbolTableBuilder::new();
        builder.scan_expression(expr, ExpressionContext::Load)?;
        builder.finish()
    }
}
/// The kind of scope a `SymbolTable` describes.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SymbolTableType {
    /// Top-level module scope.
    Module,
    /// Class body scope.
    Class,
    /// Function body scope.
    Function,
    /// Comprehension scope.
    Comprehension,
}
impl fmt::Display for SymbolTableType {
    /// Writes the lowercase scope-kind name used in diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let label = match self {
            SymbolTableType::Module => "module",
            SymbolTableType::Class => "class",
            SymbolTableType::Function => "function",
            SymbolTableType::Comprehension => "comprehension",
        };
        f.write_str(label)
    }
}
/// Indicator for a single symbol what the scope of this symbol is.
/// The scope can be unknown, which is unfortunate, but not impossible.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SymbolScope {
    /// Not yet resolved by analysis.
    Unknown,
    /// Bound in the scope it appears in.
    Local,
    /// Declared `global` explicitly.
    GlobalExplicit,
    /// Resolved to module scope because no enclosing binding was found.
    GlobalImplicit,
    /// Refers to a binding in an enclosing (non-global) scope.
    Free,
    /// Bound here and captured by at least one inner scope.
    Cell,
}
bitflags! {
    /// Per-symbol usage facts accumulated while scanning the AST.
    #[derive(Copy, Clone, Debug, PartialEq)]
    pub struct SymbolFlags: u16 {
        const REFERENCED = 0x001;
        const ASSIGNED = 0x002;
        const PARAMETER = 0x004;
        const ANNOTATED = 0x008;
        const IMPORTED = 0x010;
        const NONLOCAL = 0x020;
        // indicates if the symbol gets a value assigned by a named expression in a comprehension
        // this is required to correct the scope in the analysis.
        const ASSIGNED_IN_COMPREHENSION = 0x040;
        // indicates that the symbol is used as a bound iterator variable. We distinguish this
        // case from normal assignment to detect disallowed re-assignment to iterator variables.
        const ITER = 0x080;
        /// indicates that the symbol is a free variable in a class method from the scope that the
        /// class is defined in, e.g.:
        /// ```python
        /// def foo(x):
        ///     class A:
        ///         def method(self):
        ///             return x // is_free_class
        /// ```
        const FREE_CLASS = 0x100;
        /// Union of every flag that binds the name in its scope.
        const BOUND = Self::ASSIGNED.bits() | Self::PARAMETER.bits() | Self::IMPORTED.bits() | Self::ITER.bits();
    }
}
/// A single symbol in a table. Has various properties such as the scope
/// of the symbol, and also the various uses of the symbol.
#[derive(Debug, Clone)]
pub struct Symbol {
    /// The identifier as written in source.
    pub name: String,
    /// Resolved (or not-yet-resolved) scope of the symbol.
    pub scope: SymbolScope,
    /// Usage facts gathered while scanning.
    pub flags: SymbolFlags,
}
impl Symbol {
    /// Creates a symbol with unknown scope and no usage flags set.
    fn new(name: &str) -> Self {
        Symbol {
            name: name.to_owned(),
            scope: SymbolScope::Unknown,
            flags: SymbolFlags::empty(),
        }
    }
    /// True when the symbol resolves to module level, whether declared
    /// `global` or resolved there implicitly.
    pub fn is_global(&self) -> bool {
        match self.scope {
            SymbolScope::GlobalExplicit | SymbolScope::GlobalImplicit => true,
            _ => false,
        }
    }
    /// True when the symbol lives in its defining scope (including cells).
    pub fn is_local(&self) -> bool {
        match self.scope {
            SymbolScope::Local | SymbolScope::Cell => true,
            _ => false,
        }
    }
    /// True when this scope binds the name: assignment, parameter, import,
    /// or iteration variable.
    pub fn is_bound(&self) -> bool {
        self.flags.intersects(SymbolFlags::BOUND)
    }
}
/// An error raised while building or analyzing a symbol table
/// (e.g. an invalid `global`/`nonlocal` declaration).
#[derive(Debug)]
pub struct SymbolTableError {
    /// Human-readable description of the problem.
    error: String,
    /// Source position, when one could be attributed.
    location: Option<SourceLocation>,
}
impl SymbolTableError {
    /// Converts this error into a `CodegenError` tagged with `source_path`,
    /// carrying the message as a syntax error.
    pub fn into_codegen_error(self, source_path: String) -> CodegenError {
        CodegenError {
            error: CodegenErrorType::SyntaxError(self.error),
            // `self.location` is already an Option<SourceLocation>; rebuilding
            // it field by field was redundant.
            location: self.location,
            source_path,
        }
    }
}
/// Shorthand result type for symbol-table operations; the success payload defaults to `()`.
type SymbolTableResult<T = ()> = Result<T, SymbolTableError>;
impl SymbolTable {
    /// Looks up `name` among the symbols declared directly in this scope;
    /// sub-scopes are not searched.
    pub fn lookup(&self, name: &str) -> Option<&Symbol> {
        self.symbols.get(name)
    }
}
impl std::fmt::Debug for SymbolTable {
    /// One-line summary with counts only; the full contents are deliberately
    /// not printed (tables can be deeply nested).
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let symbol_count = self.symbols.len();
        let scope_count = self.sub_tables.len();
        write!(
            f,
            "SymbolTable({:?} symbols, {:?} sub scopes)",
            symbol_count, scope_count
        )
    }
}
/* Perform some sort of analysis on nonlocals, globals etc..
See also: https://github.com/python/cpython/blob/main/Python/symtable.c#L410
*/
/// Runs scope analysis over a freshly built table tree, resolving each
/// symbol's `SymbolScope` in place.
fn analyze_symbol_table(symbol_table: &mut SymbolTable) -> SymbolTableResult {
    SymbolTableAnalyzer::default().analyze_symbol_table(symbol_table)
}
/// The symbols of one scope, keyed by name in insertion order.
type SymbolMap = IndexMap<String, Symbol>;
mod stack {
    use std::panic;
    use std::ptr::NonNull;
    /// A stack of borrowed `&mut T` references stored as raw pointers, so a
    /// recursive algorithm can inspect every enclosing frame while pushing
    /// new ones. Soundness rests on `with_append` popping each pointer
    /// before the borrow it was created from ends.
    pub struct StackStack<T> {
        v: Vec<NonNull<T>>,
    }
    impl<T> Default for StackStack<T> {
        fn default() -> Self {
            Self { v: Vec::new() }
        }
    }
    impl<T> StackStack<T> {
        /// Appends a reference to this stack for the duration of the function `f`. When `f`
        /// returns, the reference will be popped off the stack.
        pub fn with_append<F, R>(&mut self, x: &mut T, f: F) -> R
        where
            F: FnOnce(&mut Self) -> R,
        {
            self.v.push(x.into());
            // catch_unwind guarantees the pointer is popped even when `f`
            // panics, so no dangling entry can outlive the `&mut T` borrow.
            let res = panic::catch_unwind(panic::AssertUnwindSafe(|| f(self)));
            self.v.pop();
            res.unwrap_or_else(|x| panic::resume_unwind(x))
        }
        /// Iterates the stored references from the bottom of the stack up.
        pub fn iter(&self) -> impl Iterator<Item = &T> + DoubleEndedIterator + '_ {
            self.as_ref().iter().copied()
        }
        /// Mutable counterpart of [`Self::iter`].
        pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> + DoubleEndedIterator + '_ {
            self.as_mut().iter_mut().map(|x| &mut **x)
        }
        // pub fn top(&self) -> Option<&T> {
        //     self.as_ref().last().copied()
        // }
        // pub fn top_mut(&mut self) -> Option<&mut T> {
        //     self.as_mut().last_mut().map(|x| &mut **x)
        // }
        /// Number of references currently on the stack.
        pub fn len(&self) -> usize {
            self.v.len()
        }
        pub fn is_empty(&self) -> bool {
            self.len() == 0
        }
        /// Views the stack as a slice of shared references.
        // SAFETY: `NonNull<T>` and `&T` have the same layout, and every
        // pointer in `v` was created from a live `&mut T` that `with_append`
        // keeps borrowed for as long as the entry remains on the stack.
        pub fn as_ref(&self) -> &[&T] {
            unsafe { &*(self.v.as_slice() as *const [NonNull<T>] as *const [&T]) }
        }
        /// Views the stack as a slice of mutable references.
        // SAFETY: same layout argument as `as_ref`; exclusivity follows from
        // the `&mut self` receiver plus the exclusive borrows captured by
        // `with_append`.
        pub fn as_mut(&mut self) -> &mut [&mut T] {
            unsafe { &mut *(self.v.as_mut_slice() as *mut [NonNull<T>] as *mut [&mut T]) }
        }
    }
}
use stack::StackStack;
/// Symbol table analysis. Can be used to analyze a fully
/// built symbol table structure. It will mark variables
/// as local variables for example.
///
/// NOTE: `#[repr(transparent)]` is load-bearing — `analyze_symbol_table`
/// casts a `&mut StackStack<(SymbolMap, SymbolTableType)>` back to
/// `&mut SymbolTableAnalyzer`, which is only sound when both types share
/// the same layout.
#[derive(Default)]
#[repr(transparent)]
struct SymbolTableAnalyzer {
    /// One entry per enclosing scope: its symbols and its scope kind.
    tables: StackStack<(SymbolMap, SymbolTableType)>,
}
impl SymbolTableAnalyzer {
    /// Analyzes `symbol_table` and, recursively, all of its sub-tables.
    ///
    /// The table's own symbols are temporarily moved onto the scope stack so
    /// that analysis of inner scopes can read and update enclosing scopes.
    fn analyze_symbol_table(&mut self, symbol_table: &mut SymbolTable) -> SymbolTableResult {
        let symbols = std::mem::take(&mut symbol_table.symbols);
        let sub_tables = &mut *symbol_table.sub_tables;
        let mut info = (symbols, symbol_table.typ);
        self.tables.with_append(&mut info, |list| {
            // SAFETY: `SymbolTableAnalyzer` is `#[repr(transparent)]` over
            // `StackStack<(SymbolMap, SymbolTableType)>`, so this pointer
            // cast is between layout-compatible types.
            let inner_scope = unsafe { &mut *(list as *mut _ as *mut SymbolTableAnalyzer) };
            // Analyze sub scopes:
            for sub_table in sub_tables.iter_mut() {
                inner_scope.analyze_symbol_table(sub_table)?;
            }
            Ok(())
        })?;
        // Move the (possibly updated) symbols back into the table.
        symbol_table.symbols = info.0;
        // Analyze symbols:
        for symbol in symbol_table.symbols.values_mut() {
            self.analyze_symbol(symbol, symbol_table.typ, sub_tables)?;
        }
        Ok(())
    }
    /// Resolves the final `SymbolScope` of one symbol, given the kind of
    /// scope it lives in (`st_typ`) and that scope's sub-tables.
    fn analyze_symbol(
        &mut self,
        symbol: &mut Symbol,
        st_typ: SymbolTableType,
        sub_tables: &mut [SymbolTable],
    ) -> SymbolTableResult {
        if symbol
            .flags
            .contains(SymbolFlags::ASSIGNED_IN_COMPREHENSION)
            && st_typ == SymbolTableType::Comprehension
        {
            // propagate symbol to next higher level that can hold it,
            // i.e., function or module. Comprehension is skipped and
            // Class is not allowed and detected as error.
            //symbol.scope = SymbolScope::Nonlocal;
            self.analyze_symbol_comprehension(symbol, 0)?
        } else {
            match symbol.scope {
                SymbolScope::Free => {
                    // Free here stems from a `nonlocal` declaration (see the
                    // error messages): a binding must exist in some enclosing
                    // non-global scope.
                    if !self.tables.as_ref().is_empty() {
                        let scope_depth = self.tables.as_ref().len();
                        // check if the name is already defined in any outer scope
                        // therefore
                        if scope_depth < 2
                            || self.found_in_outer_scope(&symbol.name) != Some(SymbolScope::Free)
                        {
                            return Err(SymbolTableError {
                                error: format!("no binding for nonlocal '{}' found", symbol.name),
                                // TODO: accurate location info, somehow
                                location: None,
                            });
                        }
                    } else {
                        return Err(SymbolTableError {
                            error: format!(
                                "nonlocal {} defined at place without an enclosing scope",
                                symbol.name
                            ),
                            // TODO: accurate location info, somehow
                            location: None,
                        });
                    }
                }
                SymbolScope::GlobalExplicit | SymbolScope::GlobalImplicit => {
                    // TODO: add more checks for globals?
                }
                SymbolScope::Local | SymbolScope::Cell => {
                    // all is well
                }
                SymbolScope::Unknown => {
                    // Try hard to figure out what the scope of this symbol is.
                    let scope = if symbol.is_bound() {
                        // Bound names are local unless an inner scope needs
                        // them as a cell (or global) variable.
                        self.found_in_inner_scope(sub_tables, &symbol.name, st_typ)
                            .unwrap_or(SymbolScope::Local)
                    } else if let Some(scope) = self.found_in_outer_scope(&symbol.name) {
                        scope
                    } else if self.tables.is_empty() {
                        // Don't make assumptions when we don't know.
                        SymbolScope::Unknown
                    } else {
                        // If there are scopes above we assume global.
                        SymbolScope::GlobalImplicit
                    };
                    symbol.scope = scope;
                }
            }
        }
        Ok(())
    }
    /// Searches enclosing scopes for a binding of `name`. On success it also
    /// threads a free-variable entry through every intermediate scope so the
    /// closure chain can reach the defining cell.
    fn found_in_outer_scope(&mut self, name: &str) -> Option<SymbolScope> {
        let mut decl_depth = None;
        for (i, (symbols, typ)) in self.tables.iter().rev().enumerate() {
            // Module scope and class scopes (except for `__class__`) are
            // skipped: they do not participate in closure lookup.
            if matches!(typ, SymbolTableType::Module)
                || matches!(typ, SymbolTableType::Class if name != "__class__")
            {
                continue;
            }
            if let Some(sym) = symbols.get(name) {
                match sym.scope {
                    SymbolScope::GlobalExplicit => return Some(SymbolScope::GlobalExplicit),
                    SymbolScope::GlobalImplicit => {}
                    _ => {
                        if sym.is_bound() {
                            decl_depth = Some(i);
                            break;
                        }
                    }
                }
            }
        }
        if let Some(decl_depth) = decl_depth {
            // decl_depth is the number of tables between the current one and
            // the one that declared the cell var
            for (table, typ) in self.tables.iter_mut().rev().take(decl_depth) {
                if let SymbolTableType::Class = typ {
                    if let Some(free_class) = table.get_mut(name) {
                        free_class.flags.insert(SymbolFlags::FREE_CLASS)
                    } else {
                        let mut symbol = Symbol::new(name);
                        symbol.flags.insert(SymbolFlags::FREE_CLASS);
                        symbol.scope = SymbolScope::Free;
                        table.insert(name.to_owned(), symbol);
                    }
                } else if !table.contains_key(name) {
                    let mut symbol = Symbol::new(name);
                    symbol.scope = SymbolScope::Free;
                    // symbol.is_referenced = true;
                    table.insert(name.to_owned(), symbol);
                }
            }
        }
        decl_depth.map(|_| SymbolScope::Free)
    }
    /// Checks whether any direct sub-scope uses `name` as a free variable;
    /// if so the name becomes a cell in this scope (or stays a global when an
    /// inner scope declared it `global` and we are at module level).
    fn found_in_inner_scope(
        &self,
        sub_tables: &[SymbolTable],
        name: &str,
        st_typ: SymbolTableType,
    ) -> Option<SymbolScope> {
        sub_tables.iter().find_map(|st| {
            let sym = st.symbols.get(name)?;
            if sym.scope == SymbolScope::Free || sym.flags.contains(SymbolFlags::FREE_CLASS) {
                if st_typ == SymbolTableType::Class && name != "__class__" {
                    None
                } else {
                    Some(SymbolScope::Cell)
                }
            } else if sym.scope == SymbolScope::GlobalExplicit && self.tables.is_empty() {
                // the symbol is defined on the module level, and an inner scope declares
                // a global that points to it
                Some(SymbolScope::GlobalExplicit)
            } else {
                None
            }
        })
    }
    // Implements the symbol analysis and scope extension for names
    // assigned by a named expression in a comprehension. See:
    // https://github.com/python/cpython/blob/7b78e7f9fd77bb3280ee39fb74b86772a7d46a70/Python/symtable.c#L1435
    fn analyze_symbol_comprehension(
        &mut self,
        symbol: &mut Symbol,
        parent_offset: usize,
    ) -> SymbolTableResult {
        // when this is called, we expect to be in the direct parent scope of the scope that contains 'symbol'
        let last = self.tables.iter_mut().rev().nth(parent_offset).unwrap();
        let symbols = &mut last.0;
        let table_type = last.1;
        // it is not allowed to use an iterator variable as assignee in a named expression
        if symbol.flags.contains(SymbolFlags::ITER) {
            return Err(SymbolTableError {
                error: format!(
                    "assignment expression cannot rebind comprehension iteration variable {}",
                    symbol.name
                ),
                // TODO: accurate location info, somehow
                location: None,
            });
        }
        match table_type {
            SymbolTableType::Module => {
                symbol.scope = SymbolScope::GlobalImplicit;
            }
            SymbolTableType::Class => {
                // named expressions are forbidden in comprehensions on class scope
                return Err(SymbolTableError {
                    error: "assignment expression within a comprehension cannot be used in a class body".to_string(),
                    // TODO: accurate location info, somehow
                    location: None,
                });
            }
            SymbolTableType::Function => {
                if let Some(parent_symbol) = symbols.get_mut(&symbol.name) {
                    if let SymbolScope::Unknown = parent_symbol.scope {
                        // this information is new, as the assignment is done in inner scope
                        parent_symbol.flags.insert(SymbolFlags::ASSIGNED);
                    }
                    symbol.scope = if parent_symbol.is_global() {
                        parent_symbol.scope
                    } else {
                        SymbolScope::Free
                    };
                } else {
                    // The function scope did not know the name yet: create a
                    // cell for it so the comprehension can close over it.
                    let mut cloned_sym = symbol.clone();
                    cloned_sym.scope = SymbolScope::Cell;
                    last.0.insert(cloned_sym.name.to_owned(), cloned_sym);
                }
            }
            SymbolTableType::Comprehension => {
                // TODO check for conflicts - requires more context information about variables
                match symbols.get_mut(&symbol.name) {
                    Some(parent_symbol) => {
                        // check if assignee is an iterator in top scope
                        if parent_symbol.flags.contains(SymbolFlags::ITER) {
                            return Err(SymbolTableError {
                                error: format!("assignment expression cannot rebind comprehension iteration variable {}", symbol.name),
                                location: None,
                            });
                        }
                        // we synthesize the assignment to the symbol from inner scope
                        parent_symbol.flags.insert(SymbolFlags::ASSIGNED); // more checks are required
                    }
                    None => {
                        // extend the scope of the inner symbol
                        // as we are in a nested comprehension, we expect that the symbol is needed
                        // outside, too, and set it therefore to non-local scope. I.e., we expect to
                        // find a definition on a higher level
                        let mut cloned_sym = symbol.clone();
                        cloned_sym.scope = SymbolScope::Free;
                        last.0.insert(cloned_sym.name.to_owned(), cloned_sym);
                    }
                }
                // Keep climbing until a function or module scope absorbs it.
                self.analyze_symbol_comprehension(symbol, parent_offset + 1)?;
            }
        }
        Ok(())
    }
}
/// How a name was used at one occurrence site; collected while walking the
/// AST and later folded into `SymbolFlags`/`SymbolScope`.
#[derive(Debug, Clone)]
enum SymbolUsage {
    /// Declared with the `global` keyword.
    Global,
    /// Declared with the `nonlocal` keyword.
    Nonlocal,
    /// Read (loaded) only.
    Used,
    /// Target of an assignment.
    Assigned,
    /// Bound by an `import` statement.
    Imported,
    /// Target of an annotated assignment (`x: T` / `x: T = ...`).
    AnnotationAssigned,
    /// Plain function parameter.
    Parameter,
    /// Function parameter carrying an annotation.
    AnnotationParameter,
    /// Bound by `:=` inside a comprehension.
    AssignedNamedExprInComprehension,
    /// Bound as an iteration variable.
    Iter,
}
/// Walks the AST and produces a tree of `SymbolTable`s; open scopes are kept
/// on a stack for the duration of the walk.
struct SymbolTableBuilder {
    /// Name of the class currently being scanned, if any.
    class_name: Option<String>,
    // Scope stack.
    tables: Vec<SymbolTable>,
    /// Set once `from __future__ import annotations` is seen; annotations are
    /// then no longer scanned as expressions.
    future_annotations: bool,
}
/// Enum to indicate in what mode an expression
/// was used.
/// In cpython this is stored in the AST, but I think this
/// is not logical, since it is not context free.
#[derive(Copy, Clone, PartialEq)]
enum ExpressionContext {
    /// The expression's value is read.
    Load,
    /// The expression is an assignment target.
    Store,
    /// The expression is the target of a `del` statement.
    Delete,
    /// The expression is bound as an iteration variable.
    Iter,
    /// The expression defines the iterator of a comprehension.
    IterDefinitionExp,
}
impl SymbolTableBuilder {
fn new() -> Self {
let mut this = Self {
class_name: None,
tables: vec![],
future_annotations: false,
};
this.enter_scope("top", SymbolTableType::Module, 0);
this
}
}
impl SymbolTableBuilder {
/// Consumes the builder, runs scope analysis, and returns the completed
/// top-level table. Panics if any scope was left unclosed.
fn finish(mut self) -> Result<SymbolTable, SymbolTableError> {
    assert_eq!(self.tables.len(), 1);
    let mut top_table = self.tables.pop().unwrap();
    analyze_symbol_table(&mut top_table)?;
    Ok(top_table)
}
/// Pushes a fresh scope; it counts as nested when the enclosing scope is a
/// function or is itself nested.
fn enter_scope(&mut self, name: &str, typ: SymbolTableType, line_number: u32) {
    let is_nested = matches!(
        self.tables.last(),
        Some(table) if table.is_nested || table.typ == SymbolTableType::Function
    );
    self.tables
        .push(SymbolTable::new(name.to_owned(), typ, line_number, is_nested));
}
/// Pops the current scope and records it as a sub-scope of its parent.
fn leave_scope(&mut self) {
    let finished = self.tables.pop().unwrap();
    let parent = self.tables.last_mut().unwrap();
    parent.sub_tables.push(finished);
}
/// Scans every statement in order, stopping at the first error.
fn scan_statements(&mut self, statements: &[ast::located::Stmt]) -> SymbolTableResult {
    statements
        .iter()
        .try_for_each(|statement| self.scan_statement(statement))
}
/// Registers every parameter of a function, marking annotated parameters
/// separately from plain ones.
fn scan_parameters(
    &mut self,
    parameters: &[ast::located::ArgWithDefault],
) -> SymbolTableResult {
    for parameter in parameters {
        let usage = match parameter.def.annotation {
            Some(_) => SymbolUsage::AnnotationParameter,
            None => SymbolUsage::Parameter,
        };
        self.register_name(parameter.def.arg.as_str(), usage, parameter.def.location())?;
    }
    Ok(())
}
/// Registers a single (possibly annotated) parameter.
fn scan_parameter(&mut self, parameter: &ast::located::Arg) -> SymbolTableResult {
    let usage = match parameter.annotation {
        Some(_) => SymbolUsage::AnnotationParameter,
        None => SymbolUsage::Parameter,
    };
    self.register_name(parameter.arg.as_str(), usage, parameter.location())
}
/// Scans an annotation expression — unless `from __future__ import
/// annotations` is active, in which case annotations bind no names.
fn scan_annotation(&mut self, annotation: &ast::located::Expr) -> SymbolTableResult {
    if self.future_annotations {
        return Ok(());
    }
    self.scan_expression(annotation, ExpressionContext::Load)
}
/// Registers every name bound or referenced by `statement` in the current
/// scope, entering and leaving nested scopes for function and class bodies.
fn scan_statement(&mut self, statement: &ast::located::Stmt) -> SymbolTableResult {
    use ast::located::*;
    // `from __future__ import annotations` disables annotation scanning for
    // the rest of the module.
    if let Stmt::ImportFrom(StmtImportFrom { module, names, .. }) = &statement {
        if module.as_ref().map(|id| id.as_str()) == Some("__future__") {
            for feature in names {
                if &feature.name == "annotations" {
                    self.future_annotations = true;
                }
            }
        }
    }
    match &statement {
        Stmt::Global(StmtGlobal { names, range }) => {
            for name in names {
                self.register_name(name.as_str(), SymbolUsage::Global, range.start)?;
            }
        }
        Stmt::Nonlocal(StmtNonlocal { names, range }) => {
            for name in names {
                self.register_name(name.as_str(), SymbolUsage::Nonlocal, range.start)?;
            }
        }
        Stmt::FunctionDef(StmtFunctionDef {
            name,
            body,
            args,
            decorator_list,
            returns,
            range,
            ..
        })
        | Stmt::AsyncFunctionDef(StmtAsyncFunctionDef {
            name,
            body,
            args,
            decorator_list,
            returns,
            range,
            ..
        }) => {
            // Decorators, the function name, and the return annotation are
            // all handled in the enclosing scope before entering the
            // function's own scope.
            self.scan_expressions(decorator_list, ExpressionContext::Load)?;
            self.register_name(name.as_str(), SymbolUsage::Assigned, range.start)?;
            if let Some(expression) = returns {
                self.scan_annotation(expression)?;
            }
            self.enter_function(name.as_str(), args, range.start.row)?;
            self.scan_statements(body)?;
            self.leave_scope();
        }
        Stmt::ClassDef(StmtClassDef {
            name,
            body,
            bases,
            keywords,
            decorator_list,
            type_params: _,
            range,
        }) => {
            self.enter_scope(name.as_str(), SymbolTableType::Class, range.start.row.get());
            // Track the class currently being scanned (restored below) and
            // pre-register the implicit class-body dunders.
            let prev_class = std::mem::replace(&mut self.class_name, Some(name.to_string()));
            self.register_name("__module__", SymbolUsage::Assigned, range.start)?;
            self.register_name("__qualname__", SymbolUsage::Assigned, range.start)?;
            self.register_name("__doc__", SymbolUsage::Assigned, range.start)?;
            self.register_name("__class__", SymbolUsage::Assigned, range.start)?;
            self.scan_statements(body)?;
            self.leave_scope();
            self.class_name = prev_class;
            // Bases, keywords, and decorators are evaluated outside the
            // class scope; the class name binds in the enclosing scope.
            self.scan_expressions(bases, ExpressionContext::Load)?;
            for keyword in keywords {
                self.scan_expression(&keyword.value, ExpressionContext::Load)?;
            }
            self.scan_expressions(decorator_list, ExpressionContext::Load)?;
            self.register_name(name.as_str(), SymbolUsage::Assigned, range.start)?;
        }
        Stmt::Expr(StmtExpr { value, .. }) => {
            self.scan_expression(value, ExpressionContext::Load)?
        }
        Stmt::If(StmtIf {
            test, body, orelse, ..
        }) => {
            self.scan_expression(test, ExpressionContext::Load)?;
            self.scan_statements(body)?;
            self.scan_statements(orelse)?;
        }
        Stmt::For(StmtFor {
            target,
            iter,
            body,
            orelse,
            ..
        })
        | Stmt::AsyncFor(StmtAsyncFor {
            target,
            iter,
            body,
            orelse,
            ..
        }) => {
            self.scan_expression(target, ExpressionContext::Store)?;
            self.scan_expression(iter, ExpressionContext::Load)?;
            self.scan_statements(body)?;
            self.scan_statements(orelse)?;
        }
        Stmt::While(StmtWhile {
            test, body, orelse, ..
        }) => {
            self.scan_expression(test, ExpressionContext::Load)?;
            self.scan_statements(body)?;
            self.scan_statements(orelse)?;
        }
        Stmt::Break(_) | Stmt::Continue(_) | Stmt::Pass(_) => {
            // No symbols here.
        }
        Stmt::Import(StmtImport { names, range })
        | Stmt::ImportFrom(StmtImportFrom { names, range, .. }) => {
            for name in names {
                if let Some(alias) = &name.asname {
                    // `import my_module as my_alias`
                    self.register_name(alias.as_str(), SymbolUsage::Imported, range.start)?;
                } else {
                    // `import module` — only the first dotted component binds.
                    self.register_name(
                        name.name.split('.').next().unwrap(),
                        SymbolUsage::Imported,
                        range.start,
                    )?;
                }
            }
        }
        Stmt::Return(StmtReturn { value, .. }) => {
            if let Some(expression) = value {
                self.scan_expression(expression, ExpressionContext::Load)?;
            }
        }
        Stmt::Assert(StmtAssert { test, msg, .. }) => {
            self.scan_expression(test, ExpressionContext::Load)?;
            if let Some(expression) = msg {
                self.scan_expression(expression, ExpressionContext::Load)?;
            }
        }
        Stmt::Delete(StmtDelete { targets, .. }) => {
            self.scan_expressions(targets, ExpressionContext::Delete)?;
        }
        Stmt::Assign(StmtAssign { targets, value, .. }) => {
            self.scan_expressions(targets, ExpressionContext::Store)?;
            self.scan_expression(value, ExpressionContext::Load)?;
        }
        Stmt::AugAssign(StmtAugAssign { target, value, .. }) => {
            self.scan_expression(target, ExpressionContext::Store)?;
            self.scan_expression(value, ExpressionContext::Load)?;
        }
        Stmt::AnnAssign(StmtAnnAssign {
            target,
            annotation,
            value,
            simple,
            range,
        }) => {
            // https://github.com/python/cpython/blob/main/Python/symtable.c#L1233
            match &**target {
                Expr::Name(ast::ExprName { id, .. }) if *simple => {
                    self.register_name(
                        id.as_str(),
                        SymbolUsage::AnnotationAssigned,
                        range.start,
                    )?;
                }
                _ => {
                    self.scan_expression(target, ExpressionContext::Store)?;
                }
            }
            self.scan_annotation(annotation)?;
            if let Some(value) = value {
                self.scan_expression(value, ExpressionContext::Load)?;
            }
        }
        Stmt::With(StmtWith { items, body, .. })
        | Stmt::AsyncWith(StmtAsyncWith { items, body, .. }) => {
            for item in items {
                self.scan_expression(&item.context_expr, ExpressionContext::Load)?;
                if let Some(expression) = &item.optional_vars {
                    self.scan_expression(expression, ExpressionContext::Store)?;
                }
            }
            self.scan_statements(body)?;
        }
        Stmt::Try(StmtTry {
            body,
            handlers,
            orelse,
            finalbody,
            range,
        })
        | Stmt::TryStar(StmtTryStar {
            body,
            handlers,
            orelse,
            finalbody,
            range,
        }) => {
            self.scan_statements(body)?;
            for handler in handlers {
                let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
                    type_,
                    name,
                    body,
                    ..
                }) = &handler;
                if let Some(expression) = type_ {
                    self.scan_expression(expression, ExpressionContext::Load)?;
                }
                if let Some(name) = name {
                    // `except E as name:` binds `name` as an assignment.
                    self.register_name(name.as_str(), SymbolUsage::Assigned, range.start)?;
                }
                self.scan_statements(body)?;
            }
            self.scan_statements(orelse)?;
            self.scan_statements(finalbody)?;
        }
        Stmt::Match(StmtMatch { subject, .. }) => {
            return Err(SymbolTableError {
                error: "match expression is not implemented yet".to_owned(),
                location: Some(subject.location()),
            });
        }
        Stmt::Raise(StmtRaise { exc, cause, .. }) => {
            if let Some(expression) = exc {
                self.scan_expression(expression, ExpressionContext::Load)?;
            }
            if let Some(expression) = cause {
                self.scan_expression(expression, ExpressionContext::Load)?;
            }
        }
        Stmt::TypeAlias(StmtTypeAlias { .. }) => {}
    }
    Ok(())
}
/// Scans each expression with the same usage context, stopping on error.
fn scan_expressions(
    &mut self,
    expressions: &[ast::located::Expr],
    context: ExpressionContext,
) -> SymbolTableResult {
    expressions
        .iter()
        .try_for_each(|expression| self.scan_expression(expression, context))
}
/// Walk one expression and register every symbol it touches in the
/// current scope, using `context` to decide load/store/iter usage.
fn scan_expression(
    &mut self,
    expression: &ast::located::Expr,
    context: ExpressionContext,
) -> SymbolTableResult {
    use ast::located::*;
    match expression {
        Expr::BinOp(ExprBinOp {
            left,
            right,
            range: _,
            ..
        }) => {
            self.scan_expression(left, context)?;
            self.scan_expression(right, context)?;
        }
        Expr::BoolOp(ExprBoolOp {
            values, range: _, ..
        }) => {
            self.scan_expressions(values, context)?;
        }
        Expr::Compare(ExprCompare {
            left,
            comparators,
            range: _,
            ..
        }) => {
            self.scan_expression(left, context)?;
            self.scan_expressions(comparators, context)?;
        }
        Expr::Subscript(ExprSubscript {
            value,
            slice,
            range: _,
            ..
        }) => {
            // Container and index are always reads, regardless of `context`.
            self.scan_expression(value, ExpressionContext::Load)?;
            self.scan_expression(slice, ExpressionContext::Load)?;
        }
        Expr::Attribute(ExprAttribute {
            value, range: _, ..
        }) => {
            self.scan_expression(value, ExpressionContext::Load)?;
        }
        Expr::Dict(ExprDict {
            keys,
            values,
            range: _,
        }) => {
            // A `None` key corresponds to a `**mapping` unpacking entry.
            for (key, value) in keys.iter().zip(values.iter()) {
                if let Some(key) = key {
                    self.scan_expression(key, context)?;
                }
                self.scan_expression(value, context)?;
            }
        }
        Expr::Await(ExprAwait { value, range: _ }) => {
            self.scan_expression(value, context)?;
        }
        Expr::Yield(ExprYield { value, range: _ }) => {
            if let Some(expression) = value {
                self.scan_expression(expression, context)?;
            }
        }
        Expr::YieldFrom(ExprYieldFrom { value, range: _ }) => {
            self.scan_expression(value, context)?;
        }
        Expr::UnaryOp(ExprUnaryOp {
            operand, range: _, ..
        }) => {
            self.scan_expression(operand, context)?;
        }
        Expr::Constant(ExprConstant { range: _, .. }) => {}
        Expr::Starred(ExprStarred {
            value, range: _, ..
        }) => {
            self.scan_expression(value, context)?;
        }
        Expr::Tuple(ExprTuple { elts, range: _, .. })
        | Expr::Set(ExprSet { elts, range: _, .. })
        | Expr::List(ExprList { elts, range: _, .. }) => {
            self.scan_expressions(elts, context)?;
        }
        Expr::Slice(ExprSlice {
            lower,
            upper,
            step,
            range: _,
        }) => {
            if let Some(lower) = lower {
                self.scan_expression(lower, context)?;
            }
            if let Some(upper) = upper {
                self.scan_expression(upper, context)?;
            }
            if let Some(step) = step {
                self.scan_expression(step, context)?;
            }
        }
        // NOTE: every comprehension scope is currently named "genexpr",
        // even list/set/dict comprehensions.
        Expr::GeneratorExp(ExprGeneratorExp {
            elt,
            generators,
            range,
        }) => {
            self.scan_comprehension("genexpr", elt, None, generators, range.start)?;
        }
        Expr::ListComp(ExprListComp {
            elt,
            generators,
            range,
        }) => {
            self.scan_comprehension("genexpr", elt, None, generators, range.start)?;
        }
        Expr::SetComp(ExprSetComp {
            elt,
            generators,
            range,
        }) => {
            self.scan_comprehension("genexpr", elt, None, generators, range.start)?;
        }
        Expr::DictComp(ExprDictComp {
            key,
            value,
            generators,
            range,
        }) => {
            self.scan_comprehension("genexpr", key, Some(value), generators, range.start)?;
        }
        Expr::Call(ExprCall {
            func,
            args,
            keywords,
            range: _,
        }) => {
            // Only the callee inherits the iterator-definition context;
            // arguments are always plain loads.
            match context {
                ExpressionContext::IterDefinitionExp => {
                    self.scan_expression(func, ExpressionContext::IterDefinitionExp)?;
                }
                _ => {
                    self.scan_expression(func, ExpressionContext::Load)?;
                }
            }
            self.scan_expressions(args, ExpressionContext::Load)?;
            for keyword in keywords {
                self.scan_expression(&keyword.value, ExpressionContext::Load)?;
            }
        }
        Expr::FormattedValue(ExprFormattedValue {
            value,
            format_spec,
            range: _,
            ..
        }) => {
            self.scan_expression(value, ExpressionContext::Load)?;
            if let Some(spec) = format_spec {
                self.scan_expression(spec, ExpressionContext::Load)?;
            }
        }
        Expr::JoinedStr(ExprJoinedStr { values, range: _ }) => {
            for value in values {
                self.scan_expression(value, ExpressionContext::Load)?;
            }
        }
        Expr::Name(ExprName { id, range, .. }) => {
            let id = id.as_str();
            // Determine the contextual usage of this symbol:
            match context {
                ExpressionContext::Delete => {
                    // `del x` both requires the binding and removes it.
                    self.register_name(id, SymbolUsage::Assigned, range.start)?;
                    self.register_name(id, SymbolUsage::Used, range.start)?;
                }
                ExpressionContext::Load | ExpressionContext::IterDefinitionExp => {
                    self.register_name(id, SymbolUsage::Used, range.start)?;
                }
                ExpressionContext::Store => {
                    self.register_name(id, SymbolUsage::Assigned, range.start)?;
                }
                ExpressionContext::Iter => {
                    self.register_name(id, SymbolUsage::Iter, range.start)?;
                }
            }
            // Interesting stuff about the __class__ variable:
            // https://docs.python.org/3/reference/datamodel.html?highlight=__class__#creating-the-class-object
            if context == ExpressionContext::Load
                && self.tables.last().unwrap().typ == SymbolTableType::Function
                && id == "super"
            {
                self.register_name("__class__", SymbolUsage::Used, range.start)?;
            }
        }
        Expr::Lambda(ExprLambda {
            args,
            body,
            range: _,
        }) => {
            self.enter_function("lambda", args, expression.location().row)?;
            match context {
                ExpressionContext::IterDefinitionExp => {
                    self.scan_expression(body, ExpressionContext::IterDefinitionExp)?;
                }
                _ => {
                    self.scan_expression(body, ExpressionContext::Load)?;
                }
            }
            self.leave_scope();
        }
        Expr::IfExp(ExprIfExp {
            test,
            body,
            orelse,
            range: _,
        }) => {
            self.scan_expression(test, ExpressionContext::Load)?;
            self.scan_expression(body, ExpressionContext::Load)?;
            self.scan_expression(orelse, ExpressionContext::Load)?;
        }
        Expr::NamedExpr(ExprNamedExpr {
            target,
            value,
            range,
        }) => {
            // named expressions are not allowed in the definition of
            // comprehension iterator definitions
            if let ExpressionContext::IterDefinitionExp = context {
                return Err(SymbolTableError {
                    error: "assignment expression cannot be used in a comprehension iterable expression".to_string(),
                    location: Some(target.location()),
                });
            }
            self.scan_expression(value, ExpressionContext::Load)?;
            // special handling for assigned identifier in named expressions
            // that are used in comprehensions. This required to correctly
            // propagate the scope of the named assigned named and not to
            // propagate inner names.
            if let Expr::Name(ExprName { id, .. }) = &**target {
                let id = id.as_str();
                let table = self.tables.last().unwrap();
                if table.typ == SymbolTableType::Comprehension {
                    self.register_name(
                        id,
                        SymbolUsage::AssignedNamedExprInComprehension,
                        range.start,
                    )?;
                } else {
                    // omit one recursion. When the handling of an store changes for
                    // Identifiers this needs adapted - more forward safe would be
                    // calling scan_expression directly.
                    self.register_name(id, SymbolUsage::Assigned, range.start)?;
                }
            } else {
                self.scan_expression(target, ExpressionContext::Store)?;
            }
        }
    }
    Ok(())
}
/// Scan a comprehension (generator/list/set/dict) and its generators.
///
/// `elt2` is only `Some` for dict comprehensions (the value expression).
fn scan_comprehension(
    &mut self,
    scope_name: &str,
    elt1: &ast::located::Expr,
    elt2: Option<&ast::located::Expr>,
    generators: &[ast::located::Comprehension],
    location: SourceLocation,
) -> SymbolTableResult {
    // Comprehensions are compiled as functions, so create a scope for them:
    self.enter_scope(
        scope_name,
        SymbolTableType::Comprehension,
        location.row.get(),
    );
    // Register the passed argument to the generator function as the name ".0"
    self.register_name(".0", SymbolUsage::Parameter, location)?;
    self.scan_expression(elt1, ExpressionContext::Load)?;
    if let Some(elt2) = elt2 {
        self.scan_expression(elt2, ExpressionContext::Load)?;
    }
    let mut is_first_generator = true;
    for generator in generators {
        self.scan_expression(&generator.target, ExpressionContext::Iter)?;
        if is_first_generator {
            // The first iterable is evaluated in the *enclosing* scope — see
            // the scan after `leave_scope` below — so skip it here.
            is_first_generator = false;
        } else {
            self.scan_expression(&generator.iter, ExpressionContext::IterDefinitionExp)?;
        }
        for if_expr in &generator.ifs {
            self.scan_expression(if_expr, ExpressionContext::Load)?;
        }
    }
    self.leave_scope();
    // The first iterable is passed as an argument into the created function:
    assert!(!generators.is_empty());
    self.scan_expression(&generators[0].iter, ExpressionContext::IterDefinitionExp)?;
    Ok(())
}
/// Scan a function's default values and annotations in the *enclosing*
/// scope, then open the function scope and register its parameters.
/// Callers must eventually call `leave_scope`.
fn enter_function(
    &mut self,
    name: &str,
    args: &ast::located::Arguments,
    line_number: LineNumber,
) -> SymbolTableResult {
    // Evaluate eventual default parameters:
    for default in args
        .posonlyargs
        .iter()
        .chain(args.args.iter())
        .chain(args.kwonlyargs.iter())
        .filter_map(|arg| arg.default.as_ref())
    {
        self.scan_expression(default, ExpressionContext::Load)?; // not ExprContext?
    }
    // Annotations are scanned in outer scope:
    for annotation in args
        .posonlyargs
        .iter()
        .chain(args.args.iter())
        .chain(args.kwonlyargs.iter())
        .filter_map(|arg| arg.def.annotation.as_ref())
    {
        self.scan_annotation(annotation)?;
    }
    if let Some(annotation) = args.vararg.as_ref().and_then(|arg| arg.annotation.as_ref()) {
        self.scan_annotation(annotation)?;
    }
    if let Some(annotation) = args.kwarg.as_ref().and_then(|arg| arg.annotation.as_ref()) {
        self.scan_annotation(annotation)?;
    }
    self.enter_scope(name, SymbolTableType::Function, line_number.get());
    // Fill scope with parameter names:
    self.scan_parameters(&args.posonlyargs)?;
    self.scan_parameters(&args.args)?;
    self.scan_parameters(&args.kwonlyargs)?;
    if let Some(name) = &args.vararg {
        self.scan_parameter(name)?;
    }
    if let Some(name) = &args.kwarg {
        self.scan_parameter(name)?;
    }
    Ok(())
}
/// Record one usage of `name` (with the given `role`) in the innermost
/// scope, after class-private mangling, enforcing the `global`/`nonlocal`
/// declaration rules.
fn register_name(
    &mut self,
    name: &str,
    role: SymbolUsage,
    location: SourceLocation,
) -> SymbolTableResult {
    let location = Some(location);
    let scope_depth = self.tables.len();
    let table = self.tables.last_mut().unwrap();
    // `__x` inside `class C` becomes `_C__x`.
    let name = mangle_name(self.class_name.as_deref(), name);
    // Some checks for the symbol that present on this scope level:
    let symbol = if let Some(symbol) = table.symbols.get_mut(name.as_ref()) {
        let flags = &symbol.flags;
        // Role already set..
        match role {
            SymbolUsage::Global if !symbol.is_global() => {
                if flags.contains(SymbolFlags::PARAMETER) {
                    return Err(SymbolTableError {
                        error: format!("name '{name}' is parameter and global"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::REFERENCED) {
                    return Err(SymbolTableError {
                        error: format!("name '{name}' is used prior to global declaration"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::ANNOTATED) {
                    return Err(SymbolTableError {
                        error: format!("annotated name '{name}' can't be global"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::ASSIGNED) {
                    return Err(SymbolTableError {
                        error: format!(
                            "name '{name}' is assigned to before global declaration"
                        ),
                        location,
                    });
                }
            }
            SymbolUsage::Nonlocal => {
                if flags.contains(SymbolFlags::PARAMETER) {
                    return Err(SymbolTableError {
                        error: format!("name '{name}' is parameter and nonlocal"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::REFERENCED) {
                    return Err(SymbolTableError {
                        error: format!("name '{name}' is used prior to nonlocal declaration"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::ANNOTATED) {
                    return Err(SymbolTableError {
                        error: format!("annotated name '{name}' can't be nonlocal"),
                        location,
                    });
                }
                if flags.contains(SymbolFlags::ASSIGNED) {
                    return Err(SymbolTableError {
                        error: format!(
                            "name '{name}' is assigned to before nonlocal declaration"
                        ),
                        location,
                    });
                }
            }
            _ => {
                // Ok?
            }
        }
        symbol
    } else {
        // The symbol does not present on this scope level.
        // Some checks to insert new symbol into symbol table:
        match role {
            SymbolUsage::Nonlocal if scope_depth < 2 => {
                return Err(SymbolTableError {
                    error: format!("cannot define nonlocal '{name}' at top level."),
                    location,
                })
            }
            _ => {
                // Ok!
            }
        }
        // Insert symbol when required:
        let symbol = Symbol::new(name.as_ref());
        table.symbols.entry(name.into_owned()).or_insert(symbol)
    };
    // Set proper scope and flags on symbol:
    let flags = &mut symbol.flags;
    match role {
        SymbolUsage::Nonlocal => {
            symbol.scope = SymbolScope::Free;
            flags.insert(SymbolFlags::NONLOCAL);
        }
        SymbolUsage::Imported => {
            flags.insert(SymbolFlags::ASSIGNED | SymbolFlags::IMPORTED);
        }
        SymbolUsage::Parameter => {
            flags.insert(SymbolFlags::PARAMETER);
        }
        SymbolUsage::AnnotationParameter => {
            flags.insert(SymbolFlags::PARAMETER | SymbolFlags::ANNOTATED);
        }
        SymbolUsage::AnnotationAssigned => {
            flags.insert(SymbolFlags::ASSIGNED | SymbolFlags::ANNOTATED);
        }
        SymbolUsage::Assigned => {
            flags.insert(SymbolFlags::ASSIGNED);
        }
        SymbolUsage::AssignedNamedExprInComprehension => {
            flags.insert(SymbolFlags::ASSIGNED | SymbolFlags::ASSIGNED_IN_COMPREHENSION);
        }
        SymbolUsage::Global => {
            symbol.scope = SymbolScope::GlobalExplicit;
        }
        SymbolUsage::Used => {
            flags.insert(SymbolFlags::REFERENCED);
        }
        SymbolUsage::Iter => {
            flags.insert(SymbolFlags::ITER);
        }
    }
    // and even more checking
    // it is not allowed to assign to iterator variables (by named expressions)
    if flags.contains(SymbolFlags::ITER | SymbolFlags::ASSIGNED)
    /*&& symbol.is_assign_named_expr_in_comprehension*/
    {
        return Err(SymbolTableError {
            error:
                "assignment expression cannot be used in a comprehension iterable expression"
                    .to_string(),
            location,
        });
    }
    Ok(())
}
}
/// Apply CPython's private name mangling: inside `class C`, `__name`
/// becomes `_C__name`.
///
/// No mangling happens when there is no enclosing class, when `name` does
/// not start with `__`, when it also ends with `__` (dunder), when it
/// contains a `.` (dotted import path), or when the class name consists
/// solely of underscores.
pub(crate) fn mangle_name<'a>(class_name: Option<&str>, name: &'a str) -> Cow<'a, str> {
    let class_name = match class_name {
        Some(n) => n,
        None => return name.into(),
    };
    if !name.starts_with("__") || name.ends_with("__") || name.contains('.') {
        return name.into();
    }
    // CPython strips *all* leading underscores from the class name, not just
    // one (the previous `strip_prefix` removed at most a single '_').
    let class_name = class_name.trim_start_matches('_');
    if class_name.is_empty() {
        // Class name was only underscores: CPython leaves the name unmangled.
        return name.into();
    }
    let mut ret = String::with_capacity(1 + class_name.len() + name.len());
    ret.push('_');
    ret.push_str(class_name);
    ret.push_str(name);
    ret.into()
}
|
use crate::prelude::*;
/// A single DNA nucleobase.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Nucl {
    A,
    T,
    C,
    G,
}
impl Nucl {
pub fn complement(&self) -> Self {
use Nucl::*;
match self {
A => T,
T => A,
C => G,
G => C,
}
}
}
impl fmt::Display for Nucl {
    /// Display and Debug share the single-letter representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl From<char> for Nucl {
fn from(c: char) -> Self {
use Nucl::*;
match c {
'A' => A,
'T' => T,
'C' => C,
'G' => G,
o => panic!("invalid nucleobase {}", o),
}
}
}
impl From<Nucl> for char {
fn from(c: Nucl) -> Self {
match c {
A => 'A',
T => 'T',
C => 'C',
G => 'G',
}
}
}
impl From<usize> for Nucl {
fn from(c: usize) -> Self {
match c {
0 => A,
1 => T,
2 => C,
3 => G,
o => panic!("invalid nucleobase {}", o),
}
}
}
|
mod model;
pub use self::model::*;
use crate::config::Config;
use reqwest;
use serde::Serialize;
use serde::de::DeserializeOwned;
use std::error;
use std::fmt;
use std::io;
use std::result;
/// Convenience alias for results produced by this Telegram client.
pub type Result<T> = result::Result<T, Error>;
/// Errors the Telegram API client can produce.
#[derive(Debug)]
pub enum Error {
    /// The underlying HTTP request failed.
    RequestError(reqwest::Error),
    /// Local I/O failure (e.g. reading a document to upload).
    IoError(io::Error),
    /// The API answered `ok = false` with an error code and description.
    TelegramError(i64, String),
    /// The API answered `ok = false` without code/description.
    IncompleteTelegramError,
}
impl From<reqwest::Error> for Error {
fn from(err: reqwest::Error) -> Error {
Error::RequestError(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::IoError(err)
}
}
impl error::Error for Error {
    // NOTE(review): `description` is deprecated in modern Rust in favour of
    // `Display`; kept unchanged because this file targets the pre-2018 API
    // surface (see the bare `&error::Error` trait object below).
    fn description(&self) -> &str {
        match *self {
            Error::RequestError(ref err) => err.description(),
            Error::IoError(ref err) => err.description(),
            Error::TelegramError(..) => "the telegram api returned an error",
            Error::IncompleteTelegramError => "the telegram api returned an incomplete error",
        }
    }
    // NOTE(review): `cause` is superseded by `source`; unchanged to avoid
    // raising the crate's minimum supported Rust version.
    fn cause(&self) -> Option<&error::Error> {
        match *self {
            Error::RequestError(ref err) => Some(err),
            Error::IoError(ref err) => Some(err),
            // Telegram-side errors have no underlying Rust error.
            _ => None,
        }
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::RequestError(ref err) => write!(f, "request error: {}", err),
Error::IoError(ref err) => write!(f, "io error: {}", err),
Error::TelegramError(code, ref description) => {
write!(f, "telegram error ({}): {}", code, description)
}
Error::IncompleteTelegramError => {
write!(f, "the telegram api returned an incomplete error")
}
}
}
}
/// Thin synchronous client for the Telegram Bot API.
#[derive(Debug)]
pub struct Api {
    client: reqwest::Client,
    // Base URL including the bot token, i.e. "https://api.telegram.org/bot<token>".
    api_url: String,
}
impl Api {
    /// Create a client that talks to the Bot API using `token`.
    pub fn new(client: reqwest::Client, token: &str) -> Self {
        Api {
            client,
            api_url: format!("https://api.telegram.org/bot{}", token),
        }
    }
    /// Build an `Api` from configuration; `None` when no telegram section is set.
    pub(crate) fn from_config(config: &Config) -> Option<Self> {
        let telegram = config.main.telegram.as_ref();
        telegram.map(|telegram| Api::new(reqwest::Client::new(), &telegram.token))
    }
    /// POST `params` form-encoded to the API method `name` and decode the result.
    fn call_method<T: Serialize + ?Sized, R: DeserializeOwned>(
        &self,
        name: &str,
        params: &T,
    ) -> Result<R> {
        let resp = self.client
            .post(&format!("{}/{}", self.api_url, name))
            .form(&params)
            .send()?;
        self.process_response(resp)
    }
    /// Unwrap the Bot API response envelope, mapping `ok = false` to errors.
    fn process_response<R: DeserializeOwned>(&self, mut resp: reqwest::Response) -> Result<R> {
        let resp: Response<R> = resp.json()?;
        if !resp.ok {
            return match (resp.error_code, resp.description) {
                (Some(code), Some(description)) => Err(Error::TelegramError(code, description)),
                _ => Err(Error::IncompleteTelegramError),
            };
        }
        // we can safely unwrap, because a response with
        // ok = true always has a result
        Ok(resp.result.unwrap())
    }
    /// https://core.telegram.org/bots/api#sendmessage
    pub fn send_message(&self, params: &SendMessageParams) -> Result<Message> {
        self.call_method("sendMessage", params)
    }
    /// https://core.telegram.org/bots/api#setwebhook
    pub fn set_webhook(&self, params: &SetWebhookParams) -> Result<Message> {
        self.call_method("setWebhook", params)
    }
    /// https://core.telegram.org/bots/api#getupdates
    pub fn get_updates(&self, params: &GetUpdatesParams) -> Result<Vec<Update>> {
        self.call_method("getUpdates", params)
    }
    /// Upload a document; needs multipart/form-data so it bypasses `call_method`.
    pub fn send_document(&self, params: SendDocumentParams) -> Result<Message> {
        let SendDocumentParams { chat_id, document } = params;
        let form = reqwest::multipart::Form::new().text("chat_id", chat_id);
        let form = match document {
            File::String(file) => form.text("document", file),
            File::InputFile(path) => form.file("document", path)?,
        };
        let resp = self.client
            .post(&format!("{}/sendDocument", self.api_url))
            .multipart(form)
            .send()?;
        self.process_response(resp)
    }
    ///
    /// Gets updates and makes sure telegram 'forgets' them.
    ///
    pub fn poll_updates(&self) -> Result<Vec<Update>> {
        let updates = self.get_updates(&Default::default())?;
        // this make sure that the updates are forgotten
        // so the next time we call getUpdates we only receive new updates.
        if let Some(last) = updates.last() {
            self.get_updates(&GetUpdatesParams {
                offset: Some(last.update_id + 1),
                ..Default::default()
            })?;
        }
        Ok(updates)
    }
}
|
use dlal_component_base::component;
// Soft/hard audio limiter component: "soft" is the knee threshold,
// "soft_gain" scales excursions beyond the knee, "hard" is the absolute
// clamp. All three are exposed as read/write, JSON-serializable fields.
component!(
    {"in": [], "out": ["audio"]},
    [
        "run_size",
        "multi",
        "check_audio",
        {"name": "field_helpers", "fields": ["soft", "soft_gain", "hard"], "kinds": ["rw", "json"]},
    ],
    {
        soft: f32,
        soft_gain: f32,
        hard: f32,
    },
    {
        "soft": {
            "args": [{
                "name": "soft",
                "optional": true,
                "default": 1.0,
            }],
        },
        "soft_gain": {
            "args": [{
                "name": "soft_gain",
                "optional": true,
                "default": 0.5,
            }],
        },
        "hard": {
            "args": [{
                "name": "hard",
                "optional": true,
                "default": 1.0,
            }],
        },
    },
);
impl ComponentTrait for Component {
    fn init(&mut self) {
        // Defaults match the command defaults declared in `component!` above:
        // knee at +/-1.0, halve anything beyond it, hard clamp at +/-1.0.
        self.soft = 1.0;
        self.soft_gain = 0.5;
        self.hard = 1.0;
    }
    fn run(&mut self) {
        for output in &self.outputs {
            let audio = output.audio(self.run_size).unwrap();
            for i in audio {
                // Samples within [-soft, soft] pass through untouched.
                // Beyond the knee, the excursion is scaled by soft_gain and
                // then clamped at +/-hard.
                if *i > self.soft {
                    *i = self.soft + (*i - self.soft) * self.soft_gain;
                    if *i > self.hard {
                        *i = self.hard;
                    }
                } else if *i < -self.soft {
                    *i = -self.soft + (*i + self.soft) * self.soft_gain;
                    if *i < -self.hard {
                        *i = -self.hard;
                    }
                }
            }
        }
    }
}
|
use std::iter::FromIterator;
use crate::key::Key;
use crate::repr::Decor;
use crate::table::{Iter, IterMut, KeyValuePairs, TableKeyValue, TableLike};
use crate::{InternalString, Item, KeyMut, RawString, Table, Value};
/// Type representing a TOML inline table,
/// payload of the `Value::InlineTable` variant
#[derive(Debug, Default, Clone)]
pub struct InlineTable {
    // `preamble` represents whitespaces in an empty table
    preamble: RawString,
    // prefix before `{` and suffix after `}`
    decor: Decor,
    // original byte range in the parsed document, if any
    pub(crate) span: Option<std::ops::Range<usize>>,
    // whether this is a proxy for dotted keys
    dotted: bool,
    // ordered key/value storage, shared representation with `Table`
    pub(crate) items: KeyValuePairs,
}
/// Constructors
///
/// See also `FromIterator`
impl InlineTable {
/// Creates an empty table.
pub fn new() -> Self {
Default::default()
}
pub(crate) fn with_pairs(items: KeyValuePairs) -> Self {
Self {
items,
..Default::default()
}
}
/// Convert to a table
pub fn into_table(self) -> Table {
let mut t = Table::with_pairs(self.items);
t.fmt();
t
}
}
/// Formatting
impl InlineTable {
    /// Get key/values for values that are visually children of this table
    ///
    /// For example, this will return dotted keys
    pub fn get_values(&self) -> Vec<(Vec<&Key>, &Value)> {
        let mut values = Vec::new();
        let root = Vec::new();
        self.append_values(&root, &mut values);
        values
    }
    /// Recursively collect `(key-path, value)` pairs, descending into
    /// dotted-key proxy tables so their children appear under this table.
    pub(crate) fn append_values<'s, 'c>(
        &'s self,
        parent: &[&'s Key],
        values: &'c mut Vec<(Vec<&'s Key>, &'s Value)>,
    ) {
        for value in self.items.values() {
            let mut path = parent.to_vec();
            path.push(&value.key);
            match &value.value {
                Item::Value(Value::InlineTable(table)) if table.is_dotted() => {
                    table.append_values(&path, values);
                }
                Item::Value(value) => {
                    values.push((path, value));
                }
                // Non-value items are skipped.
                _ => {}
            }
        }
    }
    /// Auto formats the table.
    pub fn fmt(&mut self) {
        decorate_inline_table(self);
    }
    /// Sorts the key/value pairs by key.
    pub fn sort_values(&mut self) {
        // Assuming standard tables have their position set and this won't negatively impact them
        self.items.sort_keys();
        // Recurse into dotted-key proxy tables so their children sort too.
        for kv in self.items.values_mut() {
            match &mut kv.value {
                Item::Value(Value::InlineTable(table)) if table.is_dotted() => {
                    table.sort_values();
                }
                _ => {}
            }
        }
    }
    /// Sort Key/Value Pairs of the table using the using the comparison function `compare`.
    ///
    /// The comparison function receives two key and value pairs to compare (you can sort by keys or
    /// values or their combination as needed).
    pub fn sort_values_by<F>(&mut self, mut compare: F)
    where
        F: FnMut(&Key, &Value, &Key, &Value) -> std::cmp::Ordering,
    {
        self.sort_values_by_internal(&mut compare);
    }
    fn sort_values_by_internal<F>(&mut self, compare: &mut F)
    where
        F: FnMut(&Key, &Value, &Key, &Value) -> std::cmp::Ordering,
    {
        // Adapt `compare` to the map's entry type; non-value items sort after
        // value items and are mutually equal.
        let modified_cmp = |_: &InternalString,
                            val1: &TableKeyValue,
                            _: &InternalString,
                            val2: &TableKeyValue|
         -> std::cmp::Ordering {
            match (val1.value.as_value(), val2.value.as_value()) {
                (Some(v1), Some(v2)) => compare(&val1.key, v1, &val2.key, v2),
                (Some(_), None) => std::cmp::Ordering::Greater,
                (None, Some(_)) => std::cmp::Ordering::Less,
                (None, None) => std::cmp::Ordering::Equal,
            }
        };
        self.items.sort_by(modified_cmp);
        for kv in self.items.values_mut() {
            match &mut kv.value {
                Item::Value(Value::InlineTable(table)) if table.is_dotted() => {
                    table.sort_values_by_internal(compare);
                }
                _ => {}
            }
        }
    }
    /// Change this table's dotted status
    pub fn set_dotted(&mut self, yes: bool) {
        self.dotted = yes;
    }
    /// Check if this is a wrapper for dotted keys, rather than a standard table
    pub fn is_dotted(&self) -> bool {
        self.dotted
    }
    /// Returns the surrounding whitespace
    pub fn decor_mut(&mut self) -> &mut Decor {
        &mut self.decor
    }
    /// Returns the surrounding whitespace
    pub fn decor(&self) -> &Decor {
        &self.decor
    }
    /// Returns the decor associated with a given key of the table.
    pub fn key_decor_mut(&mut self, key: &str) -> Option<&mut Decor> {
        self.items.get_mut(key).map(|kv| &mut kv.key.decor)
    }
    /// Returns the decor associated with a given key of the table.
    pub fn key_decor(&self, key: &str) -> Option<&Decor> {
        self.items.get(key).map(|kv| &kv.key.decor)
    }
    /// Set whitespace after before element
    pub fn set_preamble(&mut self, preamble: impl Into<RawString>) {
        self.preamble = preamble.into();
    }
    /// Whitespace after before element
    pub fn preamble(&self) -> &RawString {
        &self.preamble
    }
    /// Returns the location within the original document
    pub(crate) fn span(&self) -> Option<std::ops::Range<usize>> {
        self.span.clone()
    }
    /// Forget the original source span for this table and everything in it
    /// (used once edits invalidate the recorded byte offsets).
    pub(crate) fn despan(&mut self, input: &str) {
        self.span = None;
        self.decor.despan(input);
        self.preamble.despan(input);
        for kv in self.items.values_mut() {
            kv.key.despan(input);
            kv.value.despan(input);
        }
    }
}
impl InlineTable {
    /// Returns an iterator over key/value pairs.
    pub fn iter(&self) -> InlineTableIter<'_> {
        // Only `Item::Value` entries are yielded.
        Box::new(
            self.items
                .iter()
                .filter(|&(_, kv)| kv.value.is_value())
                .map(|(k, kv)| (&k[..], kv.value.as_value().unwrap())),
        )
    }
    /// Returns an iterator over key/value pairs.
    pub fn iter_mut(&mut self) -> InlineTableIterMut<'_> {
        Box::new(
            self.items
                .iter_mut()
                .filter(|(_, kv)| kv.value.is_value())
                .map(|(_, kv)| (kv.key.as_mut(), kv.value.as_value_mut().unwrap())),
        )
    }
    /// Returns the number of key/value pairs.
    pub fn len(&self) -> usize {
        // Count only what `iter` yields, i.e. `Item::Value` entries.
        self.iter().count()
    }
    /// Returns true iff the table is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Clears the table, removing all key-value pairs. Keeps the allocated memory for reuse.
    pub fn clear(&mut self) {
        self.items.clear()
    }
    /// Gets the given key's corresponding entry in the Table for in-place manipulation.
    pub fn entry(&'_ mut self, key: impl Into<InternalString>) -> InlineEntry<'_> {
        match self.items.entry(key.into()) {
            indexmap::map::Entry::Occupied(mut entry) => {
                // Ensure it is a `Value` to simplify `InlineOccupiedEntry`'s code.
                let scratch = std::mem::take(&mut entry.get_mut().value);
                let scratch = Item::Value(
                    scratch
                        .into_value()
                        // HACK: `Item::None` is a corner case of a corner case, let's just pick a
                        // "safe" value
                        .unwrap_or_else(|_| Value::InlineTable(Default::default())),
                );
                entry.get_mut().value = scratch;
                InlineEntry::Occupied(InlineOccupiedEntry { entry })
            }
            indexmap::map::Entry::Vacant(entry) => {
                InlineEntry::Vacant(InlineVacantEntry { entry, key: None })
            }
        }
    }
    /// Gets the given key's corresponding entry in the Table for in-place manipulation.
    pub fn entry_format<'a>(&'a mut self, key: &Key) -> InlineEntry<'a> {
        // Accept a `&Key` to be consistent with `entry`
        match self.items.entry(key.get().into()) {
            indexmap::map::Entry::Occupied(mut entry) => {
                // Ensure it is a `Value` to simplify `InlineOccupiedEntry`'s code.
                let scratch = std::mem::take(&mut entry.get_mut().value);
                let scratch = Item::Value(
                    scratch
                        .into_value()
                        // HACK: `Item::None` is a corner case of a corner case, let's just pick a
                        // "safe" value
                        .unwrap_or_else(|_| Value::InlineTable(Default::default())),
                );
                entry.get_mut().value = scratch;
                InlineEntry::Occupied(InlineOccupiedEntry { entry })
            }
            indexmap::map::Entry::Vacant(entry) => InlineEntry::Vacant(InlineVacantEntry {
                entry,
                key: Some(key.clone()),
            }),
        }
    }
    /// Return an optional reference to the value at the given the key.
    pub fn get(&self, key: &str) -> Option<&Value> {
        self.items.get(key).and_then(|kv| kv.value.as_value())
    }
    /// Return an optional mutable reference to the value at the given the key.
    pub fn get_mut(&mut self, key: &str) -> Option<&mut Value> {
        self.items
            .get_mut(key)
            .and_then(|kv| kv.value.as_value_mut())
    }
    /// Return references to the key-value pair stored for key, if it is present, else None.
    pub fn get_key_value<'a>(&'a self, key: &str) -> Option<(&'a Key, &'a Item)> {
        self.items.get(key).and_then(|kv| {
            if !kv.value.is_none() {
                Some((&kv.key, &kv.value))
            } else {
                None
            }
        })
    }
    /// Return mutable references to the key-value pair stored for key, if it is present, else None.
    pub fn get_key_value_mut<'a>(&'a mut self, key: &str) -> Option<(KeyMut<'a>, &'a mut Item)> {
        self.items.get_mut(key).and_then(|kv| {
            if !kv.value.is_none() {
                Some((kv.key.as_mut(), &mut kv.value))
            } else {
                None
            }
        })
    }
    /// Returns true iff the table contains given key.
    pub fn contains_key(&self, key: &str) -> bool {
        if let Some(kv) = self.items.get(key) {
            kv.value.is_value()
        } else {
            false
        }
    }
    /// Inserts a key/value pair if the table does not contain the key.
    /// Returns a mutable reference to the corresponding value.
    pub fn get_or_insert<V: Into<Value>>(
        &mut self,
        key: impl Into<InternalString>,
        value: V,
    ) -> &mut Value {
        let key = key.into();
        self.items
            .entry(key.clone())
            .or_insert(TableKeyValue::new(Key::new(key), Item::Value(value.into())))
            .value
            .as_value_mut()
            .expect("non-value type in inline table")
    }
    /// Inserts a key-value pair into the map.
    pub fn insert(&mut self, key: impl Into<InternalString>, value: Value) -> Option<Value> {
        let key = key.into();
        let kv = TableKeyValue::new(Key::new(key.clone()), Item::Value(value));
        self.items
            .insert(key, kv)
            .and_then(|kv| kv.value.into_value().ok())
    }
    /// Inserts a key-value pair into the map.
    pub fn insert_formatted(&mut self, key: &Key, value: Value) -> Option<Value> {
        let kv = TableKeyValue::new(key.to_owned(), Item::Value(value));
        self.items
            .insert(InternalString::from(key.get()), kv)
            .filter(|kv| kv.value.is_value())
            .map(|kv| kv.value.into_value().unwrap())
    }
    /// Removes an item given the key.
    pub fn remove(&mut self, key: &str) -> Option<Value> {
        // `shift_remove` preserves the relative order of the remaining pairs.
        self.items
            .shift_remove(key)
            .and_then(|kv| kv.value.into_value().ok())
    }
    /// Removes a key from the map, returning the stored key and value if the key was previously in the map.
    pub fn remove_entry(&mut self, key: &str) -> Option<(Key, Value)> {
        self.items.shift_remove(key).and_then(|kv| {
            let key = kv.key;
            kv.value.into_value().ok().map(|value| (key, value))
        })
    }
    /// Retains only the elements specified by the `keep` predicate.
    ///
    /// In other words, remove all pairs `(key, value)` for which
    /// `keep(&key, &mut value)` returns `false`.
    ///
    /// The elements are visited in iteration order.
    pub fn retain<F>(&mut self, mut keep: F)
    where
        F: FnMut(&str, &mut Value) -> bool,
    {
        // Non-`Value` items are dropped as well; `keep` never sees them.
        self.items.retain(|key, item| {
            item.value
                .as_value_mut()
                .map(|value| keep(key, value))
                .unwrap_or(false)
        });
    }
}
impl std::fmt::Display for InlineTable {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate to the TOML encoder with no default decor overrides.
        crate::encode::Encode::encode(self, f, None, ("", ""))
    }
}
impl<K: Into<Key>, V: Into<Value>> Extend<(K, V)> for InlineTable {
    /// Insert each `(key, value)` pair; later duplicates replace earlier ones.
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        for (key, value) in iter {
            let kv = TableKeyValue::new(key.into(), Item::Value(value.into()));
            let internal_key = InternalString::from(kv.key.get());
            self.items.insert(internal_key, kv);
        }
    }
}
impl<K: Into<Key>, V: Into<Value>> FromIterator<(K, V)> for InlineTable {
    /// Collect `(key, value)` pairs into a fresh inline table via `extend`.
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = (K, V)>,
    {
        let mut result = InlineTable::new();
        result.extend(iter);
        result
    }
}
impl IntoIterator for InlineTable {
    type Item = (InternalString, Value);
    type IntoIter = InlineTableIntoIter;
    /// Consume the table, yielding only the `Item::Value` entries.
    fn into_iter(self) -> Self::IntoIter {
        let pairs = self
            .items
            .into_iter()
            .filter(|(_, kv)| kv.value.is_value())
            .map(|(k, kv)| (k, kv.value.into_value().unwrap()));
        Box::new(pairs)
    }
}
impl<'s> IntoIterator for &'s InlineTable {
    type Item = (&'s str, &'s Value);
    type IntoIter = InlineTableIter<'s>;
    fn into_iter(self) -> Self::IntoIter {
        // Borrowing iteration delegates to `InlineTable::iter`.
        self.iter()
    }
}
fn decorate_inline_table(table: &mut InlineTable) {
for (key_decor, value) in table
.items
.iter_mut()
.filter(|&(_, ref kv)| kv.value.is_value())
.map(|(_, kv)| (&mut kv.key.decor, kv.value.as_value_mut().unwrap()))
{
key_decor.clear();
value.decor_mut().clear();
}
}
/// An owned iterator type over key/value pairs of an inline table.
pub type InlineTableIntoIter = Box<dyn Iterator<Item = (InternalString, Value)>>;
/// An iterator type over key/value pairs of an inline table.
pub type InlineTableIter<'a> = Box<dyn Iterator<Item = (&'a str, &'a Value)> + 'a>;
/// A mutable iterator type over key/value pairs of an inline table.
pub type InlineTableIterMut<'a> = Box<dyn Iterator<Item = (KeyMut<'a>, &'a mut Value)> + 'a>;
// Forwards the generic `TableLike` API to `InlineTable`'s inherent methods.
impl TableLike for InlineTable {
    // NOTE: unlike the inherent `iter`, this yields *all* items, including
    // non-`Value` ones.
    fn iter(&self) -> Iter<'_> {
        Box::new(self.items.iter().map(|(key, kv)| (&key[..], &kv.value)))
    }
    fn iter_mut(&mut self) -> IterMut<'_> {
        Box::new(
            self.items
                .iter_mut()
                .map(|(_, kv)| (kv.key.as_mut(), &mut kv.value)),
        )
    }
    fn clear(&mut self) {
        self.clear();
    }
    fn entry<'a>(&'a mut self, key: &str) -> crate::Entry<'a> {
        // Accept a `&str` rather than an owned type to keep `InternalString`, well, internal
        match self.items.entry(key.into()) {
            indexmap::map::Entry::Occupied(entry) => {
                crate::Entry::Occupied(crate::OccupiedEntry { entry })
            }
            indexmap::map::Entry::Vacant(entry) => {
                crate::Entry::Vacant(crate::VacantEntry { entry, key: None })
            }
        }
    }
    fn entry_format<'a>(&'a mut self, key: &Key) -> crate::Entry<'a> {
        // Accept a `&Key` to be consistent with `entry`
        match self.items.entry(key.get().into()) {
            indexmap::map::Entry::Occupied(entry) => {
                crate::Entry::Occupied(crate::OccupiedEntry { entry })
            }
            indexmap::map::Entry::Vacant(entry) => crate::Entry::Vacant(crate::VacantEntry {
                entry,
                key: Some(key.to_owned()),
            }),
        }
    }
    fn get<'s>(&'s self, key: &str) -> Option<&'s Item> {
        self.items.get(key).map(|kv| &kv.value)
    }
    fn get_mut<'s>(&'s mut self, key: &str) -> Option<&'s mut Item> {
        self.items.get_mut(key).map(|kv| &mut kv.value)
    }
    fn get_key_value<'a>(&'a self, key: &str) -> Option<(&'a Key, &'a Item)> {
        self.get_key_value(key)
    }
    fn get_key_value_mut<'a>(&'a mut self, key: &str) -> Option<(KeyMut<'a>, &'a mut Item)> {
        self.get_key_value_mut(key)
    }
    fn contains_key(&self, key: &str) -> bool {
        self.contains_key(key)
    }
    fn insert(&mut self, key: &str, value: Item) -> Option<Item> {
        // Panics if `value` cannot be converted into a `Value`.
        self.insert(key, value.into_value().unwrap())
            .map(Item::Value)
    }
    fn remove(&mut self, key: &str) -> Option<Item> {
        self.remove(key).map(Item::Value)
    }
    fn get_values(&self) -> Vec<(Vec<&Key>, &Value)> {
        self.get_values()
    }
    fn fmt(&mut self) {
        self.fmt()
    }
    fn sort_values(&mut self) {
        self.sort_values()
    }
    fn set_dotted(&mut self, yes: bool) {
        self.set_dotted(yes)
    }
    fn is_dotted(&self) -> bool {
        self.is_dotted()
    }
    fn key_decor_mut(&mut self, key: &str) -> Option<&mut Decor> {
        self.key_decor_mut(key)
    }
    fn key_decor(&self, key: &str) -> Option<&Decor> {
        self.key_decor(key)
    }
}
// Default decor around an inline-table key, i.e. the single spaces in
// `{ key1 = value1, ... }` (prefix, suffix).
pub(crate) const DEFAULT_INLINE_KEY_DECOR: (&str, &str) = (" ", " ");
/// A view into a single location in a map, which may be vacant or occupied.
///
/// Mirrors the shape of `std::collections::hash_map::Entry` for inline tables.
pub enum InlineEntry<'a> {
    /// An occupied Entry.
    Occupied(InlineOccupiedEntry<'a>),
    /// A vacant Entry.
    Vacant(InlineVacantEntry<'a>),
}
impl<'a> InlineEntry<'a> {
    /// Returns the key this entry refers to, whether or not it is occupied.
    pub fn key(&self) -> &str {
        match self {
            InlineEntry::Vacant(vacant) => vacant.key(),
            InlineEntry::Occupied(occupied) => occupied.key(),
        }
    }
    /// Ensures a value is in the entry by inserting `default` if empty, and
    /// returns a mutable reference to the value in the entry.
    pub fn or_insert(self, default: Value) -> &'a mut Value {
        match self {
            InlineEntry::Vacant(vacant) => vacant.insert(default),
            InlineEntry::Occupied(occupied) => occupied.into_mut(),
        }
    }
    /// Ensures a value is in the entry by inserting the result of `default`
    /// if empty, and returns a mutable reference to the value in the entry.
    /// The closure is only invoked when the entry is vacant.
    pub fn or_insert_with<F: FnOnce() -> Value>(self, default: F) -> &'a mut Value {
        match self {
            InlineEntry::Vacant(vacant) => vacant.insert(default()),
            InlineEntry::Occupied(occupied) => occupied.into_mut(),
        }
    }
}
/// A view into a single occupied location in an `IndexMap`.
pub struct InlineOccupiedEntry<'a> {
    // Underlying occupied slot in the table's ordered map.
    entry: indexmap::map::OccupiedEntry<'a, InternalString, TableKeyValue>,
}
impl<'a> InlineOccupiedEntry<'a> {
    /// Gets a reference to the entry key.
    pub fn key(&self) -> &str {
        let key = self.entry.key();
        key.as_str()
    }
    /// Gets a mutable reference to the entry key.
    pub fn key_mut(&mut self) -> KeyMut<'_> {
        let kv = self.entry.get_mut();
        kv.key.as_mut()
    }
    /// Gets a reference to the value in the entry.
    pub fn get(&self) -> &Value {
        let kv = self.entry.get();
        // Occupied inline-table slots always hold `Item::Value`.
        kv.value.as_value().unwrap()
    }
    /// Gets a mutable reference to the value in the entry.
    pub fn get_mut(&mut self) -> &mut Value {
        let kv = self.entry.get_mut();
        kv.value.as_value_mut().unwrap()
    }
    /// Converts the entry into a mutable reference to its value, with a
    /// lifetime bound to the map itself.
    pub fn into_mut(self) -> &'a mut Value {
        let kv = self.entry.into_mut();
        kv.value.as_value_mut().unwrap()
    }
    /// Sets the value of the entry, and returns the entry's old value.
    pub fn insert(&mut self, value: Value) -> Value {
        let slot = &mut self.entry.get_mut().value;
        let previous = std::mem::replace(slot, Item::Value(value));
        previous.into_value().unwrap()
    }
    /// Takes the value out of the entry, and returns it.
    pub fn remove(self) -> Value {
        let kv = self.entry.shift_remove();
        kv.value.into_value().unwrap()
    }
}
/// A view into a single empty location in an `IndexMap`.
pub struct InlineVacantEntry<'a> {
    // Underlying vacant slot in the table's ordered map.
    entry: indexmap::map::VacantEntry<'a, InternalString, TableKeyValue>,
    // A caller-supplied `Key` (with formatting) to use on insert, if any;
    // when `None`, a key is synthesized from the map slot's string.
    key: Option<Key>,
}
impl<'a> InlineVacantEntry<'a> {
/// Gets a reference to the entry key
///
/// # Examples
///
/// ```
/// use toml_edit::Table;
///
/// let mut map = Table::new();
///
/// assert_eq!("foo", map.entry("foo").key());
/// ```
pub fn key(&self) -> &str {
self.entry.key().as_str()
}
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it
pub fn insert(self, value: Value) -> &'a mut Value {
let entry = self.entry;
let key = self.key.unwrap_or_else(|| Key::new(entry.key().as_str()));
let value = Item::Value(value);
entry
.insert(TableKeyValue::new(key, value))
.value
.as_value_mut()
.unwrap()
}
}
|
//! Types for working with compiled Piccolo bytecode.
use crate::Constant;
use super::op::Opcode;
use super::{ChunkOffset, Line};
/// Stores a piece of compiled Piccolo bytecode.
#[derive(Default, Debug)]
pub struct Chunk {
    // Raw bytecode: opcodes with their operands (u16s stored little-endian).
    pub(crate) data: Vec<u8>,
    // Run-length encoded line info: lines[i] is the number of bytes in
    // `data` that were written while compiling line i+1 (see add_to_line).
    pub(crate) lines: Vec<Line>,
    // Constant pool, indexed by the u16 operand of constant/global opcodes.
    pub(crate) constants: Vec<Constant>,
}
impl Chunk {
    /// Appends one byte to the chunk and attributes it to `line`.
    pub(crate) fn write_u8<T: Into<u8>>(&mut self, byte: T, line: Line) {
        let byte = byte.into();
        trace!("write u8 {:04x}={:02x}", self.data.len(), byte);
        self.data.push(byte);
        self.add_to_line(line);
    }
    /// Appends a u16 in little-endian order (low byte first).
    pub(crate) fn write_u16<T: Into<u16>>(&mut self, bytes: T, line: Line) {
        let (low, high) = crate::decode_bytes(bytes.into());
        self.write_u8(low, line);
        self.write_u8(high, line);
    }
    /// Appends an opcode followed by a u16 argument.
    pub(crate) fn write_arg_u16<T: Into<u8>>(&mut self, op: T, arg: u16, line: Line) {
        self.write_u8(op, line);
        self.write_u16(arg, line);
    }
    /// Emits a jump instruction with a 2-byte placeholder offset and returns
    /// the index of that placeholder so `patch_jump` can fill it in later.
    pub(crate) fn start_jump(&mut self, op: Opcode, line: Line) -> ChunkOffset {
        trace!("write jump to idx {:x}", self.data.len());
        self.write_u8(op, line);
        // Two filler bytes for the not-yet-known u16 jump distance; the
        // opcode values used here are arbitrary placeholders.
        self.write_u8(Opcode::Assert, line);
        self.write_u8(Opcode::False, line);
        self.data.len() - 2
    }
    /// Back-patches the placeholder written by `start_jump` with the distance
    /// from the end of the jump instruction to the current end of the chunk.
    ///
    /// # Panics
    /// Panics if the jump distance does not fit in a u16.
    pub(crate) fn patch_jump(&mut self, offset: ChunkOffset) {
        let jump = self.data.len() - offset - 2;
        if jump > u16::MAX as ChunkOffset {
            panic!("cannot jump further than u16::MAX instructions");
        } else {
            let (low, high) = crate::decode_bytes(jump as u16);
            // Fixed: the trace arguments were swapped -- the index being
            // patched is `offset`, the value written there is `jump`.
            trace!("patch jump at idx {:x}={:04x}", offset, jump);
            self.data[offset] = low;
            self.data[offset + 1] = high;
        }
    }
    /// Emits a `JumpBack` to the instruction at `offset`.
    pub(crate) fn write_jump_back(&mut self, offset: ChunkOffset, line: Line) {
        // we haven't written the JumpBack instruction yet, so we need to add it
        // in order to calculate the actual offset when we write the jump instruction
        let offset = self.data.len() - offset + 3;
        self.write_arg_u16(Opcode::JumpBack, offset as u16, line);
    }
    /// Adds `value` to the constant pool and returns its index.
    ///
    /// # Panics
    /// Panics once the pool exceeds what a u16 operand can address.
    // allows for duplicate constants, non-duplicates are checked in the compiler
    pub(crate) fn make_constant(&mut self, value: Constant) -> u16 {
        trace!("make constant {:?}", value);
        self.constants.push(value);
        let idx = self.constants.len() - 1;
        if idx > u16::MAX as ChunkOffset {
            panic!("too many constants (>65k, fix your program)");
        } else {
            idx as u16
        }
    }
    /// Reads a little-endian u16 operand starting at `offset`.
    pub(crate) fn read_short(&self, offset: ChunkOffset) -> u16 {
        trace!("read short {:x}", offset);
        let low = self.data[offset];
        let high = self.data[offset + 1];
        crate::encode_bytes(low, high)
    }
    // get a line number from a byte offset using run-length encoding
    pub(crate) fn get_line_from_index(&self, idx: ChunkOffset) -> Line {
        let mut total_ops = 0;
        for (offset_line, num_ops) in self.lines.iter().enumerate() {
            total_ops += *num_ops;
            if total_ops > idx {
                // lines are 1-based, Vec indices are 0-based
                return offset_line + 1;
            }
        }
        panic!("no line for idx {} {:?} {:?}", idx, self.lines, self.data);
    }
    // add one opcode to a line
    fn add_to_line(&mut self, line: Line) {
        // Grow the RLE table up to `line` (filling skipped lines with 0),
        // then bump the byte count for that line.
        while line > self.lines.len() {
            self.lines.push(0);
        }
        self.lines[line - 1] += 1;
    }
    /// Renders the whole chunk (constant pool + decoded instructions) as a
    /// human-readable listing titled `name`.
    pub fn disassemble(&self, name: &str) -> String {
        trace!("disassemble");
        let mut s = format!(" -- {} --\n", name);
        s.push_str(" ++ constants\n");
        for (idx, constant) in self.constants.iter().enumerate() {
            s.push_str(&format!("{:04x} {:?}\n", idx, constant));
        }
        s.push_str(" ++ code\n");
        let mut offset = 0;
        while offset < self.data.len() {
            s.push_str(&self.disassemble_instruction(offset));
            s.push('\n');
            // Advance by the full instruction length (opcode + operands).
            offset += super::op::op_len(self.data[offset].into());
        }
        s
    }
    /// Renders one instruction at `offset` as
    /// `<bytes> <offset> <line> | <opcode> <decoded arg>`.
    pub fn disassemble_instruction(&self, offset: ChunkOffset) -> String {
        let op = self.data[offset].into();
        let len = super::op::op_len(op);
        // Raw bytes of the instruction, opcode first.
        let bytes = format!(
            "{first:02x}{others}",
            first = op as u8,
            others = if len > 1 {
                let mut s = String::new();
                for i in 1..len {
                    s.push_str(&format!(" {:02x}", self.data[offset + i]));
                }
                s
            } else {
                String::from("")
            }
        );
        let line = self.get_line_from_index(offset);
        let line_str = format!("{:04x} {:>4}", offset, line);
        let op_str = format!("{:15}", format!("{:?}", op));
        // Decode the operand according to the opcode's addressing style.
        let arg = match op {
            Opcode::Constant => {
                let idx = self.read_short(offset + 1);
                format!("@{:04x} ({:?})", idx, self.constants[idx as usize])
            }
            Opcode::GetLocal | Opcode::SetLocal => {
                let idx = self.read_short(offset + 1);
                format!("${}", idx)
            }
            Opcode::GetGlobal | Opcode::SetGlobal | Opcode::DeclareGlobal => {
                let idx = self.read_short(offset + 1);
                format!("g{:04x} ({:?})", idx, self.constants[idx as usize])
            }
            Opcode::JumpForward | Opcode::JumpFalse | Opcode::JumpTrue => {
                let idx = self.read_short(offset + 1);
                format!("+{:04x}", idx)
            }
            Opcode::JumpBack => {
                let idx = self.read_short(offset + 1);
                format!("-{:04x}", idx)
            }
            _ => String::new(),
        };
        format!(
            "{bytes:9} {line_str} | {op_str} {arg}",
            bytes = bytes,
            line_str = line_str,
            op_str = op_str,
            arg = arg
        )
    }
}
#[cfg(test)]
mod test {
    use crate::runtime::op::Opcode;
    use super::Chunk;
    #[test]
    fn get_line_from_index() {
        let mut c = Chunk::default();
        // Lay out bytes per line: 0..=5 on line 1, 6..=10 on line 2,
        // 11..=14 on line 3, 15..=18 on line 4, 19 on line 5.
        for &(line, count) in &[(1, 6), (2, 5), (3, 4), (4, 4), (5, 1)] {
            for _ in 0..count {
                c.write_u8(Opcode::Return, line);
            }
        }
        // Spot-check the boundaries of each run.
        assert_eq!(c.get_line_from_index(0), 1);
        assert_eq!(c.get_line_from_index(5), 1);
        assert_eq!(c.get_line_from_index(6), 2);
        assert_eq!(c.get_line_from_index(10), 2);
        assert_eq!(c.get_line_from_index(11), 3);
        assert_eq!(c.get_line_from_index(14), 3);
    }
}
|
use core::f64;
use super::sqrt;
const SPLIT: f64 = 134217728. + 1.; // 0x1p27 + 1 === (2 ^ 27) + 1
/// Exact squaring: returns `(hi, lo)` where `hi` is the rounded product
/// `x * x` and `lo` is the rounding error, so `hi + lo == x*x` exactly.
fn sq(x: f64) -> (f64, f64) {
    // Split x into a high half (xh) and the remainder (xl) via the SPLIT
    // multiply trick. The operation order is deliberate -- the additions
    // and subtractions below must not be reassociated.
    let xc = x * SPLIT;
    let xh = x - xc + xc;
    let xl = x - xh;
    let hi = x * x;
    let lo = xh * xh - hi + 2. * xh * xl + xl * xl;
    (hi, lo)
}
// Computes sqrt(x*x + y*y) while avoiding spurious overflow/underflow:
// arguments are rescaled into a safe range, squared exactly via `sq`, and
// the scale is reapplied after the square root.
#[cfg_attr(all(test, assert_no_panic), no_panic::no_panic)]
pub fn hypot(mut x: f64, mut y: f64) -> f64 {
    let x1p700 = f64::from_bits(0x6bb0000000000000); // 0x1p700 === 2 ^ 700
    let x1p_700 = f64::from_bits(0x1430000000000000); // 0x1p-700 === 2 ^ -700
    let mut uxi = x.to_bits();
    let mut uyi = y.to_bits();
    let uti;
    let ex: i64;
    let ey: i64;
    let mut z: f64;
    /* arrange |x| >= |y| */
    // Masking off the sign bit makes the bit patterns comparable as |x|, |y|.
    uxi &= -1i64 as u64 >> 1;
    uyi &= -1i64 as u64 >> 1;
    if uxi < uyi {
        uti = uxi;
        uxi = uyi;
        uyi = uti;
    }
    /* special cases */
    // Biased exponents (0x7ff means inf or NaN).
    ex = (uxi >> 52) as i64;
    ey = (uyi >> 52) as i64;
    x = f64::from_bits(uxi);
    y = f64::from_bits(uyi);
    /* note: hypot(inf,nan) == inf */
    if ey == 0x7ff {
        return y;
    }
    if ex == 0x7ff || uyi == 0 {
        return x;
    }
    /* note: hypot(x,y) ~= x + y*y/x/2 with inexact for small y/x */
    /* 64 difference is enough for ld80 double_t */
    if ex - ey > 64 {
        return x + y;
    }
    /* precise sqrt argument in nearest rounding mode without overflow */
    /* xh*xh must not overflow and xl*xl must not underflow in sq */
    z = 1.;
    if ex > 0x3ff + 510 {
        // Very large inputs: scale down before squaring, scale result back up.
        z = x1p700;
        x *= x1p_700;
        y *= x1p_700;
    } else if ey < 0x3ff - 450 {
        // Very small inputs: scale up before squaring, scale result back down.
        z = x1p_700;
        x *= x1p700;
        y *= x1p700;
    }
    let (hx, lx) = sq(x);
    let (hy, ly) = sq(y);
    // Sum smallest-to-largest magnitude to preserve the low-order error terms.
    z * sqrt(ly + lx + hy + hx)
}
|
//! Tests the `influxdb_iox remote` commands
use std::path::Path;
use super::get_object_store_id;
use assert_cmd::Command;
use futures::FutureExt;
use import_export::file::ExportedContents;
use predicates::prelude::*;
use tempfile::tempdir;
use test_helpers_end_to_end::{maybe_skip_integration, MiniCluster, Step, StepTest, StepTestState};
use tokio::fs;
/// Get all Parquet files for a table, using the command `remote store get-table`
#[tokio::test]
async fn remote_store_get_table() {
    test_helpers::maybe_start_logging();
    let database_url = maybe_skip_integration!();
    let table_name = "my_awesome_table";
    let other_table_name = "my_ordinary_table";
    let mut cluster = MiniCluster::create_shared(database_url).await;
    StepTest::new(
        &mut cluster,
        vec![
            // Persist some data
            Step::RecordNumParquetFiles,
            Step::WriteLineProtocol(format!("{table_name},tag1=A,tag2=B val=42i 123456")),
            Step::WaitForPersisted {
                expected_increase: 1,
            },
            // Persist some more data for the same table in a 2nd Parquet file
            Step::RecordNumParquetFiles,
            Step::WriteLineProtocol(format!("{table_name},tag1=C,tag2=B val=9000i 789000")),
            Step::WaitForPersisted {
                expected_increase: 1,
            },
            // Persist some more data for a different table
            Step::RecordNumParquetFiles,
            Step::WriteLineProtocol(format!("{other_table_name},tag1=A,tag2=B val=42i 123456")),
            Step::WaitForPersisted {
                expected_increase: 1,
            },
            // Exercise `remote store get-table` against the router.
            Step::Custom(Box::new(move |state: &mut StepTestState| {
                async move {
                    let router_addr = state.cluster().router().router_grpc_base().to_string();
                    let namespace = state.cluster().namespace().to_string();
                    // Ensure files are actually written to the filesystem
                    let dir = tempfile::tempdir().expect("could not get temporary directory");
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .current_dir(&dir)
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg(&namespace)
                        .arg(table_name)
                        .assert()
                        .success();
                    // There should be a directory created that, by
                    // default, is named the same as the table
                    let table_dir = dir.as_ref().join(table_name);
                    assert_two_parquet_files_and_meta(&table_dir);
                    // The `-o` argument should specify where the files go instead of a directory
                    // named after the table. Note that this `Command` doesn't set `current dir`;
                    // the `-o` argument shouldn't have anything to do with the current working
                    // directory.
                    let custom_output_dir = dir.as_ref().join("my_special_directory");
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg("-o")
                        .arg(&custom_output_dir)
                        .arg(&namespace)
                        // This time ask for the table that only has one Parquet file
                        .arg(other_table_name)
                        .assert()
                        .success();
                    let contents = assert_one_parquet_file_and_meta(&custom_output_dir);
                    // Specifying a table that doesn't exist prints an error message
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .current_dir(&dir)
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg(&namespace)
                        .arg("nacho-table")
                        .assert()
                        .failure()
                        .stderr(predicate::str::contains("Table nacho-table not found"));
                    // Specifying a namespace that doesn't exist prints an error message
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .current_dir(&dir)
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg("nacho-namespace")
                        .arg(table_name)
                        .assert()
                        .failure()
                        .stderr(predicate::str::contains(
                            "Namespace nacho-namespace not found",
                        ));
                    // Running the same command again shouldn't download any new files
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg("-o")
                        .arg(&custom_output_dir)
                        .arg(&namespace)
                        .arg(other_table_name)
                        .assert()
                        .success()
                        .stdout(predicate::str::contains(format!(
                            "skipping file 1 of 1 ({} already exists with expected file size)",
                            contents.parquet_file_name(0).unwrap(),
                        )));
                    // If the file sizes don't match, re-download that file
                    // (corrupt the local copy to force a size mismatch)
                    fs::write(&contents.parquet_files()[0], b"not parquet")
                        .await
                        .unwrap();
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg("-o")
                        .arg(&custom_output_dir)
                        .arg(&namespace)
                        .arg(other_table_name)
                        .assert()
                        .success()
                        .stdout(predicate::str::contains(format!(
                            "downloading file 1 of 1 ({})...",
                            contents.parquet_file_name(0).unwrap(),
                        )));
                }
                .boxed()
            })),
            Step::Custom(Box::new(move |state: &mut StepTestState| {
                async move {
                    // Test that we can download files from the querier (not just the router)
                    // to ensure it has the correct grpc services
                    let querier_addr = state.cluster().querier().querier_grpc_base().to_string();
                    let namespace = state.cluster().namespace().to_string();
                    // Ensure files are actually written to the filesystem
                    let dir = tempfile::tempdir().expect("could not get temporary directory");
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .current_dir(&dir)
                        .arg("-h")
                        .arg(&querier_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get-table")
                        .arg(&namespace)
                        .arg(table_name)
                        .assert()
                        .success();
                    // There should be a directory created that, by
                    // default, is named the same as the table
                    let table_dir = dir.as_ref().join(table_name);
                    assert_two_parquet_files_and_meta(&table_dir);
                }
                .boxed()
            })),
        ],
    )
    .run()
    .await
}
/// Asserts that the directory contains metadata and parquet files for
/// 1 partition that has 1 table with 1 parquet file, and returns the
/// parsed directory contents for further checks.
fn assert_one_parquet_file_and_meta(table_dir: &Path) -> ExportedContents {
    let contents = ExportedContents::try_new(table_dir).unwrap();
    assert_eq!(contents.parquet_files().len(), 1);
    assert_eq!(contents.parquet_json_files().len(), 1);
    assert_eq!(contents.table_json_files().len(), 1);
    assert_eq!(contents.partition_json_files().len(), 1);
    contents
}
/// Asserts that the directory contains metadata and parquet files for
/// 1 partition that has 1 table with 2 parquet files
fn assert_two_parquet_files_and_meta(table_dir: &Path) {
    let contents = ExportedContents::try_new(table_dir).unwrap();
    // Two data files, each with its own JSON metadata sidecar.
    assert_eq!(contents.parquet_files().len(), 2);
    assert_eq!(contents.parquet_json_files().len(), 2);
    assert_eq!(contents.table_json_files().len(), 1);
    assert_eq!(contents.partition_json_files().len(), 1);
}
/// remote partition command and getting a parquet file from the object store and pulling the
/// files, using these commands:
///
/// - `remote partition show`
/// - `remote store get`
#[tokio::test]
async fn remote_partition_and_get_from_store_and_pull() {
    test_helpers::maybe_start_logging();
    let database_url = maybe_skip_integration!();
    // The test below assumes a specific partition id, so use a
    // non-shared one here so concurrent tests don't interfere with
    // each other
    let mut cluster = MiniCluster::create_non_shared(database_url).await;
    StepTest::new(
        &mut cluster,
        vec![
            Step::RecordNumParquetFiles,
            Step::WriteLineProtocol(String::from(
                "my_awesome_table,tag1=A,tag2=B val=42i 123456",
            )),
            // wait for partitions to be persisted
            Step::WaitForPersisted {
                expected_increase: 1,
            },
            // Run the 'remote partition' command
            Step::Custom(Box::new(|state: &mut StepTestState| {
                async {
                    let router_addr = state.cluster().router().router_grpc_base().to_string();
                    // Validate the output of the remote partition CLI command
                    //
                    // Looks like:
                    // {
                    //     "id": "1",
                    //     "namespaceId": 1,
                    //     "tableId": 1,
                    //     "partitionId": "1",
                    //     "objectStoreId": "fa6cdcd1-cbc2-4fb7-8b51-4773079124dd",
                    //     "minTime": "123456",
                    //     "maxTime": "123456",
                    //     "fileSizeBytes": "2029",
                    //     "rowCount": "1",
                    //     "createdAt": "1650019674289347000"
                    // }
                    let out = Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("partition")
                        .arg("show")
                        .arg("1")
                        .assert()
                        .success()
                        .stdout(predicate::str::contains(
                            r#""hashId": "uGKn6bMp7mpBjN4ZEZjq6xUSdT8ZuHqB3vKubD0O0jc=""#,
                        ))
                        .get_output()
                        .stdout
                        .clone();
                    // Extract the object store UUID from the JSON output so
                    // the file can be fetched by id below.
                    let object_store_id = get_object_store_id(&out);
                    let dir = tempdir().unwrap();
                    let f = dir.path().join("tmp.parquet");
                    let filename = f.as_os_str().to_str().unwrap();
                    Command::cargo_bin("influxdb_iox")
                        .unwrap()
                        .arg("-h")
                        .arg(&router_addr)
                        .arg("remote")
                        .arg("store")
                        .arg("get")
                        .arg(&object_store_id)
                        .arg(filename)
                        .assert()
                        .success()
                        .stdout(
                            predicate::str::contains("wrote")
                                .and(predicate::str::contains(filename)),
                        );
                }
                .boxed()
            })),
        ],
    )
    .run()
    .await
}
|
use std::{num::NonZeroU64, sync::Arc, time::Duration};
use iox_time::{Time, TimeProvider};
/// Limits `send` actions to a specific "messages per second".
#[derive(Debug)]
pub struct RateLimiter {
    // Minimum duration between two consecutive sends (1 / rate).
    wait_time: Duration,
    // Timestamp of the most recent completed `send`, if any.
    last_msg: Option<Time>,
    // Clock abstraction; mockable in tests.
    time_provider: Arc<dyn TimeProvider>,
}
impl RateLimiter {
    /// Create new rate limiter using the given config.
    pub fn new(msgs_per_second: NonZeroU64, time_provider: Arc<dyn TimeProvider>) -> Self {
        Self {
            // Evenly space messages: one every 1/rate seconds.
            wait_time: Duration::from_secs_f64(1.0 / msgs_per_second.get() as f64),
            last_msg: None,
            time_provider,
        }
    }
    /// Record a send action.
    ///
    /// This may async-block if the rate limit was hit, until it is OK to send a message again.
    ///
    /// It is safe to cancel this method.
    pub async fn send(&mut self) {
        let mut now = self.time_provider.now();
        if let Some(last) = &self.last_msg {
            let wait_until = *last + self.wait_time;
            if wait_until > now {
                self.time_provider.sleep_until(wait_until).await;
                // refresh `now`
                now = self.time_provider.now();
            }
        }
        // modify AFTER `await` due to cancellation: if this future is dropped
        // mid-sleep, `last_msg` stays unchanged and the next call waits again.
        self.last_msg = Some(now);
    }
}
#[cfg(test)]
mod tests {
    use iox_time::MockProvider;
    use std::future::Future;
    use super::*;
    #[tokio::test]
    async fn new_always_works() {
        // The very first send never blocks, regardless of the configured rate.
        let mut limiter = RateLimiter::new(
            NonZeroU64::new(1).unwrap(),
            Arc::new(MockProvider::new(Time::MIN)),
        );
        limiter.send().await;
    }
    #[tokio::test]
    async fn u64_max_msgs_per_second() {
        // Extreme rate must not overflow/panic in the wait-time computation.
        let mut limiter = RateLimiter::new(
            NonZeroU64::new(u64::MAX).unwrap(),
            Arc::new(MockProvider::new(Time::MIN)),
        );
        limiter.send().await;
        limiter.send().await;
    }
    #[tokio::test]
    async fn throttle() {
        let time_provider = Arc::new(MockProvider::new(Time::MIN));
        let mut limiter =
            RateLimiter::new(NonZeroU64::new(1).unwrap(), Arc::clone(&time_provider) as _);
        limiter.send().await;
        {
            // do NOT advance time
            let fut = limiter.send();
            tokio::pin!(fut);
            assert_fut_pending(&mut fut).await;
            // tick
            time_provider.inc(Duration::from_secs(1));
            fut.await;
            // fut dropped here (important because it mut-borrows `limiter`)
        }
        // tick (but not enough)
        time_provider.inc(Duration::from_millis(500));
        let fut = limiter.send();
        tokio::pin!(fut);
        assert_fut_pending(&mut fut).await;
        // tick (enough)
        time_provider.inc(Duration::from_millis(500));
        fut.await;
    }
    #[tokio::test]
    async fn throttle_after_cancel() {
        let time_provider = Arc::new(MockProvider::new(Time::MIN));
        let mut limiter =
            RateLimiter::new(NonZeroU64::new(1).unwrap(), Arc::clone(&time_provider) as _);
        limiter.send().await;
        // do NOT advance time
        {
            let fut = limiter.send();
            tokio::pin!(fut);
            assert_fut_pending(&mut fut).await;
            // fut dropped here
        }
        // 2nd try should still be pending: cancellation must not have
        // updated `last_msg`.
        let fut = limiter.send();
        tokio::pin!(fut);
        assert_fut_pending(&mut fut).await;
        // tick
        time_provider.inc(Duration::from_secs(1));
        fut.await;
    }
    /// Assert that given future is pending.
    ///
    /// Races the future against a short real-time sleep; panics if the
    /// future completes first.
    async fn assert_fut_pending<F>(fut: &mut F)
    where
        F: Future + Send + Unpin,
        F::Output: std::fmt::Debug,
    {
        tokio::select! {
            e = fut => panic!("future is not pending, yielded: {e:?}"),
            _ = tokio::time::sleep(Duration::from_millis(10)) => {},
        };
    }
}
|
use crate::{HdbError, HdbResult, HdbValue};
#[cfg(feature = "sync")]
use byteorder::{LittleEndian, ReadBytesExt};
// Raw wire value that represents SQL NULL for SECONDDATE columns.
const NULL_REPRESENTATION: i64 = 315_538_070_401;
// Factors to decompose the in-day remainder into hours/minutes/seconds.
const SECOND_FACTOR: i64 = 1;
const MINUTE_FACTOR: i64 = 60;
const HOUR_FACTOR: i64 = 3_600;
const DAY_FACTOR: i64 = 86_400;
// Offset turning the wire day count into a Julian day number
// ("Zeitenwende" is German for "turn of the era").
const ZEITENWENDE: i64 = 1_721_424;
// First Julian day on which Gregorian-calendar corrections are applied
// (presumably the 1582 calendar switch -- TODO confirm).
const JGREG: i64 = 2_299_161;
// const IGREG: i64 = 18_994; // Julian day of 01.01.0001 n. Chr.
/// Implementation of HANA's `SecondDate`.
///
/// The type is used internally to implement deserialization from the wire.
/// It is agnostic of timezones.
///
/// The wrapped `i64` is the raw wire value (a second count; see
/// `as_ymd_hms` for the decoding).
#[derive(Clone, Debug, Serialize)]
pub struct SecondDate(i64);
impl std::fmt::Display for SecondDate {
    // The format chosen supports the conversion to chrono types.
    // Renders as `YYYY-MM-DDTHH:MM:SS` (no timezone suffix).
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        let (year, month, day, hour, minute, second) = self.as_ymd_hms();
        write!(
            fmt,
            "{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{second:02}",
        )
    }
}
// Equality compares the raw wire values directly.
impl std::cmp::PartialEq<SecondDate> for SecondDate {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl SecondDate {
    // Wraps a raw wire value without validation.
    pub(crate) fn new(raw: i64) -> Self {
        Self(raw)
    }
    // Borrow the raw wire value (used by the serializer).
    pub(crate) fn ref_raw(&self) -> &i64 {
        &self.0
    }
    // Convert into tuple of "elements".
    //
    // Decodes the raw second count into (year, month, day, hour, minute,
    // second) using a Julian-day-to-calendar conversion. The float
    // arithmetic and statement order below are load-bearing; do not
    // reorder or "simplify".
    #[allow(clippy::cast_possible_truncation)]
    #[allow(clippy::cast_precision_loss)]
    #[allow(clippy::cast_sign_loss)]
    pub(crate) fn as_ymd_hms(&self) -> (i32, u8, u8, u8, u8, u8) {
        let value = match self.0 {
            0 => 0, // maps the special value '' == 0 to '0001-01-01 00:00:00.000000000' = 1
            v => v - 1,
        };
        // Split into whole days and the remaining seconds within the day.
        let datevalue = value / DAY_FACTOR;
        let mut timevalue = value - (datevalue * DAY_FACTOR);
        let hour: u8 = (timevalue / HOUR_FACTOR) as u8;
        timevalue -= HOUR_FACTOR * (i64::from(hour));
        let minute: u8 = (timevalue / MINUTE_FACTOR) as u8;
        timevalue -= MINUTE_FACTOR * (i64::from(minute));
        let second: u8 = (timevalue / SECOND_FACTOR) as u8;
        // Convert the day count to a Julian day number, then apply the
        // Gregorian correction for dates on/after JGREG.
        let julian: i64 = datevalue + ZEITENWENDE;
        let ja: i64 = if julian >= JGREG {
            let jalpha: i64 = (((julian - 1_867_216) as f64 - 0.25_f64) / 36_524.25_f64) as i64;
            julian + 1 + jalpha - ((0.25_f64 * jalpha as f64) as i64)
        } else {
            julian
        };
        // Textbook Julian-to-calendar decomposition (jb..je are intermediates).
        let jb: i64 = ja + 1524;
        let jc: i64 = (6680_f64 + ((jb - 2_439_870) as f64 - 122.1_f64) / 365.25_f64) as i64;
        let jd: i64 = ((365 * jc) as f64 + (0.25_f64 * jc as f64)) as i64;
        let je: i64 = ((jb - jd) as f64 / 30.6001) as i64;
        let day: u8 = (jb - jd - ((30.6001 * je as f64) as i64)) as u8;
        let mut month: u8 = je as u8 - 1;
        let mut year: i32 = jc as i32 - 4715;
        // Fold the month into 1..=12 and adjust the year accordingly;
        // years <= 0 skip year zero (astronomical -> civil numbering).
        if month > 12 {
            month -= 12;
        }
        if month > 2 {
            year -= 1;
        }
        if year <= 0 {
            year -= 1;
        }
        (year, month, day, hour, minute, second)
    }
}
/// Reads a SECONDDATE value from the wire (blocking).
///
/// A dedicated sentinel raw value encodes SQL NULL; for nullable columns it
/// becomes `HdbValue::NULL`, for NOT NULL columns it is an error.
#[cfg(feature = "sync")]
pub(crate) fn parse_seconddate_sync(
    nullable: bool,
    rdr: &mut dyn std::io::Read,
) -> HdbResult<HdbValue<'static>> {
    let raw = rdr.read_i64::<LittleEndian>()?;
    if raw != NULL_REPRESENTATION {
        return Ok(HdbValue::SECONDDATE(SecondDate::new(raw)));
    }
    if nullable {
        Ok(HdbValue::NULL)
    } else {
        Err(HdbError::Impl(
            "found NULL value for NOT NULL SECONDDATE column",
        ))
    }
}
/// Reads a SECONDDATE value from the wire (async).
///
/// A dedicated sentinel raw value encodes SQL NULL; for nullable columns it
/// becomes `HdbValue::NULL`, for NOT NULL columns it is an error.
#[cfg(feature = "async")]
pub(crate) async fn parse_seconddate_async<R: std::marker::Unpin + tokio::io::AsyncReadExt>(
    nullable: bool,
    rdr: &mut R,
) -> HdbResult<HdbValue<'static>> {
    let raw = rdr.read_i64_le().await?;
    if raw != NULL_REPRESENTATION {
        return Ok(HdbValue::SECONDDATE(SecondDate::new(raw)));
    }
    if nullable {
        Ok(HdbValue::NULL)
    } else {
        Err(HdbError::Impl(
            "found NULL value for NOT NULL SECONDDATE column",
        ))
    }
}
|
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_quote)]
use proc_macro::Delimiter;
use proc_macro::TokenStream;
use proc_macro::TokenTree;
/// Consumes the next token and returns the inner stream of a group delimited
/// by `delimiter`; emits a compiler diagnostic and panics on anything else.
fn expect_group(item: Option<TokenTree>, delimiter: Delimiter) -> TokenStream {
    match item {
        Some(TokenTree::Group(i)) => {
            if i.delimiter() == delimiter {
                return i.stream();
            }
            // Right token kind, wrong delimiter.
            let err = i.span().error(format!(
                "Expected a {:?} group, saw {:?}",
                delimiter,
                i.delimiter()
            ));
            err.emit();
            panic!("Syntax error");
        }
        None => {
            panic!("Expected delimiter, found end of macro")
        }
        Some(i) => {
            // BUG FIX: this arm previously reported "Expected an ident"
            // (copy-pasted from `expect_ident`); a group was expected here.
            let err = i
                .span()
                .error(format!("Expected a {:?} group", delimiter));
            err.emit();
            panic!("Syntax error");
        }
    }
}
/// Consumes the next token and returns it as an identifier string;
/// emits a compiler diagnostic and panics on anything else.
fn expect_ident(item: Option<TokenTree>) -> String {
    match item {
        Some(TokenTree::Ident(ident)) => ident.to_string(),
        Some(other) => {
            let err = other.span().error("Expected an ident");
            err.emit();
            panic!("Syntax error");
        }
        None => {
            panic!("Expected identifier, found end of macro")
        }
    }
}
/// Maps an OpenType field type name to the Rust type used in generated
/// structs, or `None` for types that are used verbatim.
fn special_type(t: &str) -> Option<String> {
    let mapped = match t {
        /* We don't use types from the fixed crate here because fixed-point
           arithmetic is an artefact of the storage format of OpenType, and
           not something we want to foist on the user. It's more ergonomic
           for them to be able to manipulate plain f32s. */
        "Fixed" | "F2DOT14" => "f32",
        /* But we *do* use fixed point here, because we want to be able to
           compare fractional version numbers for equality without having to
           do epsilon dances. */
        "Version16Dot16" => "U16F16",
        "Offset16" => "u16",
        "Offset32" => "u32",
        "LONGDATETIME" => "chrono::NaiveDateTime",
        _ => return None,
    };
    Some(mapped.to_string())
}
/// Expands a small table-description DSL into `pub struct` definitions with
/// serde derives.
///
/// Input grammar, repeated: `TableName { <field-type> <field-name> ... }`
/// where `<field-type>` is `Maybe(T)` (-> `Option<T>`), `Counted(T)`
/// (-> `Vec<T>` with the `Counted` serde codec), a name recognized by
/// `special_type`, or any other type used verbatim.
#[proc_macro]
pub fn tables(item: TokenStream) -> TokenStream {
    let mut output = TokenStream::new();
    let mut iter = item.into_iter();
    // Generated Rust source is accumulated as text and parsed once at the end.
    let mut out_s = String::new();
    loop {
        // First parse table name
        let maybe_table_name = iter.next();
        if maybe_table_name.is_none() {
            break;
        }
        let table_name = expect_ident(maybe_table_name);
        out_s.push_str(&format!(
            "/// Low-level structure used for serializing/deserializing table\n#[allow(missing_docs)]\n#[derive(Serialize, Deserialize, Debug, PartialEq)]\npub struct {} {{",
            table_name,
        ));
        // Then the brace-delimited field list.
        let mut table_def = expect_group(iter.next(), Delimiter::Brace).into_iter();
        loop {
            let maybe_t = table_def.next();
            if maybe_t.is_none() {
                break;
            }
            let t = expect_ident(maybe_t);
            if t == "Maybe" {
                // Maybe(T) name  =>  pub name: Option<T>
                let subtype = expect_group(table_def.next(), Delimiter::Parenthesis)
                    .into_iter()
                    .next()
                    .unwrap()
                    .to_string();
                let name = expect_ident(table_def.next());
                out_s.push_str(&format!("pub {} : Option<{}>,\n", name, subtype))
            } else if t == "Counted" {
                // Counted(T) name  =>  pub name: Vec<T> with the Counted codec
                let subtype = expect_group(table_def.next(), Delimiter::Parenthesis)
                    .into_iter()
                    .next()
                    .unwrap()
                    .to_string();
                out_s.push_str("#[serde(with = \"Counted\")]\n");
                let name = expect_ident(table_def.next());
                out_s.push_str(&format!("pub {} : Vec<{}>,\n", name, subtype))
            } else if let Some(mapped_type) = special_type(&t) {
                // Renamed from the misleading `nonspecial_type`: this binding
                // holds the *mapped* Rust type for a special OpenType name,
                // serialized via the codec module named after the DSL type.
                out_s.push_str(&format!("#[serde(with = \"{}\")]\n", t));
                let name = expect_ident(table_def.next());
                out_s.push_str(&format!("pub {} : {},\n", name, mapped_type))
            } else {
                // Any other type is used verbatim.
                let name = expect_ident(table_def.next());
                out_s.push_str(&format!("pub {} : {},\n", name, t))
            }
        }
        out_s.push('}');
    }
    let ts1: TokenStream = out_s.parse().unwrap();
    output.extend(ts1);
    output
}
|
#![no_main]
#![no_std]
extern crate cortex_m;
extern crate cortex_m_rt as rt;
extern crate cortex_m_semihosting as sh;
extern crate panic_halt; // you can put a breakpoint on `rust_begin_unwind` to catch panics
extern crate atomic_queue;
extern crate embedded_hal;
#[macro_use]
extern crate lazy_static;
extern crate tm4c129x_hal as hal;
extern crate tm4c129x as device;
use core::fmt::Write;
use rt::ExceptionFrame;
use rt::exception;
use rt::entry;
use sh::hio;
use device::interrupt;
use hal::prelude::*;
use hal::timer::{TIMER0, TIMER1, TIMER2, TIMER3, TIMER4, TIMER5, TIMER6, TIMER7};
use hal::time::Hertz;
use hal::timer::Event;
use atomic_queue::AtomicQueue;
// Generates one interrupt handler per `(handler_name, timer_peripheral,
// timer_index);` triple. Each handler clears the timer's timeout interrupt
// flag and pushes the timer's index onto the global message queue for the
// main loop to consume.
macro_rules! timer_interrupt_macro {
    ( $($int_name:ident,$timer_path:ident,$timer_num:tt;)+ ) => {
        $(
            #[interrupt]
            fn $int_name() {
                // clear the interrupt
                unsafe { (*device::$timer_path::ptr()).icr.write(|w| w.tatocint().bit(true)); }
                // Best-effort push: if the queue is full the event is dropped.
                match MSG_QUEUE.push($timer_num) {
                    _ => {},
                }
            }
        )+
    }
}
// For each `(binding, peripheral, constructor, interrupt)` tuple: constructs
// the timer via its HAL constructor (initial rate 10 Hz; the real rate is set
// later with `start`), unmasks its NVIC interrupt, enables the timeout event,
// and binds the timer to a local `let mut` variable in the caller's scope.
macro_rules! configure_timers {
    ( $pc_ref:expr, $clk_ref:expr ; $( ($var_name:ident, $peripheral:expr, $hal_func:path, $interrupt:ident) ,)+ ) => {
        $(
            let mut $var_name = {
                let mut tim = $hal_func (
                    $peripheral,
                    Hertz(10),
                    $pc_ref,
                    $clk_ref,
                );
                // Unmasking before `listen` is fine: the timeout event is
                // not enabled until the `listen` call below.
                unsafe { device::NVIC::unmask(device::Interrupt::$interrupt); }
                tim.listen(Event::TimeOut);
                tim
            };
        )+
    }
}
// Backing storage for the interrupt-to-main-loop message queue.
static mut MSG_QUEUE_STORAGE: [u8; 100] = [0; 100];
lazy_static! {
    // Lock-free queue of timer indices, pushed from interrupt handlers and
    // popped in `main`. The unsafe block takes the only &'static mut borrow
    // of MSG_QUEUE_STORAGE; nothing else touches that array.
    static ref MSG_QUEUE: AtomicQueue<'static, u8> = {
        let m = unsafe { AtomicQueue::new(&mut MSG_QUEUE_STORAGE) };
        m
    };
}
#[entry]
fn main() -> ! {
// Set up semihosting
let mut stdout = hio::hstdout().unwrap();
writeln!(stdout, "Semihosting initialized").unwrap();
let peripherals = hal::Peripherals::take().unwrap();
let _core_peripherals = hal::CorePeripherals::take().unwrap();
writeln!(stdout, "Setting up system clock").unwrap();
let mut sc = peripherals.SYSCTL.constrain();
sc.clock_setup.oscillator = hal::sysctl::Oscillator::Main(
hal::sysctl::CrystalFrequency::_16mhz,
hal::sysctl::SystemClock::UsePll(hal::sysctl::PllOutputFrequency::_120mhz),
);
let clocks = sc.clock_setup.freeze();
writeln!(stdout, "Enabling GPIO PORTA").unwrap();
let mut porta = peripherals.GPIO_PORTA_AHB.split(&sc.power_control);
writeln!(stdout, "Activating UART0").unwrap();
let mut uart = hal::serial::Serial::uart0(
peripherals.UART0,
porta
.pa1
.into_af_push_pull::<hal::gpio::AF1>(&mut porta.control),
porta
.pa0
.into_af_push_pull::<hal::gpio::AF1>(&mut porta.control),
(),
(),
115200_u32.bps(),
hal::serial::NewlineMode::SwapLFtoCRLF,
&clocks,
&sc.power_control,
);
configure_timers! { &sc.power_control, &clocks;
(timer0, peripherals.TIMER0, hal::timer::Timer::<TIMER0>::timer0, TIMER0A),
(timer1, peripherals.TIMER1, hal::timer::Timer::<TIMER1>::timer1, TIMER1A),
(timer2, peripherals.TIMER2, hal::timer::Timer::<TIMER2>::timer2, TIMER2A),
(timer3, peripherals.TIMER3, hal::timer::Timer::<TIMER3>::timer3, TIMER3A),
(timer4, peripherals.TIMER4, hal::timer::Timer::<TIMER4>::timer4, TIMER4A),
(timer5, peripherals.TIMER5, hal::timer::Timer::<TIMER5>::timer5, TIMER5A),
(timer6, peripherals.TIMER6, hal::timer::Timer::<TIMER6>::timer6, TIMER6A),
(timer7, peripherals.TIMER7, hal::timer::Timer::<TIMER7>::timer7, TIMER7A),
};
writeln!(stdout, "Starting timers. Listen on UART0 for timout messages.").unwrap();
timer0.start(Hertz(10));
timer1.start(Hertz(20));
timer2.start(Hertz(30));
timer3.start(Hertz(40));
timer4.start(Hertz(50));
timer5.start(Hertz(60));
timer6.start(Hertz(60));
timer7.start(Hertz(70));
loop {
match MSG_QUEUE.pop() {
Some(0) => {
writeln!(uart, "Timer 0").unwrap();
timer0.start(Hertz(10));
},
Some(1) => {
writeln!(uart, "Timer 1").unwrap();
timer1.start(Hertz(20));
},
Some(2) => {
writeln!(uart, "Timer 2").unwrap();
timer2.start(Hertz(30));
},
Some(3) => {
writeln!(uart, "Timer 3").unwrap();
timer3.start(Hertz(40));
},
Some(4) => {
writeln!(uart, "Timer 4").unwrap();
timer4.start(Hertz(50));
},
Some(5) => {
writeln!(uart, "Timer 5").unwrap();
timer5.start(Hertz(60));
},
Some(6) => {
writeln!(uart, "Timer 6").unwrap();
timer6.start(Hertz(70));
},
Some(7) => {
writeln!(uart, "Timer 7").unwrap();
timer7.start(Hertz(80));
},
_ => {},
}
}
}
/// Hard-fault handler: report the stacked exception frame and halt via panic.
#[exception]
fn HardFault(ef: &ExceptionFrame) -> ! {
    panic!("HardFault at {:#?}", ef);
}
/// Catch-all for interrupts without a dedicated handler: report the IRQ
/// number and halt via panic.
#[exception]
fn DefaultHandler(irqn: i16) {
    panic!("Unhandled exception (IRQn = {})", irqn);
}
// Instantiate one ISR per timer; the third field is the tag pushed onto
// MSG_QUEUE, matched by the main loop's `Some(n)` arms.
timer_interrupt_macro! {
    TIMER0A, TIMER0, 0;
    TIMER1A, TIMER1, 1;
    TIMER2A, TIMER2, 2;
    TIMER3A, TIMER3, 3;
    TIMER4A, TIMER4, 4;
    TIMER5A, TIMER5, 5;
    TIMER6A, TIMER6, 6;
    TIMER7A, TIMER7, 7;
}
|
use rocket;
use config;
use woltlab_auth_helper;
mod routes;
pub mod users;
pub mod file_entries;
pub mod file_entry_types;
pub mod acl;
/// Rocket routes for authentication (currently only the Woltlab login).
pub fn get_auth_routes() -> Vec<rocket::Route> {
    routes![
        routes::woltlab_login
    ]
}
/// Rocket routes for user data: profile lookup plus file-entry save/list
/// endpoints.
pub fn get_user_routes() -> Vec<rocket::Route> {
    routes![
        routes::get_user,
        routes::save_user_file_entry,
        routes::list_user_file_entry_types,
        routes::list_user_file_entries
    ]
}
/// Entry point: delegates to the library crate's greeting routine.
fn main() {
    my_crate::greet(); // Calling from local crate
}
|
use crate::archives::Archives;
pub mod archives;
/// Open the Destiny 2 package directory, time how long indexing takes, then
/// extract one sample entry from package 0x03FF to disk.
fn main() -> std::io::Result<()> {
    // Instant is monotonic (unlike SystemTime), so the elapsed measurement
    // cannot fail or go backwards; this replaces the fallible
    // SystemTime/duration_since/expect chain.
    let start = std::time::Instant::now();
    let archives = Archives::new(r"C:\Program Files (x86)\Steam\steamapps\common\Destiny 2\".to_string());
    println!("Took {:?}ms", start.elapsed().as_millis());
    let pack = archives.packages.get(&0x03FF).unwrap();
    // Pull the second entry of the package and dump its raw bytes.
    let data = pack.extract_entry(*pack.get_entries().get(1).unwrap());
    std::fs::write(r"D:\sample.wem", data).unwrap();
    Ok(())
}
|
use crate::db::HashValueDb;
use crate::errors::MerkleTreeError;
use crate::hasher::Arity2Hasher;
use crate::types::LeafIndex;
use std::marker::PhantomData;
// Following idea described here https://ethresear.ch/t/optimizing-sparse-merkle-trees/3751
/// The "left" slot of a stored node pair: either the full remaining path of
/// the single non-empty leaf under this node (`Path`), or the hash of the
/// left subtree (`SubtreeHash`). The paired "right" value holds the leaf
/// hash or right-subtree hash respectively.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NodeType<H> {
    Path(Vec<u8>),
    SubtreeHash(H),
}
/// The types `D`, `H` and `MTH` correspond to the types of data, hash and merkle tree hasher
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BinarySparseMerkleTree<D: Clone, H: Clone, MTH>
where
    MTH: Arity2Hasher<D, H>,
{
    // Number of levels between the root and the leaves; leaf paths have
    // exactly `depth` bits.
    pub depth: usize,
    // Hash of the current tree root.
    pub root: H,
    pub hasher: MTH,
    /// `empty_subtree_hashes` contains the hashes of empty subtrees at each level.
    /// The 1st element is the root hash when all subtrees are empty and last element is the hash
    /// of the empty leaf
    pub empty_subtree_hashes: Vec<H>,
    // Marks ownership of the (otherwise unused) data type parameter `D`.
    pub phantom: PhantomData<D>,
}
impl<D: Clone, H: Clone + PartialEq, MTH> BinarySparseMerkleTree<D, H, MTH>
where
MTH: Arity2Hasher<D, H>,
{
pub fn new(
empty_leaf_val: D,
hasher: MTH,
depth: usize,
) -> Result<BinarySparseMerkleTree<D, H, MTH>, MerkleTreeError> {
assert!(depth > 0);
let mut cur_hash = hasher.hash_leaf_data(empty_leaf_val)?;
let mut empty_subtree_hashes = vec![];
empty_subtree_hashes.insert(0, cur_hash);
for i in 0..depth {
cur_hash = hasher.hash_tree_nodes(
empty_subtree_hashes[i].clone(),
empty_subtree_hashes[i].clone(),
)?;
empty_subtree_hashes.insert(0, cur_hash.clone());
}
Ok(BinarySparseMerkleTree {
depth,
root: empty_subtree_hashes[0].clone(),
hasher,
empty_subtree_hashes,
phantom: PhantomData,
})
}
pub fn update(
&mut self,
idx: &dyn LeafIndex,
val: D,
hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,
) -> Result<(), MerkleTreeError> {
let path = idx.to_leaf_path(2, self.depth);
let hash = self.hasher.hash_leaf_data(val)?;
let new_root = self._update(path, hash, self.root.clone(), 0, hash_db)?;
self.root = new_root;
Ok(())
}
    /// Return the leaf hash stored at index `idx`, or the empty-leaf hash if
    /// nothing was ever stored there. When `proof` is `Some`, the (left,
    /// right) node pairs visited on the way down are appended to it for later
    /// use with `verify_proof`.
    pub fn get(
        &self,
        idx: &dyn LeafIndex,
        proof: &mut Option<Vec<(NodeType<H>, H)>>,
        hash_db: &dyn HashValueDb<H, (NodeType<H>, H)>,
    ) -> Result<H, MerkleTreeError> {
        let mut path = idx.to_leaf_path(2, self.depth);
        let mut cur_node = self.root.clone();
        let need_proof = proof.is_some();
        let mut proof_vec = Vec::<(NodeType<H>, H)>::new();
        // Walk one level per iteration, consuming one path bit at the bottom
        // of the loop.
        for i in 0..self.depth {
            if cur_node == self.empty_subtree_hashes[i] {
                // Subtree under `cur_node` is empty, so return hash of the empty leaf which is
                // the last element of `empty_subtree_hashes`
                cur_node = self.empty_subtree_hashes[self.depth].clone();
                break;
            }
            let children = hash_db.get(&cur_node)?;
            if need_proof {
                proof_vec.push(children.clone());
            }
            let (left_child, right_child) = children;
            match left_child {
                // A `Path` node compresses a subtree holding a single leaf:
                // the stored path addresses that leaf and `right_child` is its
                // hash.
                NodeType::Path(right_child_path) => {
                    if path == right_child_path {
                        cur_node = right_child;
                        break;
                    } else {
                        // No non empty leaf in the tree with this `path`, so return hash of the
                        // empty leaf which is the last element of `empty_subtree_hashes`
                        cur_node = self.empty_subtree_hashes[self.depth].clone();
                        break;
                    }
                }
                NodeType::SubtreeHash(left_subtree_hash) => {
                    if path[0] == 1 {
                        // Check right subtree
                        cur_node = right_child;
                    } else {
                        // Check left subtree
                        cur_node = left_subtree_hash;
                    }
                }
            }
            path.remove(0);
        }
        // Hand the collected proof nodes back to the caller, if requested.
        match proof {
            Some(v) => {
                v.append(&mut proof_vec);
            }
            None => (),
        }
        Ok(cur_node)
    }
    /// Check a proof produced by `get` against the current root: returns
    /// `Ok(true)` when `val` is the value at index `idx` under `self.root`.
    /// An empty tree is proven by an empty proof.
    pub fn verify_proof(
        &self,
        idx: &dyn LeafIndex,
        val: D,
        proof: Vec<(NodeType<H>, H)>,
    ) -> Result<bool, MerkleTreeError> {
        if self.root == self.empty_subtree_hashes[0] {
            // Whole tree is empty: the only valid proof is the empty one.
            return Ok(proof.len() == 0);
        }
        let leaf_hash = self.hasher.hash_leaf_data(val)?;
        let mut path = idx.to_leaf_path(2, self.depth);
        let proof_len = proof.len();
        let mut subtree_root_hash = self.root.clone();
        // Re-derive each level's hash from the proof nodes, descending along
        // `path` and consuming one bit per level.
        for (left_child, right_child) in proof {
            match left_child {
                // Compressed single-leaf subtree: the proof terminates here.
                NodeType::Path(right_child_path) => {
                    if path == right_child_path {
                        return Ok(right_child == leaf_hash);
                    } else {
                        // No non empty leaf with this path, the leaf hash should be the hash of the empty leaf
                        return Ok(self.empty_subtree_hashes[0] == leaf_hash);
                    }
                }
                NodeType::SubtreeHash(left_subtree_hash) => {
                    // The pair must hash to the subtree root derived so far.
                    let expected_hash = self
                        .hasher
                        .hash_tree_nodes(left_subtree_hash.clone(), right_child.clone())?;
                    if expected_hash != subtree_root_hash {
                        return Ok(false);
                    }
                    if path[0] == 1 {
                        // Check right subtree
                        subtree_root_hash = right_child
                    } else {
                        // Check left subtree
                        subtree_root_hash = left_subtree_hash;
                    }
                }
            }
            path.remove(0);
        }
        if proof_len == self.depth {
            // Proof reached leaf level: the derived hash must be the leaf hash.
            Ok(subtree_root_hash == leaf_hash)
        } else {
            // Shorter proof: the remaining subtree is empty, so only the
            // empty-leaf value verifies.
            Ok(self.empty_subtree_hashes[0] == leaf_hash)
        }
    }
    /// Recursive core of `update`: set the leaf addressed by the remaining
    /// `path` bits (relative to a subtree rooted at `root` at level `depth`)
    /// to hash `val`, persist any new nodes in `hash_db`, and return the new
    /// subtree root hash.
    fn _update(
        &mut self,
        mut path: Vec<u8>,
        val: H,
        root: H,
        depth: usize,
        hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,
    ) -> Result<H, MerkleTreeError> {
        if depth == self.depth {
            // Leaf level: the subtree root IS the leaf hash.
            return Ok(val);
        }
        if root == self.empty_subtree_hashes[depth] {
            // Update an empty subtree: make a single-val subtree
            let new_root = self.update_empty_subtree(path.clone(), val.clone(), depth)?;
            hash_db.put(new_root.clone(), (NodeType::Path(path), val))?;
            Ok(new_root)
        } else {
            let (left_child, right_child) = hash_db.get(&root)?;
            match left_child {
                // Compressed single-leaf subtree: splitting/overwriting is
                // handled by `update_one_val_subtree`.
                NodeType::Path(right_child_path) => self.update_one_val_subtree(
                    path,
                    val,
                    right_child_path,
                    right_child,
                    depth,
                    hash_db,
                ),
                NodeType::SubtreeHash(left_subtree_hash) => {
                    if path[0] == 1 {
                        // New value lies in right subtree so update right subtree
                        path.remove(0);
                        let new_right = self._update(path, val, right_child, depth + 1, hash_db)?;
                        let root = self
                            .hasher
                            .hash_tree_nodes(left_subtree_hash.clone(), new_right.clone())?;
                        hash_db.put(
                            root.clone(),
                            (NodeType::SubtreeHash(left_subtree_hash), new_right),
                        )?;
                        Ok(root)
                    } else {
                        // New value lies in left subtree so update left subtree
                        path.remove(0);
                        let new_left =
                            self._update(path, val, left_subtree_hash, depth + 1, hash_db)?;
                        let root = self
                            .hasher
                            .hash_tree_nodes(new_left.clone(), right_child.clone())?;
                        hash_db
                            .put(root.clone(), (NodeType::SubtreeHash(new_left), right_child))?;
                        Ok(root)
                    }
                }
            }
        }
    }
    /// Update subtree with 1 non-empty leaf, result will be creation of 2 subtrees, each with 1
    /// non-empty leaf unless the same non empty leaf is being updated. Save intermediate nodes in the DB
    ///
    /// `path_for_old_key`/`val_for_old_key` describe the leaf already in the
    /// subtree; `path_for_new_key`/`val_for_new_key` describe the one being
    /// inserted. Both paths are relative to the subtree at level `depth`.
    fn update_one_val_subtree(
        &mut self,
        mut path_for_new_key: Vec<u8>,
        val_for_new_key: H,
        mut path_for_old_key: Vec<u8>,
        val_for_old_key: H,
        depth: usize,
        hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,
    ) -> Result<H, MerkleTreeError> {
        if path_for_new_key == path_for_old_key {
            // The path being updated is same as the existing path, this is the case of updating value
            // of an existing key so the resulting subtree has size a single non empty leaf.
            let new_root = self.update_empty_subtree(
                path_for_new_key.clone(),
                val_for_new_key.clone(),
                depth,
            )?;
            hash_db.put(
                new_root.clone(),
                (NodeType::Path(path_for_new_key), val_for_new_key),
            )?;
            return Ok(new_root);
        }
        // Four cases, keyed on the leading path bit of each key: when both
        // keys descend the same way, recurse on that side and pair it with the
        // precomputed empty-subtree hash; when they diverge, each side becomes
        // a fresh single-leaf subtree.
        let (left, right) = {
            if path_for_new_key[0] == 1 {
                // MSB is set, new value lies in right subtree
                if path_for_old_key[0] == 1 {
                    // Existing value is in right subtree, hence left subtree is empty
                    path_for_new_key.remove(0);
                    path_for_old_key.remove(0);
                    (
                        self.empty_subtree_hashes[depth + 1].clone(),
                        self.update_one_val_subtree(
                            path_for_new_key,
                            val_for_new_key,
                            path_for_old_key,
                            val_for_old_key,
                            depth + 1,
                            hash_db,
                        )?,
                    )
                } else {
                    // Existing value is in left subtree, create 2 subtrees with 1 value each
                    path_for_new_key.remove(0);
                    path_for_old_key.remove(0);
                    let left_subtree_hash = self.update_empty_subtree(
                        path_for_old_key.clone(),
                        val_for_old_key.clone(),
                        depth + 1,
                    )?;
                    let right_subtree_hash = self.update_empty_subtree(
                        path_for_new_key.clone(),
                        val_for_new_key.clone(),
                        depth + 1,
                    )?;
                    hash_db.put(
                        left_subtree_hash.clone(),
                        (NodeType::Path(path_for_old_key), val_for_old_key),
                    )?;
                    hash_db.put(
                        right_subtree_hash.clone(),
                        (NodeType::Path(path_for_new_key), val_for_new_key),
                    )?;
                    (left_subtree_hash, right_subtree_hash)
                }
            } else {
                // MSB is unset, new value lies in left subtree
                if path_for_old_key[0] == 1 {
                    // Existing value is in right subtree, create 2 subtrees with 1 value each
                    path_for_new_key.remove(0);
                    path_for_old_key.remove(0);
                    let left_subtree_hash = self.update_empty_subtree(
                        path_for_new_key.clone(),
                        val_for_new_key.clone(),
                        depth + 1,
                    )?;
                    let right_subtree_hash = self.update_empty_subtree(
                        path_for_old_key.clone(),
                        val_for_old_key.clone(),
                        depth + 1,
                    )?;
                    hash_db.put(
                        right_subtree_hash.clone(),
                        (NodeType::Path(path_for_old_key), val_for_old_key),
                    )?;
                    hash_db.put(
                        left_subtree_hash.clone(),
                        (NodeType::Path(path_for_new_key), val_for_new_key),
                    )?;
                    (left_subtree_hash, right_subtree_hash)
                } else {
                    // Existing value is in left subtree, hence right subtree is empty
                    path_for_new_key.remove(0);
                    path_for_old_key.remove(0);
                    (
                        self.update_one_val_subtree(
                            path_for_new_key,
                            val_for_new_key,
                            path_for_old_key,
                            val_for_old_key,
                            depth + 1,
                            hash_db,
                        )?,
                        self.empty_subtree_hashes[depth + 1].clone(),
                    )
                }
            }
        };
        // Combine the two children and persist the new internal node.
        let root = self.hasher.hash_tree_nodes(left.clone(), right.clone())?;
        hash_db.put(root.clone(), (NodeType::SubtreeHash(left), right))?;
        Ok(root)
    }
/// Make a root hash of a (sub)tree with a single key/value pair from empty tree
fn update_empty_subtree(
&self,
mut path: Vec<u8>,
val: H,
depth: usize,
) -> Result<H, MerkleTreeError> {
if depth == self.depth {
return Ok(val);
}
let (l, r) = {
if path[0] == 1 {
// MSB is set, descend in right subtree and hash the result with empty left subtree
path.remove(0);
(
self.empty_subtree_hashes[depth + 1].clone(),
self.update_empty_subtree(path, val, depth + 1)?,
)
} else {
// MSB is unset, descend in left subtree and hash the result with empty right subtree
path.remove(0);
(
self.update_empty_subtree(path, val, depth + 1)?,
self.empty_subtree_hashes[depth + 1].clone(),
)
}
};
self.hasher.hash_tree_nodes(l, r)
}
}
#[allow(non_snake_case)]
#[cfg(test)]
mod tests {
    use super::*;
    use crate::sha2::Sha256;
    extern crate mimc_rs;
    extern crate rand;
    use self::rand::{thread_rng, Rng};
    use crate::db::{InMemoryBigUintHashDb, InMemoryHashValueDb};
    use crate::hasher::mimc_hash::MiMCHasher;
    use crate::hasher::Sha256Hasher;
    use num_bigint::{BigUint, RandBigInt};
    use num_traits::Pow;
    use std::collections::HashSet;
    // Overwrites the same leaf repeatedly and checks the latest value wins.
    #[test]
    fn test_binary_tree_sha256_string_repeat_vals() {
        let mut db = InMemoryHashValueDb::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
        let tree_depth = 3;
        let max_leaves = 2u64.pow(tree_depth as u32);
        // Choice of `empty_leaf_val` is arbitrary
        let empty_leaf_val = "";
        let hasher = Sha256Hasher {
            leaf_data_domain_separator: 0,
            node_domain_separator: 1,
        };
        let mut tree =
            BinarySparseMerkleTree::new(empty_leaf_val.clone(), hasher.clone(), tree_depth)
                .unwrap();
        let empty_leaf_hash = Arity2Hasher::hash_leaf_data(&hasher, empty_leaf_val).unwrap();
        // A freshly built tree reports the empty-leaf hash at every index.
        for i in 0..max_leaves {
            assert_eq!(tree.get(&i, &mut None, &db).unwrap(), empty_leaf_hash);
        }
        let mut data = vec![];
        for i in 0..max_leaves {
            let val = [String::from("val_"), i.to_string()].concat();
            let hash = Arity2Hasher::hash_leaf_data(&hasher, &val).unwrap();
            data.push((i, val, hash));
        }
        tree.update(&data[0].0, &data[0].1, &mut db).unwrap();
        // Update subtree with 1 value
        tree.update(&data[0].0, &data[1].1, &mut db).unwrap();
        assert_eq!(tree.get(&0, &mut None, &db).unwrap(), data[1].2);
        tree.update(&data[2].0, &data[2].1, &mut db).unwrap();
        // Update subtree with 1 value
        tree.update(&data[0].0, &data[3].1, &mut db).unwrap();
        assert_eq!(tree.get(&0, &mut None, &db).unwrap(), data[3].2);
    }
    // Random updates at possibly-repeating indices; each update must be
    // readable back and provable immediately, and again after all updates.
    #[test]
    fn test_binary_tree_sha256_string_updating_existing_keys() {
        // Update keys already present in the tree
        let mut db = InMemoryHashValueDb::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
        let tree_depth = 10;
        let max_leaves = 2u64.pow(tree_depth as u32);
        let empty_leaf_val = "";
        let hasher = Sha256Hasher {
            leaf_data_domain_separator: 0,
            node_domain_separator: 1,
        };
        let mut tree =
            BinarySparseMerkleTree::new(empty_leaf_val.clone(), hasher.clone(), tree_depth)
                .unwrap();
        let empty_leaf_hash = Arity2Hasher::hash_leaf_data(&hasher, empty_leaf_val).unwrap();
        for i in 0..max_leaves {
            assert_eq!(tree.get(&i, &mut None, &db).unwrap(), empty_leaf_hash);
        }
        let test_cases = 300;
        let mut rng = thread_rng();
        let mut data = vec![];
        for _ in 0..test_cases {
            let i: u64 = rng.gen_range(0, max_leaves);
            let val = [String::from("val_"), i.to_string()].concat();
            let hash = Arity2Hasher::hash_leaf_data(&hasher, &val).unwrap();
            data.push((i, val, hash));
        }
        for i in 0..test_cases {
            let idx = &data[i as usize].0;
            tree.update(idx, &data[i as usize].1, &mut db).unwrap();
            let mut proof_vec = Vec::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
            let mut proof = Some(proof_vec);
            assert_eq!(tree.get(idx, &mut proof, &db).unwrap(), data[i as usize].2);
            proof_vec = proof.unwrap();
            assert!(tree
                .verify_proof(idx, &data[i as usize].1, proof_vec.clone())
                .unwrap());
        }
        for i in 0..test_cases {
            let idx = &data[i as usize].0;
            assert_eq!(tree.get(idx, &mut None, &db).unwrap(), data[i as usize].2);
        }
    }
    // Shared driver: fills `data` with `test_cases` entries at unique indices
    // drawn from `index_func`, then updates the tree, verifies each get and
    // proof, and re-reads everything at the end.
    fn check_update_get<'a, C, I, T>(test_cases: usize, mut index_func: C, tree: &'a mut BinarySparseMerkleTree<&'a str, Vec<u8>,
        Sha256Hasher>, data: &'a mut Vec<(I, String, Vec<u8>)>, hasher: Sha256Hasher, db: &mut T) where
        T: HashValueDb<Vec<u8>, (NodeType<Vec<u8>>, Vec<u8>)>, C: FnMut() -> I,
        I: LeafIndex + std::hash::Hash + std::cmp::Eq + std::string::ToString + Clone + 'a {
        // `data` will have unique indices
        let mut set = HashSet::new();
        while data.len() < test_cases {
            let i = index_func();
            if set.contains(&i) {
                continue;
            } else {
                set.insert(i.clone());
            }
            let val = [String::from("val_"), i.to_string()].concat();
            let hash = Arity2Hasher::hash_leaf_data(&hasher, &val).unwrap();
            data.push((i, val, hash));
        }
        for i in 0..test_cases {
            let idx = &data[i as usize].0;
            tree.update(idx, &data[i as usize].1, db).unwrap();
            let mut proof_vec = Vec::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
            let mut proof = Some(proof_vec);
            assert_eq!(tree.get(idx, &mut proof, db).unwrap(), data[i as usize].2);
            proof_vec = proof.unwrap();
            assert!(tree
                .verify_proof(idx, &data[i as usize].1, proof_vec.clone())
                .unwrap());
        }
        for i in 0..test_cases {
            let idx = &data[i as usize].0;
            assert_eq!(tree.get(idx, &mut None, db).unwrap(), data[i as usize].2);
        }
    }
    // u64-indexed tree exercised through the shared driver.
    #[test]
    fn test_binary_tree_sha256_string() {
        let mut db = InMemoryHashValueDb::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
        let tree_depth = 10;
        let max_leaves = 2u64.pow(tree_depth as u32);
        let empty_leaf_val = "";
        let hasher = Sha256Hasher {
            leaf_data_domain_separator: 0,
            node_domain_separator: 1,
        };
        let mut tree =
            BinarySparseMerkleTree::new(empty_leaf_val.clone(), hasher.clone(), tree_depth)
                .unwrap();
        let empty_leaf_hash = Arity2Hasher::hash_leaf_data(&hasher, empty_leaf_val).unwrap();
        for i in 0..max_leaves {
            assert_eq!(tree.get(&i, &mut None, &db).unwrap(), empty_leaf_hash);
        }
        let mut data = Vec::<(u64, String, Vec<u8>)>::new();
        let test_cases = 300;
        let mut rng = thread_rng();
        let index_func = || rng.gen_range(0, max_leaves);
        check_update_get(test_cases, index_func, &mut tree, &mut data, hasher, &mut db);
    }
    // Deep (depth 100) tree with 160-bit BigUint indices.
    #[test]
    fn test_binary_tree_sha256_string_BigUint_index() {
        let mut db = InMemoryHashValueDb::<(NodeType<Vec<u8>>, Vec<u8>)>::new();
        let tree_depth = 100;
        let empty_leaf_val = "";
        let hasher = Sha256Hasher {
            leaf_data_domain_separator: 0,
            node_domain_separator: 1,
        };
        let mut tree =
            BinarySparseMerkleTree::new(empty_leaf_val.clone(), hasher.clone(), tree_depth)
                .unwrap();
        let mut data = Vec::<(BigUint, String, Vec<u8>)>::new();
        let test_cases = 1000;
        let mut rng = thread_rng();
        let index_func = || rng.gen_biguint(160);
        check_update_get(test_cases, index_func, &mut tree, &mut data, hasher, &mut db);
    }
    // Same update/get/prove cycle but with the MiMC hasher over BigUints.
    #[test]
    fn test_binary_tree_mimc_BigUint() {
        let mut db = InMemoryBigUintHashDb::<(NodeType<BigUint>, BigUint)>::new();
        let tree_depth = 10;
        let empty_leaf_val = BigUint::from(0u64);
        let hasher = MiMCHasher::new(
            BigUint::from(1u64),
            BigUint::from(2u64),
            BigUint::from(3u64),
        );
        let mut tree =
            BinarySparseMerkleTree::new(empty_leaf_val.clone(), hasher.clone(), tree_depth)
                .unwrap();
        let mut data = vec![];
        let test_cases = 100;
        let mut rng = thread_rng();
        let mut set = HashSet::new();
        while data.len() < test_cases {
            let i = rng.gen_biguint(160);
            if set.contains(&i) {
                continue;
            } else {
                set.insert(i.clone());
            }
            let val: BigUint = rng.gen_biguint(200);
            let hash = Arity2Hasher::hash_leaf_data(&hasher, val.clone()).unwrap();
            data.push((i.clone(), val, hash));
        }
        for i in 0..test_cases {
            let idx = &data[i as usize].0;
            tree.update(idx, data[i as usize].1.clone(), &mut db)
                .unwrap();
            assert_eq!(tree.get(idx, &mut None, &db).unwrap(), data[i as usize].2);
            let mut proof_vec = Vec::<(NodeType<BigUint>, BigUint)>::new();
            let mut proof = Some(proof_vec);
            assert_eq!(tree.get(idx, &mut proof, &db).unwrap(), data[i as usize].2);
            proof_vec = proof.unwrap();
            assert!(tree
                .verify_proof(idx, data[i as usize].1.clone(), proof_vec.clone())
                .unwrap());
        }
    }
}
|
use bintree::Tree;
use std::fmt;
/// Returns `true` when the tree is structurally symmetric, i.e. its left and
/// right subtrees are mirror images of each other. Only the shape is
/// examined; node values are ignored.
///
/// The previous `T: fmt::Display` bound was never used and needlessly
/// restricted callers, so it has been dropped (backward-compatible).
pub fn is_symmetric<T>(tree: &Tree<T>) -> bool {
    // Two trees mirror each other when both are empty, or when both are nodes
    // whose subtrees mirror each other crosswise (left vs. right).
    fn is_mirror_of<T>(t1: &Tree<T>, t2: &Tree<T>) -> bool {
        match (t1, t2) {
            (Tree::End, Tree::End) => true,
            (
                Tree::Node {
                    value: _,
                    left: lleft,
                    right: lright,
                },
                Tree::Node {
                    value: _,
                    left: rleft,
                    right: rright,
                },
            ) => is_mirror_of(lleft, rright) && is_mirror_of(lright, rleft),
            // One side is a node where the other is empty.
            _ => false,
        }
    }
    match tree {
        Tree::Node {
            value: _,
            left,
            right,
        } => is_mirror_of(left, right),
        // The empty tree is trivially symmetric.
        Tree::End => true,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The empty tree is symmetric.
    #[test]
    fn test_is_symmetric_end() {
        assert_eq!(is_symmetric(&Tree::<char>::end()), true);
    }
    // A single leaf is symmetric.
    #[test]
    fn test_is_symmetric_leaf() {
        assert_eq!(is_symmetric(&Tree::leaf('a')), true);
    }
    // Two children on the same side break symmetry; one on each side keeps it.
    #[test]
    fn test_is_symmetric() {
        assert_eq!(
            is_symmetric(&Tree::node('a', Tree::leaf('b'), Tree::leaf('c'))),
            true
        );
        assert_eq!(
            is_symmetric(&Tree::node('a', Tree::leaf('b'), Tree::end())),
            false
        );
        assert_eq!(
            is_symmetric(&Tree::node('a', Tree::end(), Tree::leaf('c'))),
            false
        );
    }
    // Deeper shapes: only the mirror structure matters, not the values.
    #[test]
    fn test_is_symmetric_complex() {
        let tree = Tree::node(
            'a',
            Tree::node(
                'b',
                Tree::node('d', Tree::end(), Tree::leaf('e')),
                Tree::end(),
            ),
            Tree::node(
                'c',
                Tree::end(),
                Tree::node('f', Tree::leaf('g'), Tree::end()),
            ),
        );
        assert_eq!(is_symmetric(&tree), true);
        let tree = Tree::node(
            'a',
            Tree::node('b', Tree::leaf('d'), Tree::leaf('e')),
            Tree::node(
                'c',
                Tree::end(),
                Tree::node('f', Tree::leaf('g'), Tree::end()),
            ),
        );
        assert_eq!(is_symmetric(&tree), false);
    }
}
|
use regex::Regex;
/// A Wi-Fi network as reported by one entry of an `iwlist`-style scan:
/// ESSID, raw signal-level text, and whether an encryption key is required.
pub struct Network<'a> {
    pub name: &'a str,
    pub signal: &'a str,
    pub encrypted: bool,
}
impl<'a> Network<'a> {
    /// Construct a `Network` from its parts; fields borrow from the scan text.
    pub fn new(name: &'a str, signal: &'a str, encrypted: bool) -> Network<'a> {
        Network {
            name: name,
            signal: signal,
            encrypted: encrypted
        }
    }
    /// Parse the first network entry out of a raw scan dump.
    ///
    /// NOTE(review): the regex is recompiled on every call — consider caching
    /// the compiled `Regex`. Also, `caps.name(..)` returning `&str` directly
    /// matches the pre-1.0 `regex` capture API; newer versions return
    /// `Option<Match>` and would need `.as_str()` — confirm the pinned
    /// `regex` version before upgrading.
    ///
    /// # Panics
    /// Panics if the scan text does not match the expected format.
    pub fn from_scan(scan: &str) -> Network {
        let re = Regex::new(r#"(?ms)Quality=(?P<quality>.*?)\sSignal level=(?P<signal>.*?)\n.*?Encryption key:(?P<key>.*?)\n.*?ESSID:"(?P<essid>.*?)".*?"#).unwrap();
        let caps = re.captures(&scan).unwrap();
        // "Encryption key:on" means the network is encrypted.
        let mut key = false;
        if caps.name("key").unwrap() == "on" {
            key = true;
        }
        Network::new(caps.name("essid").unwrap(),
                     caps.name("signal").unwrap(),
                     key)
    }
}
impl<'a> PartialEq for Network<'a> {
    /// Two networks compare equal when their ESSIDs match; the `signal` and
    /// `encrypted` fields do not participate in equality.
    fn eq(&self, other: &Network) -> bool {
        // Direct comparison replaces the old `if … { return true } false`
        // anti-idiom; behavior is unchanged.
        self.name == other.name
    }
}
|
//! Impl block has type parameters.
use near_bindgen::near_bindgen;
use borsh::{BorshDeserialize, BorshSerialize};
use std::marker::PhantomData;
// Contract state with an unused type parameter `T` (held via PhantomData);
// this exercises the near_bindgen macro's handling of generic structs.
#[near_bindgen]
#[derive(Default, BorshDeserialize, BorshSerialize)]
struct Incrementer<T> {
    value: u32,
    data: PhantomData<T>,
}
// Impl block deliberately carries lifetime and type parameters — per the
// file-level doc, this fixture exercises near_bindgen on generic impls.
#[near_bindgen]
impl<'a, T: 'a + std::fmt::Display> Incrementer<T> {
    // Add `by` to the stored counter.
    pub fn inc(&mut self, by: u32) {
        self.value += by;
    }
}
// No runtime behavior; the fixture only needs to compile/expand.
fn main() {}
|
//! A proxy that forwards data to another server and forwards that server's
//! responses back to clients.
//!
//! Because the Tokio runtime uses a thread pool, each TCP connection is
//! processed concurrently with all other TCP connections across multiple
//! threads.
//!
//! You can showcase this by running this in one terminal:
//!
//! cargo run --example proxy
//!
//! This in another terminal
//!
//! cargo run --example echo
//!
//! And finally this in another terminal
//!
//! cargo run --example connect 127.0.0.1:8081
//!
//! This final terminal will connect to our proxy, which will in turn connect to
//! the echo server, and you'll be able to see data flowing between them.
#![warn(rust_2018_idioms)]
extern crate num;
#[macro_use]
extern crate num_derive;
mod memcached_codec;
mod protocol;
use futures::{SinkExt, StreamExt};
use tokio::io;
use tokio::net::{TcpListener, TcpStream};
use tokio_util::codec::{Framed, LinesCodec};
use crate::memcached_codec::{MemcachedBinaryCodec, MemcachedBinaryCodecError};
use crate::protocol::memcached_binary::{Magic, PacketHeader};
use bytes::Bytes;
use futures::future::{try_join, IntoFuture};
use futures::FutureExt;
use std::env;
use std::error::Error;
/// Entry point: bind the listen address (argv[1], default 127.0.0.1:8080) and
/// hand every accepted connection to `transfer`, targeting the upstream
/// address (argv[2]).
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    let mut cli_args = env::args().skip(1);
    let listen_addr = cli_args
        .next()
        .unwrap_or_else(|| "127.0.0.1:8080".to_string());
    let server_addr = cli_args
        .next()
        .unwrap_or_else(|| "10.236.107.20:11220".to_string());
    println!("Listening on: {}", listen_addr);
    println!("Proxying to: {}", server_addr);
    let mut listener = TcpListener::bind(listen_addr).await?;
    // One task per client so connections are handled concurrently.
    while let Ok((inbound, _)) = listener.accept().await {
        let upstream = server_addr.clone();
        tokio::spawn(async move {
            if let Err(e) = transfer(inbound, upstream).await {
                println!("Failed to transfer; error={}", e);
            }
        });
    }
    Ok(())
}
/// Handle one client connection: decode memcached binary packets and answer
/// each request with a stubbed response header (magic 0x81 = response, empty
/// body). The upstream proxying path is currently commented out, so
/// `proxy_addr` is unused for now.
async fn transfer(mut inbound: TcpStream, proxy_addr: String) -> Result<(), Box<dyn Error>> {
    let mut transport: Framed<TcpStream, MemcachedBinaryCodec> =
        Framed::new(inbound, MemcachedBinaryCodec::new());
    while let Some(result) =
        transport.next().await as Option<Result<PacketHeader, MemcachedBinaryCodecError>>
    {
        match result {
            Ok(requestt) => {
                // Echo the request header back as a response: flip the magic
                // byte and zero out all body-related fields.
                let mut request = requestt.clone();
                request.magic = 0x81;
                request.extras_length = 0;
                request.total_body_length = 0;
                request.key_length = 0;
                request.vbucket_id_or_status = 0;
                // NOTE(review): unwrap on a network send will abort this
                // connection's task on error; consider propagating with `?`.
                transport.send(request).await.unwrap();
            }
            Err(e) => {
                println!("error on decoding from socket; error = {:?}", e);
            }
        }
    }
    // Disabled bidirectional byte-level proxying, kept for reference.
    //let mut outbound = TcpStream::connect(proxy_addr).await?;
    //let (mut ri, mut wi) = inbound.split();
    //let (mut ro, mut wo) = outbound.split();
    //let client_to_server = io::copy(&mut ri, &mut wo);
    //let server_to_client = io::copy(&mut ro, &mut wi);
    //try_join(client_to_server, server_to_client).await?;
    Ok(())
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! AccountHandler manages the state of a single Fuchsia account and its personae on a Fuchsia
//! device, and provides access to authentication tokens for Service Provider accounts associated
//! with the Fuchsia account.
#![deny(warnings)]
#![deny(missing_docs)]
#![feature(async_await, await_macro, futures_api)]
mod account;
mod account_handler;
mod auth_provider_supplier;
mod persona;
#[cfg(test)]
mod test_util;
use crate::account_handler::AccountHandler;
use failure::{Error, ResultExt};
use fidl::endpoints::{RequestStream, ServiceMarker};
use fidl_fuchsia_auth_account_internal::{
AccountHandlerControlMarker, AccountHandlerControlRequestStream,
};
use fuchsia_app::server::ServicesServer;
use fuchsia_async as fasync;
use log::{error, info};
use std::sync::Arc;
type TokenManager = token_manager::TokenManager<auth_provider_supplier::AuthProviderSupplier>;
/// Process entry point: serve the AccountHandlerControl FIDL protocol,
/// spawning one async task per incoming channel, until the server future
/// completes.
fn main() -> Result<(), Error> {
    fuchsia_syslog::init_with_tags(&["auth"]).expect("Can't init logger");
    info!("Starting account handler");
    let mut executor = fasync::Executor::new().context("Error creating executor")?;
    // Single handler instance shared (via Arc) across all client channels.
    let account_handler = Arc::new(AccountHandler::new());
    let fut = ServicesServer::new()
        .add_service((AccountHandlerControlMarker::NAME, move |chan| {
            let account_handler_clone = Arc::clone(&account_handler);
            fasync::spawn(
                async move {
                    let stream = AccountHandlerControlRequestStream::from_channel(chan);
                    // Errors on one channel are logged, not fatal to the server.
                    await!(account_handler_clone.handle_requests_from_stream(stream))
                        .unwrap_or_else(|e| {
                            error!("Error handling AccountHandlerControl channel {:?}", e)
                        })
                },
            );
        }))
        .start()
        .context("Error starting AccountHandlerControl server")?;
    executor.run_singlethreaded(fut).context("Failed to execute AccountHandlerControl future")?;
    info!("Stopping account handler");
    Ok(())
}
|
extern crate serde_json;
use std::fs::File;
use std::io::Read;
use std::env;
/// Validate a JSON file: print its contents, then exit 0 when it parses as
/// JSON and 1 otherwise (also exits 1 on usage or read errors; panics if the
/// file cannot be opened).
fn main() {
    println!("Hello, world!");
    let args: Vec<_> = env::args().collect();
    if args.len() != 2 {
        println!("Usage: {} file.json", args[0]);
        std::process::exit(1);
    }
    // Idiomatic borrow replaces the old `let ref path = args[1];`.
    let path = &args[1];
    let mut s = String::new();
    let mut f = File::open(path).expect("Unable to open file");
    match f.read_to_string(&mut s) {
        Err(_) => std::process::exit(1),
        Ok(_) => println!("{}", s),
    }
    // Exit status communicates validity: 0 = valid JSON, 1 = invalid.
    match serde_json::from_str::<serde_json::Value>(&s) {
        Ok(_) => std::process::exit(0),
        Err(_) => std::process::exit(1),
    };
}
|
use glib::translate::*;
use javascriptcore_sys::*;
use std::ops::{Deref, DerefMut};
use crate::GlobalContextRef;
/// Pairs a raw JSC value with the global context that owns it, so the glib
/// wrapper below can protect/unprotect the value against garbage collection.
/// A null `context` disables protection (see the glib_wrapper ref/unref).
pub struct RefWrapper {
    context: JSGlobalContextRef,
    value: JSValueRef,
}
/// Extract the raw JSValueRef from a RefWrapper pointer.
///
/// # Safety
/// `wrapper` must be a valid, non-null pointer to a live `RefWrapper`.
unsafe fn unwrap(wrapper: *mut RefWrapper) -> JSValueRef {
    (*wrapper).value
}
// Shared glib wrapper: ref/unref map to JSC's protect/unprotect so a wrapped
// value is kept alive by the JS garbage collector while Rust holds it.
// Wrappers created without a context (null) skip protection entirely.
glib_wrapper! {
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct ValueRef(Shared<RefWrapper>);
    match fn {
        ref => |ptr| {
            if (*ptr).context.is_null() { return; }
            javascriptcore_sys::JSValueProtect((*ptr).context, (*ptr).value)
        },
        unref => |ptr| {
            if (*ptr).context.is_null() { return; }
            javascriptcore_sys::JSValueUnprotect((*ptr).context, (*ptr).value)
        },
    }
}
impl ValueRef {
    /// True if the wrapped JSValue is a boolean in `context`.
    pub fn is_boolean(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsBoolean(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is `null`.
    pub fn is_null(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsNull(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is `undefined`.
    pub fn is_undefined(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsUndefined(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is a number.
    pub fn is_number(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsNumber(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is a string.
    pub fn is_string(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsString(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is an object.
    pub fn is_object(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsObject(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is an array.
    pub fn is_array(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsArray(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// True if the wrapped JSValue is a Date object.
    pub fn is_date(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueIsDate(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// Convert the value to an `f64`; returns `None` when the conversion
    /// raised a JS exception.
    pub fn to_number(&self, context: &GlobalContextRef) -> Option<f64> {
        let mut exception = std::ptr::null_mut();
        let result = unsafe {
            JSValueToNumber(
                context.to_glib_none().0,
                unwrap(self.to_glib_none().0),
                &mut exception,
            )
        };
        if exception.is_null() {
            Some(result)
        } else {
            None
        }
    }
    /// Convert the value to a boolean (JS truthiness rules).
    pub fn to_boolean(&self, context: &GlobalContextRef) -> bool {
        unsafe { JSValueToBoolean(context.to_glib_none().0, unwrap(self.to_glib_none().0)) != 0 }
    }
    /// Copy the value into a Rust `String`; returns `None` when a JS
    /// exception was raised or the bytes are not valid UTF-8.
    ///
    /// NOTE(review): this inherent `to_string` shadows `ToString::to_string`
    /// for `ValueRef`; consider a distinct name.
    pub fn to_string(&self, context: &GlobalContextRef) -> Option<String> {
        unsafe {
            let mut exception = std::ptr::null_mut();
            let jsstring = JSValueToStringCopy(
                context.to_glib_none().0,
                unwrap(self.to_glib_none().0),
                &mut exception,
            );
            if exception.is_null() {
                let cap = JSStringGetMaximumUTF8CStringSize(jsstring);
                let mut buf = Vec::<u8>::with_capacity(cap);
                let len = JSStringGetUTF8CString(jsstring, buf.as_mut_ptr() as _, cap);
                JSStringRelease(jsstring);
                // `len` counts the trailing NUL terminator; drop it.
                buf.set_len(len - 1);
                String::from_utf8(buf).ok()
            } else {
                None
            }
        }
    }
}
// TODO: Delete these after switching all callers to NativeValueRef.
impl FromGlibPtrNone<JSValueRef> for ValueRef {
    /// Wrap a raw JSValueRef with a null context, so the glib ref/unref
    /// hooks become no-ops (no GC protection).
    ///
    /// NOTE(review): `wrapper` is a stack local and `pointer` escapes into
    /// `from_glib_none`; this is only sound if `from_glib_none` copies the
    /// RefWrapper before this function returns — TODO confirm.
    unsafe fn from_glib_none(ptr: JSValueRef) -> Self {
        let mut wrapper = RefWrapper {
            context: std::ptr::null_mut() as _,
            value: ptr,
        };
        let pointer: *mut _ = &mut wrapper;
        from_glib_none(pointer)
    }
}
impl FromGlibPtrFull<JSValueRef> for ValueRef {
    /// Ownership-taking variant of the conversion above; the null context
    /// likewise disables GC protection.
    ///
    /// NOTE(review): same caveat as `from_glib_none` — the pointer to the
    /// stack-local `wrapper` must not outlive this call; confirm
    /// `from_glib_full` copies it.
    unsafe fn from_glib_full(ptr: JSValueRef) -> Self {
        let mut wrapper = RefWrapper {
            context: std::ptr::null_mut() as _,
            value: ptr,
        };
        let pointer: *mut _ = &mut wrapper;
        from_glib_full(pointer)
    }
}
|
mod suit;
mod number;
pub use self::suit::Suit;
/// A playing card: numeric rank (aces parse as 14, see the tests) and suit.
#[derive(Debug, PartialEq)]
pub struct Card {
    pub number: u8,
    pub suit: Suit
}
/// Parse a card from its compact text form (e.g. "2H", "AD"): the rank and
/// suit sub-parsers each read their part of the same string.
pub fn parse(card: &str) -> Card {
    Card {
        number: number::parse(card),
        suit: suit::parse(card)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Rank and suit both come from the same two-character token; "A" maps
    // to 14.
    #[test]
    fn it_parsers_string_to_card() {
        assert_eq!(parse("2H"), Card { number: 2, suit: Suit::Hearts });
        assert_eq!(parse("7S"), Card { number: 7, suit: Suit::Spades });
        assert_eq!(parse("AD"), Card { number: 14, suit: Suit::Dimonds });
    }
}
|
use crate::cartesian::*;
use crate::math::*;
/// A moon's state projected onto a single axis (position + velocity).
/// Hash/Eq allow these snapshots to be used for cycle detection.
#[derive(Hash, Eq, PartialEq, Debug, Copy, Clone)]
struct DimensionHolder {
    position: i32,
    velocity: i32,
}
/// One simulated moon: 3D position and 3D velocity.
#[derive(Hash, Eq, PartialEq, Debug, Copy, Clone)]
struct Moon {
    location: Point,
    velocity: Velocity,
}
impl Moon {
pub fn calculate_gravity_from(self, other_moon: &Self) -> Velocity {
let mut x = 0;
let mut y = 0;
let mut z = 0;
if (self.location.x < other_moon.location.x) {
x = 1;
} else if (self.location.x > other_moon.location.x) {
x = -1;
}
if (self.location.y < other_moon.location.y) {
y = 1;
} else if (self.location.y > other_moon.location.y) {
y = -1;
}
if (self.location.z < other_moon.location.z) {
z = 1;
} else if (self.location.z > other_moon.location.z) {
z = -1;
}
return Velocity::new(x, y, z);
}
fn new(location: Point, velocity: Velocity) -> Self {
return Self { location, velocity };
}
fn get_holder_for_dimension(self, dimension: Dimension) -> DimensionHolder {
if (dimension == Dimension::X) {
return DimensionHolder {
position: self.location.x,
velocity: self.velocity.x,
};
}
if (dimension == Dimension::Y) {
return DimensionHolder {
position: self.location.y,
velocity: self.velocity.y,
};
}
return DimensionHolder {
position: self.location.z,
velocity: self.velocity.z,
};
}
fn get_velocity_for_dimension(self, dimension: Dimension) -> i32 {
if (dimension == Dimension::X) {
return self.velocity.x;
}
if (dimension == Dimension::Y) {
return self.velocity.y;
}
return self.velocity.z;
}
}
/// Total energy of a moon: potential times kinetic, each as defined by
/// `distance_from_origin` (presumably Manhattan distance — see cartesian).
fn calculate_moon_energy(moon: &Moon) -> i32 {
    let potential = moon.location.distance_from_origin();
    let kinetic = moon.velocity.distance_from_origin();
    potential * kinetic
}
/// Builds one `Moon` per input line, each starting at rest.
/// Panics (via unwrap) on a malformed line, as before.
fn parse_moons(strings: Vec<String>) -> Vec<Moon> {
    strings
        .iter()
        .map(|line| Moon {
            location: Point::from_string(line).unwrap(),
            velocity: ZERO_VELOCITY,
        })
        .collect()
}
fn update_moons(moons: &mut Vec<Moon>) {
for moon_index in 0..moons.len() {
let moon = moons.get(moon_index).unwrap();
let mut new_velocity = moon.velocity;
for other_moon_index in 0..moons.len() {
if (other_moon_index != moon_index) {
let other_moon = moons.get(other_moon_index).unwrap();
let velocity = moon.calculate_gravity_from(&other_moon);
new_velocity = new_velocity.add(velocity);
}
}
moons.get_mut(moon_index).unwrap().velocity = new_velocity;
}
for moon in moons.iter_mut() {
moon.location = moon.location.move_velocity(&moon.velocity);
}
}
/// Part 1: run `iterations` simulation steps and return the total energy.
fn puzzle1(strings: Vec<String>, iterations: usize) -> i32 {
    let mut moons = parse_moons(strings);
    // `_` instead of the unused `index` binding (silenced a warning).
    for _ in 0..iterations {
        update_moons(&mut moons);
    }
    moons.iter().map(calculate_moon_energy).sum()
}
/// Snapshot of every moon projected onto one axis.
/// Takes `&[Moon]` rather than `&Vec<Moon>`; existing `&moons` call sites
/// coerce transparently.
fn get_current_dimension(moons: &[Moon], dimension: &Dimension) -> Vec<DimensionHolder> {
    moons
        .iter()
        .map(|moon| moon.get_holder_for_dimension(dimension.clone()))
        .collect()
}
/// Velocity component of every moon along one axis.
/// NOTE(review): currently unused by the puzzle entry points; kept for parity.
/// Takes `&[Moon]` rather than `&Vec<Moon>` (call sites coerce).
fn get_velocities_for_dimension(moons: &[Moon], dimension: &Dimension) -> Vec<i32> {
    moons
        .iter()
        .map(|moon| moon.get_velocity_for_dimension(dimension.clone()))
        .collect()
}
fn get_repeat_count(mut moons: Vec<Moon>, dimension: Dimension) -> u64 {
let mut moon_dimension_position = Vec::<Vec<DimensionHolder>>::new();
moon_dimension_position.push(get_current_dimension(&moons, &dimension));
loop {
update_moons(&mut moons);
let dimensions = get_current_dimension(&moons, &dimension);
if (moon_dimension_position.contains(&dimensions)) {
return (moon_dimension_position.len() * 2) as u64;
}
moon_dimension_position.push(dimensions);
}
}
/// Steps the simulation until the chosen axis returns to its initial
/// state, returning the number of steps taken (the per-axis cycle length).
fn get_repeat_count2(mut moons: Vec<Moon>, dimension: Dimension) -> u64 {
    // Renamed from the misleading `final_velocities`: this is the full
    // initial (position, velocity) state of the axis, not just velocities.
    let initial_state = get_current_dimension(&moons, &dimension);
    let mut steps = 0u64;
    loop {
        update_moons(&mut moons);
        steps += 1;
        if get_current_dimension(&moons, &dimension) == initial_state {
            return steps;
        }
    }
}
/// Part 2: the axes evolve independently, so the full system repeats at
/// the LCM of the three per-axis cycle lengths.
fn puzzle2(strings: Vec<String>) -> u64 {
    // No `mut` needed: `moons` is only cloned / moved into the searches.
    let moons = parse_moons(strings);
    let x_count = get_repeat_count2(moons.clone(), Dimension::X);
    let y_count = get_repeat_count2(moons.clone(), Dimension::Y);
    // Last use: move instead of a third clone.
    let z_count = get_repeat_count2(moons, Dimension::Z);
    println!("x={}, y={}, z={}", x_count, y_count, z_count);
    lcm3_unsigned(x_count, y_count, z_count)
}
#[cfg(test)]
mod tests {
    use crate::cartesian::{ZERO_VELOCITY, Point};
    use crate::day12::*;
    use crate::utils;
    #[test]
    fn moon_calculate_gravity_from() {
        // Two moons on a diagonal: gravity pulls each one unit towards the
        // other on every axis. (Locals renamed from asdf1/2/3 + fixed typo.)
        let ganymede = Moon::new(Point::new(3, 3, 3), ZERO_VELOCITY);
        let callisto = Moon::new(Point::new(5, 5, 5), ZERO_VELOCITY);
        let towards_callisto = ganymede.calculate_gravity_from(&callisto);
        assert_eq!(towards_callisto.x, 1);
        assert_eq!(towards_callisto.y, 1);
        assert_eq!(towards_callisto.z, 1);
        let towards_ganymede = callisto.calculate_gravity_from(&ganymede);
        assert_eq!(towards_ganymede.x, -1);
        assert_eq!(towards_ganymede.y, -1);
        assert_eq!(towards_ganymede.z, -1);
        // A moon exerts no gravity on itself.
        let self_pull = ganymede.calculate_gravity_from(&ganymede);
        assert_eq!(self_pull.x, 0);
        assert_eq!(self_pull.y, 0);
        assert_eq!(self_pull.z, 0);
    }
    /// Part-1 case: input lines, step count, expected total energy.
    struct Test1 {
        input: Vec<String>,
        iterations: usize,
        expected: i32,
    }
    #[test]
    fn test_puzzle1() {
        let mut tests = Vec::new();
        tests.push(Test1 {
            input: vec![String::from("<x=-1, y=0, z=2>"),
                        String::from("<x=2, y=-10, z=-7>"),
                        String::from("<x=4, y=-8, z=8>"),
                        String::from("<x=3, y=5, z=-1>")],
            iterations: 10,
            expected: 179,
        });
        tests.push(Test1 {
            input: vec![String::from("<x=-8, y=-10, z=0>"),
                        String::from("<x=5, y=5, z=10>"),
                        String::from("<x=2, y=-7, z=3>"),
                        String::from("<x=9, y=-8, z=-3>")],
            iterations: 100,
            expected: 1940,
        });
        let input = utils::read_lines("data/Day12.txt").unwrap();
        tests.push(Test1 {
            input, // field-init shorthand instead of `input: input`
            iterations: 1000,
            expected: 8742,
        });
        for test in tests {
            assert_eq!(puzzle1(test.input, test.iterations), test.expected)
        }
    }
    /// Part-2 case: input lines and expected repeat cycle length.
    struct Test2 {
        input: Vec<String>,
        expected: u64,
    }
    #[test]
    fn test_puzzle2() {
        let mut tests = Vec::<Test2>::new();
        tests.push(Test2 {
            input: vec![String::from("<x=-1, y=0, z=2>"),
                        String::from("<x=2, y=-10, z=-7>"),
                        String::from("<x=4, y=-8, z=8>"),
                        String::from("<x=3, y=5, z=-1>")],
            expected: 2772,
        });
        tests.push(Test2 {
            input: vec![String::from("<x=-8, y=-10, z=0>"),
                        String::from("<x=5, y=5, z=10>"),
                        String::from("<x=2, y=-7, z=3>"),
                        String::from("<x=9, y=-8, z=-3>")],
            expected: 4686774924
        });
        let input = utils::read_lines("data/Day12.txt").unwrap();
        tests.push(Test2 {
            input,
            expected: 325433763467176,
        });
        for test in tests {
            assert_eq!(puzzle2(test.input), test.expected)
        }
    }
}
// auto generated, do not modify.
// created: Mon Feb 22 23:57:02 2016
// src-file: /QtGui/qaccessibleobject.h
// dst-file: /src/gui/qaccessibleobject.rs
//
// header block begin =>
#![feature(libc)]
#![feature(core)]
#![feature(collections)]
extern crate libc;
use self::libc::*;
// <= header block end
// main block begin =>
// <= main block end
// use block begin =>
// use super::qaccessibleobject::QAccessibleObject; // 773
use std::ops::Deref;
use super::qwindow::*; // 773
use super::qaccessible::*; // 773
use super::super::core::qstring::*; // 771
use super::super::core::qobject::*; // 771
use super::super::core::qrect::*; // 771
// <= use block end
// ext block begin =>
// #[link(name = "Qt5Core")]
// #[link(name = "Qt5Gui")]
// #[link(name = "Qt5Widgets")]
// #[link(name = "QtInline")]
// FFI declarations for the C shim around the Qt accessibility classes.
// `qthis` is the raw C++ `this` pointer smuggled through as a u64.
// (Auto-generated file — code left untouched, comments only.)
extern {
    fn QAccessibleApplication_Class_Size() -> c_int;
    // proto: void QAccessibleApplication::QAccessibleApplication();
    fn C_ZN22QAccessibleApplicationC2Ev() -> u64;
    // proto: QWindow * QAccessibleApplication::window();
    fn C_ZNK22QAccessibleApplication6windowEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAccessibleInterface * QAccessibleApplication::child(int index);
    fn C_ZNK22QAccessibleApplication5childEi(qthis: u64 /* *mut c_void*/, arg0: c_int) -> *mut c_void;
    // proto: int QAccessibleApplication::childCount();
    fn C_ZNK22QAccessibleApplication10childCountEv(qthis: u64 /* *mut c_void*/) -> c_int;
    // proto: QAccessibleInterface * QAccessibleApplication::parent();
    fn C_ZNK22QAccessibleApplication6parentEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAccessibleInterface * QAccessibleApplication::focusChild();
    fn C_ZNK22QAccessibleApplication10focusChildEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: int QAccessibleApplication::indexOfChild(const QAccessibleInterface * );
    fn C_ZNK22QAccessibleApplication12indexOfChildEPK20QAccessibleInterface(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_int;
    fn QAccessibleObject_Class_Size() -> c_int;
    // proto: void QAccessibleObject::QAccessibleObject(QObject * object);
    fn C_ZN17QAccessibleObjectC2EP7QObject(arg0: *mut c_void) -> u64;
    // proto: QObject * QAccessibleObject::object();
    fn C_ZNK17QAccessibleObject6objectEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QRect QAccessibleObject::rect();
    fn C_ZNK17QAccessibleObject4rectEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
    // proto: QAccessibleInterface * QAccessibleObject::childAt(int x, int y);
    fn C_ZNK17QAccessibleObject7childAtEii(qthis: u64 /* *mut c_void*/, arg0: c_int, arg1: c_int) -> *mut c_void;
    // proto: bool QAccessibleObject::isValid();
    fn C_ZNK17QAccessibleObject7isValidEv(qthis: u64 /* *mut c_void*/) -> c_char;
} // <= ext block end
// body block begin =>
// class sizeof(QAccessibleApplication)=16
// Rust-side wrapper mirroring the C++ class; `qbase` emulates inheritance
// via Deref, `qclsinst` holds the raw C++ instance pointer.
// class sizeof(QAccessibleApplication)=16
#[derive(Default)]
pub struct QAccessibleApplication {
    qbase: QAccessibleObject,
    pub qclsinst: u64 /* *mut c_void*/,
}
// Wrapper for QAccessibleObject; base chain continues into
// QAccessibleInterface (see qaccessible module).
// class sizeof(QAccessibleObject)=16
#[derive(Default)]
pub struct QAccessibleObject {
    qbase: QAccessibleInterface,
    pub qclsinst: u64 /* *mut c_void*/,
}
// Wraps an existing C++ instance pointer without constructing a new object.
impl /*struct*/ QAccessibleApplication {
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QAccessibleApplication {
        return QAccessibleApplication{qbase: QAccessibleObject::inheritFrom(qthis), qclsinst: qthis, ..Default::default()};
    }
}
// Deref/AsRef emulate the C++ inheritance chain on the Rust side.
impl Deref for QAccessibleApplication {
    type Target = QAccessibleObject;
    fn deref(&self) -> &QAccessibleObject {
        return & self.qbase;
    }
}
impl AsRef<QAccessibleObject> for QAccessibleApplication {
    fn as_ref(& self) -> & QAccessibleObject {
        return & self.qbase;
    }
}
// proto: void QAccessibleApplication::QAccessibleApplication();
// Overload-dispatch constructor: the tuple type of `value` selects the impl.
impl /*struct*/ QAccessibleApplication {
    pub fn new<T: QAccessibleApplication_new>(value: T) -> QAccessibleApplication {
        let rsthis = value.new();
        return rsthis;
        // return 1;
    }
}
pub trait QAccessibleApplication_new {
    fn new(self) -> QAccessibleApplication;
}
// proto: void QAccessibleApplication::QAccessibleApplication();
impl<'a> /*trait*/ QAccessibleApplication_new for () {
    fn new(self) -> QAccessibleApplication {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN22QAccessibleApplicationC2Ev()};
        let ctysz: c_int = unsafe{QAccessibleApplication_Class_Size()};
        // NOTE(review): this calloc'd placeholder is never used or freed —
        // it leaks; generated code, left as-is.
        let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64;
        let qthis: u64 = unsafe {C_ZN22QAccessibleApplicationC2Ev()};
        let rsthis = QAccessibleApplication{qbase: QAccessibleObject::inheritFrom(qthis), qclsinst: qthis, ..Default::default()};
        return rsthis;
        // return 1;
    }
}
// proto: QWindow * QAccessibleApplication::window();
// Overload-dispatch wrappers: the unit/tuple type of `overload_args`
// selects the concrete trait impl, mirroring C++ overloading.
impl /*struct*/ QAccessibleApplication {
    pub fn window<RetType, T: QAccessibleApplication_window<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.window(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_window<RetType> {
    fn window(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: QWindow * QAccessibleApplication::window();
impl<'a> /*trait*/ QAccessibleApplication_window<QWindow> for () {
    fn window(self , rsthis: & QAccessibleApplication) -> QWindow {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication6windowEv()};
        let mut ret = unsafe {C_ZNK22QAccessibleApplication6windowEv(rsthis.qclsinst)};
        let mut ret1 = QWindow::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAccessibleInterface * QAccessibleApplication::child(int index);
impl /*struct*/ QAccessibleApplication {
    pub fn child<RetType, T: QAccessibleApplication_child<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.child(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_child<RetType> {
    fn child(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: QAccessibleInterface * QAccessibleApplication::child(int index);
impl<'a> /*trait*/ QAccessibleApplication_child<QAccessibleInterface> for (i32) {
    fn child(self , rsthis: & QAccessibleApplication) -> QAccessibleInterface {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication5childEi()};
        let arg0 = self as c_int;
        let mut ret = unsafe {C_ZNK22QAccessibleApplication5childEi(rsthis.qclsinst, arg0)};
        let mut ret1 = QAccessibleInterface::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: int QAccessibleApplication::childCount();
impl /*struct*/ QAccessibleApplication {
    pub fn childCount<RetType, T: QAccessibleApplication_childCount<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.childCount(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_childCount<RetType> {
    fn childCount(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: int QAccessibleApplication::childCount();
impl<'a> /*trait*/ QAccessibleApplication_childCount<i32> for () {
    fn childCount(self , rsthis: & QAccessibleApplication) -> i32 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication10childCountEv()};
        let mut ret = unsafe {C_ZNK22QAccessibleApplication10childCountEv(rsthis.qclsinst)};
        return ret as i32; // 1
        // return 1;
    }
}
// proto: QAccessibleInterface * QAccessibleApplication::parent();
// Same generated overload-dispatch pattern as above.
impl /*struct*/ QAccessibleApplication {
    pub fn parent<RetType, T: QAccessibleApplication_parent<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.parent(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_parent<RetType> {
    fn parent(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: QAccessibleInterface * QAccessibleApplication::parent();
impl<'a> /*trait*/ QAccessibleApplication_parent<QAccessibleInterface> for () {
    fn parent(self , rsthis: & QAccessibleApplication) -> QAccessibleInterface {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication6parentEv()};
        let mut ret = unsafe {C_ZNK22QAccessibleApplication6parentEv(rsthis.qclsinst)};
        let mut ret1 = QAccessibleInterface::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAccessibleInterface * QAccessibleApplication::focusChild();
impl /*struct*/ QAccessibleApplication {
    pub fn focusChild<RetType, T: QAccessibleApplication_focusChild<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.focusChild(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_focusChild<RetType> {
    fn focusChild(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: QAccessibleInterface * QAccessibleApplication::focusChild();
impl<'a> /*trait*/ QAccessibleApplication_focusChild<QAccessibleInterface> for () {
    fn focusChild(self , rsthis: & QAccessibleApplication) -> QAccessibleInterface {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication10focusChildEv()};
        let mut ret = unsafe {C_ZNK22QAccessibleApplication10focusChildEv(rsthis.qclsinst)};
        let mut ret1 = QAccessibleInterface::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: int QAccessibleApplication::indexOfChild(const QAccessibleInterface * );
impl /*struct*/ QAccessibleApplication {
    pub fn indexOfChild<RetType, T: QAccessibleApplication_indexOfChild<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.indexOfChild(self);
        // return 1;
    }
}
pub trait QAccessibleApplication_indexOfChild<RetType> {
    fn indexOfChild(self , rsthis: & QAccessibleApplication) -> RetType;
}
// proto: int QAccessibleApplication::indexOfChild(const QAccessibleInterface * );
impl<'a> /*trait*/ QAccessibleApplication_indexOfChild<i32> for (&'a QAccessibleInterface) {
    fn indexOfChild(self , rsthis: & QAccessibleApplication) -> i32 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK22QAccessibleApplication12indexOfChildEPK20QAccessibleInterface()};
        let arg0 = self.qclsinst as *mut c_void;
        let mut ret = unsafe {C_ZNK22QAccessibleApplication12indexOfChildEPK20QAccessibleInterface(rsthis.qclsinst, arg0)};
        return ret as i32; // 1
        // return 1;
    }
}
// Wraps an existing C++ QAccessibleObject pointer; Deref/AsRef continue the
// emulated inheritance chain into QAccessibleInterface.
impl /*struct*/ QAccessibleObject {
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QAccessibleObject {
        return QAccessibleObject{qbase: QAccessibleInterface::inheritFrom(qthis), qclsinst: qthis, ..Default::default()};
    }
}
impl Deref for QAccessibleObject {
    type Target = QAccessibleInterface;
    fn deref(&self) -> &QAccessibleInterface {
        return & self.qbase;
    }
}
impl AsRef<QAccessibleInterface> for QAccessibleObject {
    fn as_ref(& self) -> & QAccessibleInterface {
        return & self.qbase;
    }
}
// proto: void QAccessibleObject::QAccessibleObject(QObject * object);
impl /*struct*/ QAccessibleObject {
    pub fn new<T: QAccessibleObject_new>(value: T) -> QAccessibleObject {
        let rsthis = value.new();
        return rsthis;
        // return 1;
    }
}
pub trait QAccessibleObject_new {
    fn new(self) -> QAccessibleObject;
}
// proto: void QAccessibleObject::QAccessibleObject(QObject * object);
impl<'a> /*trait*/ QAccessibleObject_new for (&'a QObject) {
    fn new(self) -> QAccessibleObject {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN17QAccessibleObjectC2EP7QObject()};
        let ctysz: c_int = unsafe{QAccessibleObject_Class_Size()};
        // NOTE(review): placeholder allocation is never used or freed (leak);
        // generated code, left as-is.
        let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64;
        let arg0 = self.qclsinst as *mut c_void;
        let qthis: u64 = unsafe {C_ZN17QAccessibleObjectC2EP7QObject(arg0)};
        let rsthis = QAccessibleObject{qbase: QAccessibleInterface::inheritFrom(qthis), qclsinst: qthis, ..Default::default()};
        return rsthis;
        // return 1;
    }
}
// proto: QObject * QAccessibleObject::object();
// Generated overload-dispatch accessors for QAccessibleObject.
impl /*struct*/ QAccessibleObject {
    pub fn object<RetType, T: QAccessibleObject_object<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.object(self);
        // return 1;
    }
}
pub trait QAccessibleObject_object<RetType> {
    fn object(self , rsthis: & QAccessibleObject) -> RetType;
}
// proto: QObject * QAccessibleObject::object();
impl<'a> /*trait*/ QAccessibleObject_object<QObject> for () {
    fn object(self , rsthis: & QAccessibleObject) -> QObject {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK17QAccessibleObject6objectEv()};
        let mut ret = unsafe {C_ZNK17QAccessibleObject6objectEv(rsthis.qclsinst)};
        let mut ret1 = QObject::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QRect QAccessibleObject::rect();
impl /*struct*/ QAccessibleObject {
    pub fn rect<RetType, T: QAccessibleObject_rect<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.rect(self);
        // return 1;
    }
}
pub trait QAccessibleObject_rect<RetType> {
    fn rect(self , rsthis: & QAccessibleObject) -> RetType;
}
// proto: QRect QAccessibleObject::rect();
impl<'a> /*trait*/ QAccessibleObject_rect<QRect> for () {
    fn rect(self , rsthis: & QAccessibleObject) -> QRect {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK17QAccessibleObject4rectEv()};
        let mut ret = unsafe {C_ZNK17QAccessibleObject4rectEv(rsthis.qclsinst)};
        let mut ret1 = QRect::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: QAccessibleInterface * QAccessibleObject::childAt(int x, int y);
// (x, y) screen coordinates arrive as an (i32, i32) tuple overload.
impl /*struct*/ QAccessibleObject {
    pub fn childAt<RetType, T: QAccessibleObject_childAt<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.childAt(self);
        // return 1;
    }
}
pub trait QAccessibleObject_childAt<RetType> {
    fn childAt(self , rsthis: & QAccessibleObject) -> RetType;
}
// proto: QAccessibleInterface * QAccessibleObject::childAt(int x, int y);
impl<'a> /*trait*/ QAccessibleObject_childAt<QAccessibleInterface> for (i32, i32) {
    fn childAt(self , rsthis: & QAccessibleObject) -> QAccessibleInterface {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK17QAccessibleObject7childAtEii()};
        let arg0 = self.0 as c_int;
        let arg1 = self.1 as c_int;
        let mut ret = unsafe {C_ZNK17QAccessibleObject7childAtEii(rsthis.qclsinst, arg0, arg1)};
        let mut ret1 = QAccessibleInterface::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: bool QAccessibleObject::isValid();
impl /*struct*/ QAccessibleObject {
    pub fn isValid<RetType, T: QAccessibleObject_isValid<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isValid(self);
        // return 1;
    }
}
pub trait QAccessibleObject_isValid<RetType> {
    fn isValid(self , rsthis: & QAccessibleObject) -> RetType;
}
// proto: bool QAccessibleObject::isValid();
// Note: C++ bool crosses the FFI boundary as c_char, exposed here as i8.
impl<'a> /*trait*/ QAccessibleObject_isValid<i8> for () {
    fn isValid(self , rsthis: & QAccessibleObject) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK17QAccessibleObject7isValidEv()};
        let mut ret = unsafe {C_ZNK17QAccessibleObject7isValidEv(rsthis.qclsinst)};
        return ret as i8; // 1
        // return 1;
    }
}
// <= body block end
|
use std::fs;
use std::time::Instant;
/// Evaluates every line of input.txt under both precedence rules and
/// prints the two sums plus the total runtime.
fn main() {
    let start = Instant::now();
    let str_in = fs::read_to_string("input.txt").expect("Error in reading file");
    // Part 1: left-to-right evaluation, '+' and '*' have equal precedence.
    let part1: usize = str_in
        .lines()
        .map(|line| Expression::from_string(line, false).evaluate())
        .sum();
    println!(
        "Part 1: The sum of the resulting values of each line = {}",
        part1
    );
    // Part 2: '+' binds tighter than '*'.
    let part2: usize = str_in
        .lines()
        .map(|line| Expression::from_string(line, true).evaluate())
        .sum();
    println!(
        "Part 2: The sum of the resulting values of each line = {}",
        part2
    );
    println!("Finished after {:?}", start.elapsed());
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Parses `input` with the given precedence flag and returns its value.
    fn eval(input: &str, add_precedes: bool) -> usize {
        Expression::from_string(input, add_precedes).evaluate()
    }
    // Part-1 examples from the puzzle statement (equal precedence).
    #[test]
    fn tst_11() {
        assert_eq!(71, eval("1 + 2 * 3 + 4 * 5 + 6", false));
    }
    #[test]
    fn tst_12() {
        assert_eq!(51, eval("1 + (2 * 3) + (4 * (5 + 6))", false));
    }
    #[test]
    fn tst_13() {
        assert_eq!(26, eval("2 * 3 + (4 * 5)", false));
    }
    #[test]
    fn tst_14() {
        assert_eq!(437, eval("5 + (8 * 3 + 9 + 3 * 4 * 3)", false));
    }
    #[test]
    fn tst_15() {
        assert_eq!(12240, eval("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", false));
    }
    #[test]
    fn tst_16() {
        assert_eq!(13632, eval("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", false));
    }
    // Part-2 examples ('+' binds tighter than '*').
    #[test]
    fn tst_21() {
        assert_eq!(231, eval("1 + 2 * 3 + 4 * 5 + 6", true));
    }
    #[test]
    fn tst_22() {
        assert_eq!(51, eval("1 + (2 * 3) + (4 * (5 + 6))", true));
    }
    #[test]
    fn tst_23() {
        assert_eq!(46, eval("2 * 3 + (4 * 5)", true));
    }
    #[test]
    fn tst_24() {
        assert_eq!(1445, eval("5 + (8 * 3 + 9 + 3 * 4 * 3)", true));
    }
    #[test]
    fn tst_25() {
        assert_eq!(669060, eval("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", true));
    }
    #[test]
    fn tst_26() {
        assert_eq!(23340, eval("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", true));
    }
}
/// One side of a binary operation in the expression tree.
enum Operand {
    Val(usize), // In case we have directly a digit
    Expr(Box<Expression>), // A sub-expression that must be evaluated to get the value.
}
/// The two operators the puzzle grammar supports.
/// PartialEq is needed for the precedence check in `parse_operation`.
#[derive(PartialEq)]
enum Operator {
    Add,
    Mul,
}
/// A binary expression node; the whole input parses to a tree of these.
struct Expression {
    left: Operand,
    operator: Operator,
    right: Operand,
}
impl Expression {
    /// Parses one input line into an expression tree and returns its root.
    ///
    /// `char_idx` is a cursor threaded by `&mut` through every parse_*
    /// helper; each helper consumes exactly the characters it recognizes.
    /// `add_precedes`: when true, '+' binds tighter than '*' (part 2).
    ///
    /// NOTE(review): `str_in.chars().nth(i)` rescans from the start each
    /// call, making parsing O(n^2) per line; harmless at puzzle sizes but
    /// worth a char-buffer if inputs ever grow.
    // return the root node of the tree, containing all the expressions contained in @str_in
    fn from_string(str_in: &str, add_precedes: bool) -> Self {
        let mut char_idx: usize = 0;
        // Start with beginning of the input string, creating the first candidate root node.
        let left = Expression::parse_operand(str_in, &mut char_idx, add_precedes);
        let mut root = Expression::parse_operation(str_in, &mut char_idx, left, add_precedes);
        // While the remaining string is not empty, continue in creating a new "root" node, and assigning its left value to the previous root node id.
        while str_in.chars().nth(char_idx).is_some() {
            let new_root = Expression::parse_operation(
                &str_in,
                &mut char_idx,
                Operand::Expr(Box::new(root)),
                add_precedes,
            );
            root = new_root;
        }
        root
    }
    /// Parses "<operator> <operand>" after an already-parsed `left`, then
    /// decides whether to nest deeper, close a parenthesized group, or keep
    /// chaining parents — see the comments below.
    fn parse_operation(
        str_in: &str,
        char_idx: &mut usize,
        left: Operand,
        add_precedes: bool,
    ) -> Expression {
        let operator = Expression::parse_operator(str_in, char_idx);
        let right = Expression::parse_operand(str_in, char_idx, add_precedes);
        // What is coming next depends on the next symbol.
        // If the '+' has a higher precedence on '*' (bool and_precedes = true) AND we just had a '*' and a '+' is coming next
        // -> build a new child operation (so it get evaluated first), taking the current "right" operand as its "left" operand.
        //
        // Else, if we have a ')' it means we must return the current operation, because it won't have any child.
        // Otherwise, we must build a new parent expression, and consider its left operand as the current operation.
        let operation;
        if add_precedes && operator == Operator::Mul && Expression::next_is_a('+', str_in, char_idx)
        {
            let child_op = Expression::parse_operation(str_in, char_idx, right, add_precedes);
            return Expression {
                left,
                operator,
                right: Operand::Expr(Box::new(child_op)),
            };
        } else {
            operation = Expression {
                left,
                operator,
                right,
            };
            if Expression::next_is_a(')', str_in, char_idx) {
                // Consume the ')' so the caller resumes after the group.
                *char_idx += 1;
                //println!("  -> End of sub expression");
                return operation;
            } else if str_in.chars().nth(*char_idx) == None {
                // end of line
                return operation;
            }
            //println!("  -> New parent expression");
            return Expression::parse_operation(
                str_in,
                char_idx,
                Operand::Expr(Box::new(operation)),
                add_precedes,
            );
        }
    }
    /// Parses a single-digit literal or a parenthesized sub-expression.
    /// On malformed input it logs and yields Val(0) rather than panicking.
    fn parse_operand(str_in: &str, char_idx: &mut usize, add_precedes: bool) -> Operand {
        // Consume any whitespace
        while str_in.chars().nth(*char_idx) == Some(' ') {
            *char_idx += 1;
        }
        if Expression::next_is_a_digit(str_in, char_idx) {
            // Note: operands are always single digits in this puzzle.
            let val = str_in.chars().nth(*char_idx).unwrap().to_digit(10).unwrap();
            *char_idx += 1;
            return Operand::Val(val as usize);
        } else if Expression::next_is_a('(', str_in, char_idx) {
            *char_idx += 1;
            //println!("  -> New sub expression");
            let left = Expression::parse_operand(str_in, char_idx, add_precedes);
            let sub_operation = Expression::parse_operation(str_in, char_idx, left, add_precedes);
            return Operand::Expr(Box::new(sub_operation));
        } else {
            println!("Error in parsing!! Expecting a digit or '('");
            return Operand::Val(0);
        }
    }
    /// Parses '+' or '*'; logs and defaults to Add on anything else.
    fn parse_operator(str_in: &str, char_idx: &mut usize) -> Operator {
        // Consume any whitespace
        while str_in.chars().nth(*char_idx) == Some(' ') {
            *char_idx += 1;
        }
        match str_in.chars().nth(*char_idx) {
            Some('+') => {
                *char_idx += 1;
                return Operator::Add;
            }
            Some('*') => {
                *char_idx += 1;
                return Operator::Mul;
            }
            _ => {
                println!("Error in parsing!! Expecting operator '+' or '*'");
                return Operator::Add;
            }
        }
    }
    /// Skips whitespace, then reports whether the next char is a digit.
    /// (Advances `char_idx` past the whitespace — intentional lookahead side effect.)
    fn next_is_a_digit(str_in: &str, char_idx: &mut usize) -> bool {
        // Consume any whitespace
        while str_in.chars().nth(*char_idx) == Some(' ') {
            *char_idx += 1;
        }
        if let Some(next_char) = str_in.chars().nth(*char_idx) {
            if next_char.is_digit(10) {
                return true;
            }
        }
        return false;
    }
    /// Skips whitespace, then reports whether the next char equals `c`.
    /// (Advances `char_idx` past the whitespace, like `next_is_a_digit`.)
    fn next_is_a(c: char, str_in: &str, char_idx: &mut usize) -> bool {
        // Consume any whitespace
        while str_in.chars().nth(*char_idx) == Some(' ') {
            *char_idx += 1;
        }
        if let Some(next_char) = str_in.chars().nth(*char_idx) {
            if next_char == c {
                return true;
            }
        }
        return false;
    }
    /// Recursively evaluates the tree rooted at this node.
    fn evaluate(&self) -> usize {
        let left = Expression::get_term(&self.left);
        let right = Expression::get_term(&self.right);
        match self.operator {
            Operator::Add => left + right,
            Operator::Mul => left * right,
        }
    }
    /// Resolves an operand to its numeric value (literal or sub-tree).
    fn get_term(op: &Operand) -> usize {
        match op {
            Operand::Val(x) => x.clone(),
            Operand::Expr(x) => x.evaluate(),
        }
    }
}
|
use proc_macro2::TokenStream;
use syn::{self, DeriveInput};
/// Shorthand for the named-field list of a `syn::FieldsNamed`.
type Fields = syn::punctuated::Punctuated<syn::Field, syn::token::Comma>;
/// Emits `impl <G> #trait_name for StructName<T> where ... { body }`.
///
/// The impl generics / where-clause come from the caller-supplied
/// `generics` (possibly augmented with extra bounds), while the type
/// generics always come from the input struct itself.
fn derive_trait<F>(
    input: &DeriveInput,
    trait_name: TokenStream,
    generics: &syn::Generics,
    body: F
) -> TokenStream
where
    F: FnOnce() -> TokenStream,
{
    let name = &input.ident;
    let (_, ty_generics, _) = input.generics.split_for_impl();
    let (impl_generics, _, where_clause) = generics.split_for_impl();
    let items = body();
    quote! {
        impl #impl_generics #trait_name for #name #ty_generics #where_clause {
            #items
        }
    }
}
/// Like `derive_trait`, but first constrains the type parameter `t` to
/// implement the derived trait itself (`T: Trait`).
fn derive_simple_trait<F>(
    input: &DeriveInput,
    trait_name: TokenStream,
    t: &syn::TypeParam,
    body: F,
) -> TokenStream
where
    F: FnOnce() -> TokenStream,
{
    let mut generics = input.generics.clone();
    let bound: syn::WherePredicate = parse_quote!(#t: #trait_name);
    generics.make_where_clause().predicates.push(bound);
    derive_trait(input, trait_name, &generics, body)
}
/// Runs `field_expr` over every named field except the `_unit` marker and
/// concatenates the produced token streams in declaration order.
fn each_field_except_unit<F>(
    fields: &Fields,
    unit: &syn::Field,
    mut field_expr: F,
) -> TokenStream
where
    F: FnMut(&syn::Ident) -> TokenStream,
{
    // TokenStream implements FromIterator<TokenStream>, so collect()
    // concatenates exactly like the previous quote!-based fold.
    fields
        .iter()
        .filter(|field| field.ident != unit.ident)
        .map(|field| field_expr(field.ident.as_ref().unwrap()))
        .collect()
}
/// Builds a `Self { field: expr, ..., _unit: PhantomData }` constructor
/// expression, with `field_expr` supplying each field's initializer.
fn derive_struct_body<F>(
    fields: &Fields,
    unit: &syn::Field,
    mut field_expr: F,
) -> TokenStream
where
    F: FnMut(&syn::Ident) -> TokenStream,
{
    let unit_name = unit.ident.as_ref().unwrap();
    let init = each_field_except_unit(fields, unit, |name| {
        let expr = field_expr(name);
        quote! {
            #name: #expr,
        }
    });
    quote! {
        Self {
            #init
            #unit_name: PhantomData,
        }
    }
}
/// `impl Clone`: clone every real field, re-create the PhantomData marker.
fn clone_impl(input: &DeriveInput, fields: &Fields, unit: &syn::Field, t: &syn::TypeParam) -> TokenStream {
    derive_simple_trait(input, quote! { Clone }, t, || {
        let ctor = derive_struct_body(fields, unit, |name| quote! { self.#name.clone() });
        quote! {
            fn clone(&self) -> Self {
                #ctor
            }
        }
    })
}
/// `impl Copy`: a marker trait, so the impl body is empty.
fn copy_impl(input: &DeriveInput, t: &syn::TypeParam) -> TokenStream {
    let empty = || quote! {};
    derive_simple_trait(input, quote! { Copy }, t, empty)
}
/// `impl Eq`: a marker trait, so the impl body is empty.
fn eq_impl(input: &DeriveInput, t: &syn::TypeParam) -> TokenStream {
    let empty = || quote! {};
    derive_simple_trait(input, quote! { ::core::cmp::Eq }, t, empty)
}
/// `impl PartialEq`: AND together per-field comparisons, seeded with `true`
/// so a struct with only the `_unit` field still compares equal.
fn partialeq_impl(input: &DeriveInput, fields: &Fields, unit: &syn::Field, t: &syn::TypeParam) -> TokenStream {
    derive_simple_trait(input, quote! { ::core::cmp::PartialEq }, t, || {
        let comparisons = each_field_except_unit(fields, unit, |name| {
            quote! { && self.#name == other.#name }
        });
        quote! {
            fn eq(&self, other: &Self) -> bool {
                true #comparisons
            }
        }
    })
}
/// `impl Hash`: feed every real field into the hasher; `_unit` contributes
/// nothing, matching the PartialEq impl.
fn hash_impl(input: &DeriveInput, fields: &Fields, unit: &syn::Field, t: &syn::TypeParam) -> TokenStream {
    derive_simple_trait(input, quote! { ::core::hash::Hash }, t, || {
        let field_hashes = each_field_except_unit(fields, unit, |name| {
            quote! { self.#name.hash(h); }
        });
        quote! {
            fn hash<H: ::core::hash::Hasher>(&self, h: &mut H) {
                #field_hashes
            }
        }
    })
}
/// Serde support, gated behind the crate's "serde" feature: the struct is
/// (de)serialized as the tuple of its real fields, with `_unit` rebuilt as
/// PhantomData on the way in.
fn serde_impl(
    input: &DeriveInput,
    fields: &Fields,
    unit: &syn::Field,
    t: &syn::TypeParam,
) -> TokenStream {
    let deserialize_impl = {
        // Deserialize needs an extra 'de lifetime on the impl, prepended so
        // it precedes the type parameters, plus `T: Deserialize<'de>`.
        let mut generics = input.generics.clone();
        generics.params.insert(0, parse_quote!('de));
        generics
            .make_where_clause()
            .predicates
            .push(parse_quote!(#t: ::serde::Deserialize<'de>));
        derive_trait(input, quote!{ ::serde::Deserialize<'de> }, &generics, || {
            // Destructuring pattern `(a, b, ...)` over the real fields.
            let tuple = each_field_except_unit(fields, unit, |name| {
                quote! { #name, }
            });
            let body = derive_struct_body(fields, unit, |name| quote! { #name });
            quote! {
                fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                where
                    D: ::serde::Deserializer<'de>,
                {
                    let (#tuple) = ::serde::Deserialize::deserialize(deserializer)?;
                    Ok(#body)
                }
            }
        })
    };
    let serialize_impl = derive_simple_trait(input, quote! { ::serde::Serialize }, t, || {
        // Serialize the same tuple shape, by reference.
        let tuple = each_field_except_unit(fields, unit, |name| {
            quote! { &self.#name, }
        });
        quote! {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: ::serde::Serializer,
            {
                (#tuple).serialize(serializer)
            }
        }
    });
    quote! {
        #[cfg(feature = "serde")]
        #serialize_impl
        #[cfg(feature = "serde")]
        #deserialize_impl
    }
}
/// Derive entry point: validates the target struct, then emits Clone, Copy,
/// serde, Eq, PartialEq and Hash impls that all skip the `_unit` field.
///
/// Requirements enforced by panics/asserts (acceptable inside a proc-macro):
/// named-field struct, last field named `_unit` and public, `#[repr(C)]`,
/// and at least one type parameter.
/// NOTE(review): uses the pre-1.0 syn API (interpret_meta, Meta::Word,
/// Pair::value) — keep edits in sync with the crate's pinned syn version.
pub fn derive(input: DeriveInput) -> TokenStream {
    let s = match input.data {
        syn::Data::Struct(ref s) => s,
        _ => panic!("Need to derive this on a struct"),
    };
    let fields = match s.fields {
        syn::Fields::Named(ref named) => &named.named,
        _ => panic!("Need to use named fields"),
    };
    assert!(!fields.is_empty());
    // The trailing `_unit` field is the PhantomData marker every generated
    // impl skips.
    let unit_field = fields.last().unwrap();
    assert_eq!(
        unit_field.value().ident.as_ref().unwrap().to_string(),
        "_unit",
        "You need to have a _unit field to derive this trait",
    );
    assert!(match unit_field.value().vis {
        syn::Visibility::Public(..) => true,
        _ => false,
    }, "Unit field should be public");
    // Require #[repr(C)] somewhere in the attribute list.
    assert!(input.attrs.iter().filter_map(|attr| attr.interpret_meta()).any(|attr| {
        match attr {
            syn::Meta::Word(..) |
            syn::Meta::NameValue(..) => false,
            syn::Meta::List(ref list) => {
                list.ident == "repr" && list.nested.iter().any(|meta| {
                    match *meta {
                        syn::NestedMeta::Meta(syn::Meta::Word(ref w)) => w == "C",
                        _ => false,
                    }
                })
            }
        }
    }), "struct should be #[repr(C)]");
    // Only the first type parameter gets the trait bounds.
    let type_param =
        input.generics.type_params().next().cloned().expect("Need a T");
    let clone = clone_impl(&input, fields, unit_field.value(), &type_param);
    let copy = copy_impl(&input, &type_param);
    let serde = serde_impl(&input, fields, unit_field.value(), &type_param);
    let eq = eq_impl(&input, &type_param);
    let partialeq = partialeq_impl(&input, fields, unit_field.value(), &type_param);
    let hash = hash_impl(&input, fields, unit_field.value(), &type_param);
    quote! {
        #clone
        #copy
        #serde
        #eq
        #partialeq
        #hash
    }
}
|
use std::collections::VecDeque;
use std::fs::File;
use std::io::Read;
/// Intcode parameter addressing modes, encoded as digits in the upper part
/// of an opcode word.
#[repr(u8)]
#[derive(PartialEq)]
enum ParamType {
    PositionMode = 0,
    ImmediateMode = 1,
    RelativeMode = 2,
}
/// Decodes one mode digit.
///
/// # Panics
/// On any digit other than 0, 1, or 2.
fn get_param_type(mode: i64) -> ParamType {
    match mode {
        0 => ParamType::PositionMode,
        1 => ParamType::ImmediateMode,
        2 => ParamType::RelativeMode,
        u => panic!("Unexpected parameter type: {}", u),
    }
}
/// One decoded instruction parameter: its mode, its raw value, and the
/// relative base in effect when it was decoded.
struct Param {
    mode: ParamType,
    value: i64,
    relative_base: i64,
}
impl Param {
    /// Decodes parameter `param_index` (1-based) of the instruction at
    /// `index`. The mode digit for parameter i sits at decimal position
    /// 10^(i+1) of the opcode word. Takes `&[i64]` rather than `&Vec<i64>`
    /// (idiomatic; existing `&Vec` call sites coerce automatically).
    fn new(mem: &[i64], index: usize, param_index: usize, relative_base: i64) -> Param {
        let mode = get_param_type((mem[index] / 10i64.pow((param_index + 1) as u32)) % 10);
        let value = mem[index + param_index];
        Param { mode, value, relative_base }
    }
    /// Reads through the parameter: memory at `value` (position), `value`
    /// itself (immediate), or memory at `value + relative_base` (relative).
    fn get_value(&self, mem: &[i64]) -> i64 {
        match self.mode {
            ParamType::PositionMode => mem[self.value as usize],
            ParamType::ImmediateMode => self.value,
            ParamType::RelativeMode => mem[(self.value + self.relative_base) as usize],
        }
    }
    /// Writes `value` through the parameter.
    ///
    /// # Panics
    /// In immediate mode — an immediate can never be a write target.
    fn set_value(&self, mem: &mut [i64], value: i64) {
        match self.mode {
            ParamType::PositionMode => mem[self.value as usize] = value,
            ParamType::ImmediateMode => panic!("set_value called with a parameter in ImmediateMode!"),
            ParamType::RelativeMode => mem[(self.value + self.relative_base) as usize] = value,
        }
    }
}
/// An Intcode virtual machine.
struct Program {
    // Memory image: the program at the front, zero-filled scratch after it.
    state: Vec<i64>,
    // Instruction pointer (index into `state`).
    current_op: usize,
    // Set when the halt opcode (99) executes.
    finished: bool,
    // Pending inputs; pushed at the front, popped from the back (FIFO).
    input: VecDeque<i64>,
    // Base offset for relative-mode parameters; opcode 9 adjusts it.
    relative_base: i64,
}
impl Program {
    /// Builds a VM with a 10_000-cell memory image: `program` copied to the
    /// front, the remainder zero-filled for the program's scratch use.
    fn new(program: &Vec<i64>, input: VecDeque<i64>) -> Program {
        let mut state = vec![0; 10000];
        state.as_mut_slice()[0..program.len()].copy_from_slice(program.as_slice());
        Program {
            state,
            current_op: 0,
            finished: false,
            input,
            relative_base: 0,
        }
    }
    /// True once the halt opcode (99) has executed.
    fn is_finished(&self) -> bool {
        self.finished
    }
    /// Decodes the `num_params` parameters following the opcode at
    /// `current_op` (parameter indices are 1-based).
    fn get_params(&self, num_params: usize) -> Vec<Param> {
        let mut params = Vec::new();
        for i in 1..num_params + 1 {
            params.push(Param::new(&self.state, self.current_op, i, self.relative_base));
        }
        params
    }
    // Opcode 1: param2 <- param0 + param1; instruction is 4 cells long.
    fn op_add(&mut self) {
        let params = self.get_params(3);
        let sum = params[0].get_value(&self.state) + params[1].get_value(&self.state);
        params[2].set_value(&mut self.state, sum);
        self.current_op += 4;
    }
    // Opcode 2: param2 <- param0 * param1.
    fn op_mul(&mut self) {
        let params = self.get_params(3);
        let product = params[0].get_value(&self.state) * params[1].get_value(&self.state);
        params[2].set_value(&mut self.state, product);
        self.current_op += 4;
    }
    // Opcode 3: store the oldest queued input (push_front + pop_back =
    // FIFO). Returns false WITHOUT advancing when no input is queued, so
    // the VM can pause and be resumed later with more input.
    fn op_input(&mut self) -> bool {
        let params = self.get_params(1);
        let input = self.input.pop_back();
        if input.is_none() {
            return false;
        }
        params[0].set_value(&mut self.state, input.unwrap());
        self.current_op += 2;
        return true;
    }
    // Opcode 4: read param0 into `output`, echoing it to stdout.
    fn op_output(&mut self, output: &mut i64) {
        let params = self.get_params(1);
        *output = params[0].get_value(&self.state);
        self.current_op += 2;
        println!("Output: {}", output);
    }
    // Opcode 5: jump to param1 when param0 != 0, else fall through.
    fn op_jump_if_true(&mut self) {
        let params = self.get_params(2);
        if params[0].get_value(&self.state) != 0 {
            self.current_op = params[1].get_value(&self.state) as usize;
        } else {
            self.current_op += 3;
        }
    }
    // Opcode 6: jump to param1 when param0 == 0, else fall through.
    fn op_jump_if_false(&mut self) {
        let params = self.get_params(2);
        if params[0].get_value(&self.state) == 0 {
            self.current_op = params[1].get_value(&self.state) as usize;
        } else {
            self.current_op += 3;
        }
    }
    // Opcode 7: param2 <- 1 if param0 < param1, else 0.
    fn op_lessthan(&mut self) {
        let params = self.get_params(3);
        let to_store;
        if params[0].get_value(&self.state) < params[1].get_value(&self.state) {
            to_store = 1;
        } else {
            to_store = 0;
        }
        params[2].set_value(&mut self.state, to_store);
        self.current_op += 4;
    }
    // Opcode 8: param2 <- 1 if param0 == param1, else 0.
    fn op_equal(&mut self) {
        let params = self.get_params(3);
        let to_store;
        if params[0].get_value(&self.state) == params[1].get_value(&self.state) {
            to_store = 1;
        } else {
            to_store = 0;
        }
        params[2].set_value(&mut self.state, to_store);
        self.current_op += 4;
    }
    // Opcode 9: adjust the relative base by param0.
    fn op_adjust_relative_base(&mut self) {
        let params = self.get_params(1);
        self.relative_base += params[0].get_value(&self.state);
        self.current_op += 2;
    }
    /// Queues `input` and executes until halt (99) or until an input opcode
    /// finds the queue empty (execution can be resumed by calling again).
    /// Returns the last value output, or 0 if nothing was output.
    fn run_program(&mut self, input: i64) -> i64 {
        let mut output = 0;
        self.input.push_front(input);
        while self.current_op < self.state.len() {
            // The two low decimal digits select the opcode; higher digits
            // carry the parameter modes (decoded in `Param::new`).
            match self.state[self.current_op] % 100 {
                1 => self.op_add(),
                2 => self.op_mul(),
                3 => {
                    if !self.op_input() {
                        break;
                    }
                }
                4 => self.op_output(&mut output),
                5 => self.op_jump_if_true(),
                6 => self.op_jump_if_false(),
                7 => self.op_lessthan(),
                8 => self.op_equal(),
                9 => self.op_adjust_relative_base(),
                99 => {
                    self.finished = true;
                    break;
                }
                _ => panic!("Invalid opcode!"),
            }
        }
        output
    }
}
/// Day 9 entry point: loads the comma-separated Intcode program from
/// `input_day9.txt`, runs it with input value 2, and checks that it halted.
pub fn run_puzzle() {
    let mut ops_string = String::new();
    File::open("input_day9.txt")
        .expect("Failed to open input_day9.txt")
        .read_to_string(&mut ops_string)
        .unwrap();
    let vec: Vec<i64> = ops_string
        .split(',')
        .map(|text| text.trim().parse().unwrap())
        .collect();
    let mut program = Program::new(&vec, VecDeque::new());
    program.run_program(2);
    assert!(program.is_finished());
}
|
use crate::axis_measure::TableAxis;
use druid::{theme, Color, Env, KeyOrValue};
/// Default height of the column header strip.
pub(crate) const DEFAULT_COL_HEADER_HEIGHT: f64 = 25.0;
/// Default width of the row header strip.
pub(crate) const DEFAULT_ROW_HEADER_WIDTH: f64 = 100.0;
/// Styling configuration for the table widget. Every entry is a
/// `KeyOrValue`, so it can be a concrete value or a druid theme key that is
/// resolved later against an `Env` (see `resolve`).
#[derive(Clone)]
pub struct TableConfig {
    pub col_header_height: KeyOrValue<f64>,
    pub row_header_width: KeyOrValue<f64>,
    pub header_background: KeyOrValue<Color>,
    pub header_selected_background: KeyOrValue<Color>,
    pub cells_background: KeyOrValue<Color>,
    pub cells_border: KeyOrValue<Color>,
    pub cell_border_thickness: KeyOrValue<f64>,
    pub cell_padding: KeyOrValue<f64>,
    pub selection_color: KeyOrValue<Color>,
    pub focus_color: KeyOrValue<Color>,
}
/// `TableConfig` after `resolve()`: every theme key looked up in an `Env`,
/// leaving plain values ready for layout and painting.
pub struct ResolvedTableConfig {
    pub(crate) col_header_height: f64,
    pub(crate) row_header_width: f64,
    pub(crate) header_background: Color,
    pub(crate) header_selected_background: Color,
    pub(crate) cells_background: Color,
    pub(crate) cells_border: Color,
    pub(crate) cell_border_thickness: f64,
    pub(crate) cell_padding: f64,
    pub(crate) selection_color: Color,
    pub(crate) focus_color: Color,
}
impl ResolvedTableConfig {
    /// Header size measured across the given axis: header height for
    /// columns, header width for rows.
    pub(crate) fn cross_axis_length(&self, axis: &TableAxis) -> f64 {
        if let TableAxis::Columns = axis {
            self.col_header_height
        } else {
            self.row_header_width
        }
    }
}
impl Default for TableConfig {
fn default() -> Self {
Self::new()
}
}
impl TableConfig {
    /// Builds the default configuration: theme-driven colors plus this
    /// crate's default header sizes.
    pub fn new() -> TableConfig {
        let selection = Color::rgb8(0xB0, 0xEE, 0xFF);
        let focus = Color::rgb8(0x4D, 0x58, 0xD8);
        TableConfig {
            col_header_height: DEFAULT_COL_HEADER_HEIGHT.into(),
            row_header_width: DEFAULT_ROW_HEADER_WIDTH.into(),
            header_background: theme::BACKGROUND_DARK.into(),
            header_selected_background: theme::PLACEHOLDER_COLOR.into(),
            cells_background: theme::LABEL_COLOR.into(),
            cells_border: theme::BORDER_LIGHT.into(),
            cell_border_thickness: 0.5.into(),
            cell_padding: 2.0.into(),
            selection_color: selection.into(),
            focus_color: focus.into(),
        }
    }
    /// Resolves every `KeyOrValue` against `env`, producing concrete values.
    pub(crate) fn resolve(&self, env: &Env) -> ResolvedTableConfig {
        ResolvedTableConfig {
            col_header_height: self.col_header_height.resolve(env),
            row_header_width: self.row_header_width.resolve(env),
            header_background: self.header_background.resolve(env),
            header_selected_background: self.header_selected_background.resolve(env),
            cells_background: self.cells_background.resolve(env),
            cells_border: self.cells_border.resolve(env),
            cell_border_thickness: self.cell_border_thickness.resolve(env),
            cell_padding: self.cell_padding.resolve(env),
            selection_color: self.selection_color.resolve(env),
            focus_color: self.focus_color.resolve(env),
        }
    }
}
|
/// C-ABI shim: stores `b` into `*a` as a float atom. Returns
/// `MAX_ERR_GENERIC` for a null atom pointer, `MAX_ERR_NONE` otherwise.
#[no_mangle]
pub unsafe extern "C" fn atom_setfloat(a: *mut max_sys::t_atom, b: f64) -> max_sys::t_max_err {
    if a.is_null() {
        return max_sys::e_max_errorcodes::MAX_ERR_GENERIC as _;
    }
    (*a).a_type = max_sys::e_max_atomtypes::A_FLOAT as _;
    (*a).a_w.w_float = b;
    max_sys::e_max_errorcodes::MAX_ERR_NONE as _
}
/// C-ABI shim: stores `b` into `*a` as a long atom. Returns
/// `MAX_ERR_GENERIC` for a null atom pointer, `MAX_ERR_NONE` otherwise.
#[no_mangle]
pub unsafe extern "C" fn atom_setlong(
    a: *mut max_sys::t_atom,
    b: max_sys::t_atom_long,
) -> max_sys::t_max_err {
    // Guard against a null pointer, mirroring `atom_setfloat`; the original
    // dereferenced `a` unconditionally, which is UB when callers pass null.
    if a.is_null() {
        max_sys::e_max_errorcodes::MAX_ERR_GENERIC as _
    } else {
        (*a).a_type = max_sys::e_max_atomtypes::A_LONG as _;
        (*a).a_w.w_long = b;
        max_sys::e_max_errorcodes::MAX_ERR_NONE as _
    }
}
|
mod models;
/// Exercises the `Stock` aggregate's command handling; each behavior below
/// is pinned by the assertions that follow it.
fn main() {
    // Start from an empty stock aggregate.
    let initial = models::Stock::default();
    println!("d1 = {:?}", &initial);
    // Create on an empty stock assigns the id.
    let create = models::Command::Create { id: String::from("data1") };
    let created = initial.handle(&create);
    println!("d2 = {:?}", &created);
    assert_eq!(&created.id, "data1");
    assert_eq!(created.qty, 0);
    // Update with a matching id changes the quantity.
    let update_same = models::Command::Update { id: String::from("data1"), qty: 5 };
    let updated = created.handle(&update_same);
    println!("d3 = {:?}", &updated);
    assert_eq!(&updated.id, "data1");
    assert_eq!(updated.qty, 5);
    // Update with a non-matching id leaves the stock unchanged.
    let update_other = models::Command::Update { id: String::from("data2"), qty: 10 };
    let unchanged = updated.handle(&update_other);
    println!("d4 = {:?}", &unchanged);
    assert_eq!(&unchanged.id, "data1");
    assert_eq!(unchanged.qty, 5);
    // Create on an already-initialized stock leaves it unchanged.
    let still_same = unchanged.handle(&create);
    println!("d5 = {:?}", &still_same);
    assert_eq!(&still_same.id, "data1");
    assert_eq!(still_same.qty, 5);
    // Update on a fresh (empty-id) stock is also a no-op.
    let empty_after_update = models::Stock::default().handle(&update_other);
    println!("d6 = {:?}", &empty_after_update);
    assert!(empty_after_update.id.is_empty());
    assert_eq!(empty_after_update.qty, 0);
}
|
use super::{utils::heap_object_impls, HeapObjectTrait};
use crate::{
heap::{object_heap::HeapObject, symbol_table::impl_ord_with_symbol_table_via_ord, Heap},
utils::{impl_debug_display_via_debugdisplay, impl_eq_hash_ord_via_get, DebugDisplay},
};
use candy_frontend::hir::Id;
use derive_more::Deref;
use rustc_hash::FxHashMap;
use std::{
fmt::{self, Formatter},
mem,
ptr::{self, NonNull},
};
/// A HIR `Id` stored as an object on the VM heap; derefs to the underlying
/// `HeapObject`.
#[derive(Clone, Copy, Deref)]
pub struct HeapHirId(HeapObject);
impl HeapHirId {
    /// Wraps a raw heap object without checking that it actually is a
    /// HIR-id object — caller's responsibility.
    pub fn new_unchecked(object: HeapObject) -> Self {
        Self(object)
    }
    /// Allocates a HIR-id object on `heap` and moves `value` into its
    /// content area.
    pub fn create(heap: &mut Heap, is_reference_counted: bool, value: Id) -> Self {
        let id = HeapHirId(heap.allocate(
            HeapObject::KIND_HIR_ID,
            is_reference_counted,
            0,
            mem::size_of::<Id>(),
        ));
        // `ptr::write` moves `value` in without reading/dropping the
        // uninitialized destination bytes.
        unsafe { ptr::write(id.id_pointer().as_ptr(), value) };
        id
    }
    // Pointer to the `Id` stored at offset 0 of the content area.
    fn id_pointer(self) -> NonNull<Id> {
        self.content_word_pointer(0).cast()
    }
    /// Borrows the stored `Id`. The lifetime `'a` is unconstrained: the
    /// caller must ensure the heap object outlives the borrow.
    pub fn get<'a>(self) -> &'a Id {
        unsafe { &*self.id_pointer().as_ptr() }
    }
}
impl DebugDisplay for HeapHirId {
    // Debug and Display render identically: the stored `Id`'s Display form.
    fn fmt(&self, f: &mut Formatter, _is_debug: bool) -> fmt::Result {
        write!(f, "{}", self.get())
    }
}
// Per the macro names: Debug/Display delegate to the `DebugDisplay` impl
// above, Eq/Hash/Ord are derived from the value returned by `get()`, and
// the standard heap-object conversions are generated.
impl_debug_display_via_debugdisplay!(HeapHirId);
impl_eq_hash_ord_via_get!(HeapHirId);
heap_object_impls!(HeapHirId);
impl HeapObjectTrait for HeapHirId {
    // The content area holds exactly one `Id`.
    fn content_size(self) -> usize {
        mem::size_of::<Id>()
    }
    fn clone_content_to_heap_with_mapping(
        self,
        _heap: &mut Heap,
        clone: HeapObject,
        _address_map: &mut FxHashMap<HeapObject, HeapObject>,
    ) {
        // Copy the stored `Id` into the clone's content area; the address
        // map is unused here (no inner heap objects are remapped).
        let clone = Self(clone);
        let value = self.get().to_owned();
        unsafe { ptr::write(clone.id_pointer().as_ptr(), value) };
    }
    // No child heap objects to release.
    fn drop_children(self, _heap: &mut Heap) {}
    fn deallocate_external_stuff(self) {
        // Run the `Id` destructor in place; the heap reclaims the raw bytes.
        unsafe { ptr::drop_in_place(self.id_pointer().as_ptr()) };
    }
}
impl_ord_with_symbol_table_via_ord!(HeapHirId);
|
use game::*;
/// Result of casting a spell: the cooldown to apply to the caster and how
/// long the cast keeps channeling.
pub struct SpellResult{
    pub cooldown: u32,
    pub channel_for: u32
}
pub trait Castable{
fn cast(&mut self, &mut Game1, &mut Caster, x: f64, y: f64)->SpellResult;
fn on_acquire(&mut self, &mut Game1, &mut Caster);
fn on_loss(&self, &mut Game1, &mut Caster);
}
/// A spell as held in a spellbook: yields its castable behavior plus a
/// sprite id for display.
pub trait Spell{
    fn get_castable(&mut self)->Box<Castable>;
    fn get_spell_sprite(&self)->u32;
}
/// Component for an entity that can cast spells.
pub struct Caster{
    // Learned spells by slot; `None` marks an empty slot.
    pub spell_slots: Vec<Option<Box<Castable>>>,
    // In-progress cast: (slot index, i16, i16) — NOTE(review): the i16 pair
    // is presumably a target position; confirm against usage.
    pub casting: Option<(usize, i16, i16)>,
    // Active channel: (slot index, u32) — NOTE(review): presumably
    // remaining duration; confirm against usage.
    pub channeling: Option<(usize, u32)>,
    pub cooldown: u32,
    // Ids linking to the entity's other components.
    pub mover_id: MoverID,
    pub suppressable_id: SuppressableID,
    pub allegiance_id: AllegianceID
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Marker type for the debug spell.
pub struct DebugSpell{
}
/// Castable state for the debug spell.
pub struct DebugCastable{
    // Number of times this castable has been cast so far.
    pub cast_times: u32
}
/// Marker type for the blink spell.
pub struct BlinkSpell{
}
/// Castable state for the blink spell (holds no state).
pub struct BlinkCastable{
}
|
//! Adapted version of LALRPOP lexer.
use std::str::CharIndices;
use unicode_xid::UnicodeXID;
use self::ErrorCode::*;
use self::Tok::*;
/// A lexing failure: what went wrong plus the byte offset where it happened.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Error {
    pub location: usize,
    pub code: ErrorCode,
}
/// The kinds of error the tokenizer can report.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ErrorCode {
    UnrecognizedToken,
    UnterminatedStringLiteral,
}
/// Convenience constructor: wraps a code and location in `Err(Error {..})`.
fn error<T>(code: ErrorCode, location: usize) -> Result<T, Error> {
    Err(Error { location, code })
}
/// Tokens produced by the tokenizer. `String`, `Id` and `Num` carry
/// payloads; `String`/`Id` borrow from the input text.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Tok<'input> {
    // Keywords (single words come from KEYWORDS; Write/Read/Println and
    // IOException are the multi-part `System.*`/`java.*` forms handled in
    // `identifierish`):
    Public,
    Static,
    Main,
    Class,
    Extends,
    If,
    Else,
    While,
    Return,
    Length,
    True,
    False,
    This,
    New,
    Throws,
    IOException,
    Void,
    Bool,
    Char,
    Int,
    Write,
    Read,
    Println,
    // Operators and punctuation:
    Bang,
    DoubleAmpersand,
    Comma,
    Semicolon,
    Dot,
    Equals,
    Lt,
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    LeftBracket,
    RightBracket,
    Plus,
    Minus,
    Star,
    Slash,
    // Literals and identifiers:
    String(&'input str),
    Id(&'input str),
    Num(i32),
}
/// Hand-written lexer over `text`; yields spanned tokens via `Iterator`.
pub struct Tokenizer<'input> {
    text: &'input str,
    // Iterator over the input, always one character ahead of `lookahead`.
    chars: CharIndices<'input>,
    // One character of lookahead as (byte index, char); `None` at EOF.
    lookahead: Option<(usize, char)>,
    // Offset added to every emitted span (for lexing a sub-slice of a file).
    shift: usize,
}
/// A value with its byte span: `(start, value, end)`.
pub type Spanned<T> = (usize, T, usize);
// Reserved words mapped to their tokens; any word not listed lexes as `Id`.
// Multi-part keywords (`System.out.println`, `java.io.IOException`, …) are
// handled separately in `identifierish`.
const KEYWORDS: &'static [(&'static str, Tok<'static>)] = &[
    ("public", Public),
    ("static", Static),
    ("main", Main),
    ("class", Class),
    ("extends", Extends),
    ("while", While),
    ("if", If),
    ("else", Else),
    ("return", Return),
    ("true", True),
    ("false", False),
    ("this", This),
    ("new", New),
    ("length", Length),
    ("void", Void),
    ("char", Char),
    ("boolean", Bool),
    ("int", Int),
    ("throws", Throws),
];
impl<'input> Tokenizer<'input> {
/// Creates a tokenizer over `text`; `shift` is added to every emitted
/// span (useful when `text` is a slice of a larger file).
pub fn new(text: &'input str, shift: usize) -> Tokenizer<'input> {
    let mut tokenizer = Tokenizer {
        text,
        chars: text.char_indices(),
        lookahead: None,
        shift,
    };
    // Prime the lookahead with the first character so every scanning
    // method starts with one char of context.
    tokenizer.bump();
    tokenizer
}
/// Scans the next token at its raw (unshifted) offsets, skipping
/// whitespace and `//` / `/* */` comments. Returns `None` at end of
/// input; `Err(UnrecognizedToken)` for unexpected characters — note a
/// lone `&` is an error, since only `&&` is a token.
fn next_unshifted(&mut self) -> Option<Result<Spanned<Tok<'input>>, Error>> {
    loop {
        return match self.lookahead {
            Some((idx0, '!')) => {
                self.bump();
                Some(Ok((idx0, Bang, idx0 + 1)))
            }
            // `&` must be followed by another `&` to form `&&`.
            Some((idx0, '&')) => match self.bump() {
                Some((idx1, '&')) => {
                    self.bump();
                    Some(Ok((idx0, DoubleAmpersand, idx1 + 1)))
                }
                _ => Some(error(UnrecognizedToken, idx0)),
            },
            Some((idx0, ',')) => {
                self.bump();
                Some(Ok((idx0, Comma, idx0 + 1)))
            }
            Some((idx0, ';')) => {
                self.bump();
                Some(Ok((idx0, Semicolon, idx0 + 1)))
            }
            Some((idx0, '.')) => {
                self.bump();
                Some(Ok((idx0, Dot, idx0 + 1)))
            }
            Some((idx0, '=')) => {
                self.bump();
                Some(Ok((idx0, Equals, idx0 + 1)))
            }
            Some((idx0, '{')) => {
                self.bump();
                Some(Ok((idx0, LeftBrace, idx0 + 1)))
            }
            Some((idx0, '[')) => {
                self.bump();
                Some(Ok((idx0, LeftBracket, idx0 + 1)))
            }
            Some((idx0, '(')) => {
                self.bump();
                Some(Ok((idx0, LeftParen, idx0 + 1)))
            }
            Some((idx0, '<')) => {
                self.bump();
                Some(Ok((idx0, Lt, idx0 + 1)))
            }
            Some((idx0, '+')) => {
                self.bump();
                Some(Ok((idx0, Plus, idx0 + 1)))
            }
            Some((idx0, '-')) => {
                self.bump();
                Some(Ok((idx0, Minus, idx0 + 1)))
            }
            Some((idx0, '*')) => {
                self.bump();
                Some(Ok((idx0, Star, idx0 + 1)))
            }
            // `/` is either a comment opener (skip and rescan) or division.
            Some((idx0, '/')) => match self.bump() {
                Some((_, '/')) => {
                    self.line_comment();
                    continue;
                }
                Some((_, '*')) => {
                    self.comment();
                    continue;
                }
                _ => Some(Ok((idx0, Slash, idx0 + 1))),
            },
            Some((idx0, '}')) => {
                self.bump();
                Some(Ok((idx0, RightBrace, idx0 + 1)))
            }
            Some((idx0, ']')) => {
                self.bump();
                Some(Ok((idx0, RightBracket, idx0 + 1)))
            }
            Some((idx0, ')')) => {
                self.bump();
                Some(Ok((idx0, RightParen, idx0 + 1)))
            }
            Some((idx0, '"')) => {
                self.bump();
                Some(self.string_literal(idx0))
            }
            // Identifiers/keywords, numbers, then skippable whitespace.
            Some((idx0, c)) if is_identifier_start(c) => Some(self.identifierish(idx0)),
            Some((idx0, c)) if c.is_numeric() => Some(self.number(idx0)),
            Some((_, c)) if c.is_whitespace() => {
                self.bump();
                continue;
            }
            Some((idx, _)) => Some(error(UnrecognizedToken, idx)),
            None => None,
        };
    }
}
/// Advances one character, updating and returning `lookahead`
/// (`None` once the input is exhausted).
fn bump(&mut self) -> Option<(usize, char)> {
    let next = self.chars.next();
    self.lookahead = next;
    next
}
/// Lexes a string literal whose opening `"` sits at `idx0` (the caller
/// has already bumped past it). Backslash escapes are skipped over — not
/// decoded — when searching for the closing quote.
fn string_literal(&mut self, idx0: usize) -> Result<Spanned<Tok<'input>>, Error> {
    let mut escape = false;
    // True exactly at an unescaped `"`; a backslash sets `escape` so the
    // character after it can never terminate the literal.
    let terminate = |c: char| {
        if escape {
            escape = false;
            false
        } else if c == '\\' {
            escape = true;
            false
        } else if c == '"' {
            true
        } else {
            false
        }
    };
    match self.take_until(terminate) {
        Some(idx1) => {
            self.bump(); // consume the '"'
            let text = &self.text[idx0 + 1..idx1]; // do not include the "" in the str
            Ok((idx0, String(text), idx1 + 1))
        }
        // EOF before the closing quote.
        None => error(UnterminatedStringLiteral, idx0),
    }
}
/// Lexes an identifier or keyword starting at `idx0`. The multi-part
/// keywords `System.out.println`, `System.out.write`, `System.in.read`
/// and `java.io.IOException` are recognized as single tokens.
/// (The original repeated the same consume sequence three times under a
/// "TODO: This is messy" note; it is factored into `consume_dotted_pair`.)
fn identifierish(&mut self, idx0: usize) -> Result<Spanned<Tok<'input>>, Error> {
    let (start, word, end) = self.word(idx0);
    // Keyword lookup; anything unknown becomes an identifier.
    let tok = KEYWORDS
        .iter()
        .find(|&&(w, _)| w == word)
        .map(|&(_, ref t)| t.clone())
        .unwrap_or_else(|| Id(word));
    let st = (start, tok, end);
    // Only `System`/`java` can begin an extended keyword; peeking does not
    // consume input (it clones the char iterator).
    let extended = match word {
        "System" | "java" => self.peek_extended_keyword(),
        _ => None,
    };
    if let Some(idx) = extended {
        let suffix = &self.text[end..idx];
        let extended_tok = match (word, suffix) {
            ("System", ".out.println") => Some(Println),
            ("System", ".out.write") => Some(Write),
            ("System", ".in.read") => Some(Read),
            ("java", ".io.IOException") => Some(IOException),
            _ => None,
        };
        if let Some(tok) = extended_tok {
            self.consume_dotted_pair();
            return Ok((start, tok, idx));
        }
    }
    Ok(st)
}
/// Consumes the two `.word` segments that follow an extended keyword's
/// head word (e.g. `.out.println`): for each segment, steps past the dot,
/// then scans the word.
fn consume_dotted_pair(&mut self) {
    for _ in 0..2 {
        self.bump(); // skip the '.'
        let idx = self.lookahead.unwrap().0;
        self.word(idx);
    }
}
fn number(&mut self, idx0: usize) -> Result<Spanned<Tok<'input>>, Error> {
let (start, word, end) = self.numeric(idx0);
match word.parse::<i32>() {
Ok(i) => Ok((start, Num(i), end)),
Err(_) => Err(Error {
location: start,
code: UnrecognizedToken,
}),
}
}
/// Scans the maximal digit run starting at `idx0` and returns it with its
/// span; a run reaching EOF ends at `text.len()`.
fn numeric(&mut self, idx0: usize) -> Spanned<&'input str> {
    let end = self
        .take_while(|c| c.is_numeric())
        .unwrap_or(self.text.len());
    (idx0, &self.text[idx0..end], end)
}
/// Scans the maximal identifier-continue run starting at `idx0` and
/// returns it with its span; a run reaching EOF ends at `text.len()`.
fn word(&mut self, idx0: usize) -> Spanned<&'input str> {
    let end = self
        .take_while(is_identifier_continue)
        .unwrap_or(self.text.len());
    (idx0, &self.text[idx0..end], end)
}
/// Skips the remainder of a `//` comment, including the newline.
fn line_comment(&mut self) {
    self.take_until(|c| c == '\n');
    self.bump(); // step past the newline (no-op at end of input)
}
/// Skips a `/* ... */` block comment (no nesting). Returns after the
/// closing `*/`, or silently at end of input if it is never closed.
fn comment(&mut self) {
    loop {
        self.take_until(|c| c == '*');
        self.bump();
        match self.lookahead {
            Some((_, '/')) => {
                self.bump();
                return;
            }
            None => return,
            // A `*` not followed by `/`; keep scanning.
            Some(_) => {}
        }
    }
}
/// Consumes characters while `keep_going` holds. Returns the index of the
/// first rejected character, or `None` if the input ran out first.
fn take_while<F>(&mut self, mut keep_going: F) -> Option<usize>
where
    F: FnMut(char) -> bool,
{
    // Inverted predicate: stop exactly where `keep_going` first fails.
    self.take_until(|c| !keep_going(c))
}
/// Consumes characters until `terminate` holds. Returns the index of the
/// terminating character (which is NOT consumed), or `None` at EOF.
fn take_until<F>(&mut self, mut terminate: F) -> Option<usize>
where
    F: FnMut(char) -> bool,
{
    while let Some((idx, c)) = self.lookahead {
        if terminate(c) {
            return Some(idx);
        }
        self.bump();
    }
    None
}
/// Looks ahead past a run of identifier characters and dots (e.g.
/// `.out.println`) and returns the index just past it, without consuming
/// any input. `None` if that run extends to end of input.
fn peek_extended_keyword(&self) -> Option<usize> {
    // Clone the iterator so peeking never advances the real position.
    let mut chars: CharIndices = self.chars.clone();
    let mut current = self.lookahead;
    while let Some((idx, c)) = current {
        if !(is_identifier_continue(c) || c == '.') {
            return Some(idx);
        }
        current = chars.next();
    }
    None
}
}
impl<'input> Iterator for Tokenizer<'input> {
type Item = Result<Spanned<Tok<'input>>, Error>;
fn next(&mut self) -> Option<Result<Spanned<Tok<'input>>, Error>> {
match self.next_unshifted() {
None => None,
Some(Ok((l, t, r))) => Some(Ok((l + self.shift, t, r + self.shift))),
Some(Err(Error { location, code })) => Some(Err(Error {
location: location + self.shift,
code: code,
})),
}
}
}
/// Whether `c` may begin an identifier (Unicode XID_Start).
fn is_identifier_start(c: char) -> bool {
    UnicodeXID::is_xid_start(c)
}
/// Whether `c` may continue an identifier (Unicode XID_Continue).
fn is_identifier_continue(c: char) -> bool {
    UnicodeXID::is_xid_continue(c)
}
|
/*! Extracts textual content into new files, discarding metadata. Should produce an OSCAR v1 (2019) compatible corpus.
!*/
use std::fs::File;
use std::io::BufWriter;
use std::path::Path;
use oscar_io::oscar_doc::{Document, SplitFolderReader};
use crate::error::Error;
/// Extracts the textual `content` of each document into a plain text file,
/// one document per blank-line-separated chunk (OSCAR v1 / 2019 layout).
pub trait ExtractText {
    /// Reads documents from the split folder at `src`, writes their text to
    /// `dst`, and deletes `src` afterwards when `del_src` is set.
    fn extract_from_path(src: &Path, dst: &Path, del_src: bool) -> Result<(), Error> {
        let mut reader = SplitFolderReader::new(src)?;
        let dst_file = File::create(dst)?;
        let mut dst_buf = BufWriter::new(dst_file);
        Self::extract_text(&mut reader, &mut dst_buf)?;
        if del_src {
            std::fs::remove_file(src)?;
        }
        Ok(())
    }
    /// Writes the content of every document yielded by `src` into `dst`,
    /// each followed by a blank line, then flushes.
    fn extract_text<T, U>(src: &mut T, dst: &mut U) -> Result<(), Error>
    where
        T: Iterator<Item = Result<Document, oscar_io::error::Error>>,
        U: std::io::Write,
    {
        for doc in src {
            let doc = doc?;
            let mut extracted = Self::extract_content(&doc).to_string();
            extracted.push_str("\n\n");
            // `write_all` loops on partial writes. The original used
            // `write` and treated a (legitimate) short write as a hard
            // error instead of retrying.
            dst.write_all(extracted.as_bytes())?;
        }
        dst.flush()?;
        Ok(())
    }
    /// Returns the raw textual content of a document.
    fn extract_content(doc: &Document) -> &str {
        doc.content()
    }
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use oscar_io::oscar_doc::{Document, Metadata};
    use super::ExtractText;
    // Minimal impl: the trait's default methods are what we exercise.
    struct TestExtract;
    impl ExtractText for TestExtract {}
    #[test]
    fn test_extract_json() {
        let test = "foo";
        let doc = Document::new(test.to_string(), HashMap::new(), Metadata::default());
        let res = TestExtract::extract_content(&doc);
        assert_eq!("foo", res);
    }
    #[test]
    fn test_extract_text() {
        // Four documents, some with embedded newlines in their content.
        let mut test = vec![
            "words like words",
            "when to use\nit",
            "not so good",
            "to start\n with",
        ]
        .into_iter()
        .map(|x| {
            Ok(Document::new(
                x.to_string(),
                HashMap::new(),
                Metadata::default(),
            ))
        });
        let mut res = vec![];
        TestExtract::extract_text(&mut test, &mut res).unwrap();
        let res = String::from_utf8_lossy(&res);
        // NOTE(review): extract_text appends "\n\n" after each document —
        // confirm this literal (separator blank lines, leading space before
        // "with") survived copy/paste intact.
        let expected = "words like words
when to use
it
not so good
to start
with
";
        assert_eq!(res, expected);
    }
}
|
pub mod home;
pub mod settings;
pub mod organizations;
pub mod quickstars;
pub mod addons; |
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use failure::Error;
use fidl::endpoints::ServerEnd;
use fidl_fuchsia_auth::{AuthProviderConfig, AuthProviderMarker};
use fidl_fuchsia_identity_internal::{
AccountHandlerContextRequest, AccountHandlerContextRequestStream,
};
use futures::prelude::*;
use std::collections::HashMap;
use token_manager::AuthProviderConnection;
/// A type that can respond to`AccountHandlerContext` requests from the AccountHandler components
/// that we launch. These requests provide contextual and service information to the
/// AccountHandlers, such as connections to components implementing the `AuthProviderFactory`
/// interface.
pub struct AccountHandlerContext {
    /// A map from auth_provider_type to an `AuthProviderConnection` used to launch the associated
    /// component. Built once in `new` from the supplied `AuthProviderConfig`s.
    auth_provider_connections: HashMap<String, AuthProviderConnection>,
}
impl AccountHandlerContext {
    /// Creates a new `AccountHandlerContext` from the supplied vector of `AuthProviderConfig`
    /// objects.
    pub fn new(auth_provider_configs: &[AuthProviderConfig]) -> AccountHandlerContext {
        AccountHandlerContext {
            // Key each connection by its auth_provider_type so that
            // `get_auth_provider` can look it up directly.
            auth_provider_connections: auth_provider_configs
                .iter()
                .map(|apc| {
                    (apc.auth_provider_type.clone(), AuthProviderConnection::from_config_ref(apc))
                })
                .collect(),
        }
    }
    /// Asynchronously handles the supplied stream of `AccountHandlerContextRequest` messages.
    /// Stops at the first stream or handler error.
    pub async fn handle_requests_from_stream(
        &self,
        mut stream: AccountHandlerContextRequestStream,
    ) -> Result<(), Error> {
        while let Some(req) = stream.try_next().await? {
            self.handle_request(req).await?;
        }
        Ok(())
    }
    /// Asynchronously handles a single `AccountHandlerContextRequest`.
    async fn handle_request(&self, req: AccountHandlerContextRequest) -> Result<(), fidl::Error> {
        match req {
            AccountHandlerContextRequest::GetAuthProvider {
                auth_provider_type,
                auth_provider,
                responder,
            } => responder
                // NOTE(review): `send` takes `&mut` per the generated FIDL
                // binding signature — confirm against the bindings.
                .send(&mut self.get_auth_provider(&auth_provider_type, auth_provider).await),
        }
    }
    // Connects `auth_provider` to the configured provider component, or
    // returns NotFound when no configuration matches `auth_provider_type`.
    async fn get_auth_provider<'a>(
        &'a self,
        auth_provider_type: &'a str,
        auth_provider: ServerEnd<AuthProviderMarker>,
    ) -> Result<(), fidl_fuchsia_identity_account::Error> {
        match self.auth_provider_connections.get(auth_provider_type) {
            Some(apc) => apc
                .connect(auth_provider)
                .await
                .map_err(|_| fidl_fuchsia_identity_account::Error::Unknown),
            None => Err(fidl_fuchsia_identity_account::Error::NotFound),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Note: Most AccountHandlerContext methods launch instances of an AuthProvider. Since its
    /// currently not convenient to mock out this component launching in Rust, we rely on the
    /// hermetic component test to provide coverage for these areas and only cover the in-process
    /// behavior with this unit-test.
    #[test]
    fn test_new() {
        let dummy_configs = vec![
            AuthProviderConfig {
                auth_provider_type: "dummy_1".to_string(),
                url: "fuchsia-pkg://fuchsia.com/dummy_ap_1#meta/ap.cmx".to_string(),
                params: Some(vec!["test_arg_1".to_string()]),
            },
            AuthProviderConfig {
                auth_provider_type: "dummy_2".to_string(),
                url: "fuchsia-pkg://fuchsia.com/dummy_ap_2#meta/ap.cmx".to_string(),
                params: None,
            },
        ];
        let dummy_config_1 = &dummy_configs[0];
        let dummy_config_2 = &dummy_configs[1];
        let test_object = AccountHandlerContext::new(&dummy_configs);
        // Each config becomes a connection keyed by its provider type...
        let test_connection_1 =
            test_object.auth_provider_connections.get(&dummy_config_1.auth_provider_type).unwrap();
        let test_connection_2 =
            test_object.auth_provider_connections.get(&dummy_config_2.auth_provider_type).unwrap();
        assert_eq!(test_connection_1.component_url(), dummy_config_1.url);
        assert_eq!(test_connection_2.component_url(), dummy_config_2.url);
        // ...and unknown types are simply absent.
        assert!(test_object.auth_provider_connections.get("bad url").is_none());
    }
}
|
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
use gooey::{
core::{
assets::Image,
figures::{
DisplayScale, Displayable, Figure, Point, Rect, Rectlike, Scale, Scaled, Size,
SizedRect,
},
styles::{Color, Style, SystemTheme},
Context, Pixels, TransmogrifierContext, WidgetId,
},
frontends::{
browser::{
utils::{create_element, widget_css_id, window_document, CssBlockBuilder, CssRules},
ImageExt, RegisteredTransmogrifier, WebSys, WebSysTransmogrifier,
},
rasterizer::{ContentArea, ContentSize},
},
renderer::{Renderer, StrokeOptions, TextMetrics, TextOptions},
};
use js_sys::Function;
use wasm_bindgen::{prelude::Closure, JsCast, JsValue};
use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement, HtmlImageElement};
use crate::{Canvas, CanvasRenderer, CanvasTransmogrifier, Command};
/// Looks up the DOM element generated for `widget_id` and returns it if it
/// exists and is actually an `HtmlCanvasElement`.
fn canvas_element(widget_id: &WidgetId) -> Option<HtmlCanvasElement> {
    let element = window_document().get_element_by_id(&widget_css_id(widget_id.id))?;
    element.dyn_into::<HtmlCanvasElement>().ok()
}
impl BrowserRenderer {
    /// The `<canvas>` element this renderer targets, looked up by widget id.
    fn canvas(&self) -> Option<HtmlCanvasElement> {
        canvas_element(&self.widget)
    }
    /// The canvas's 2d rendering context, if the element and context exist.
    fn rendering_context(&self) -> Option<CanvasRenderingContext2d> {
        self.canvas()?
            .get_context("2d")
            .ok()?
            .and_then(|c| c.dyn_into().ok())
    }
    /// Restricts drawing to `self.clip` and translates the origin to the
    /// clip's top-left so subsequent draws use clip-relative coordinates.
    /// Callers bracket this with `save()`/`restore()`.
    fn clip(&self, context: &CanvasRenderingContext2d) {
        context.begin_path();
        context.rect(
            self.clip.origin.x,
            self.clip.origin.y,
            self.clip.size.width,
            self.clip.size.height,
        );
        context.clip();
        context
            .translate(self.clip.origin.x, self.clip.origin.y)
            .unwrap();
    }
}
impl CanvasTransmogrifier {
    /// Requests an animation-frame redraw for this canvas widget; requests
    /// are coalesced through `redraw_already_requested` (see
    /// `request_animation_frame`).
    fn redraw(&self, context: &mut TransmogrifierContext<'_, CanvasTransmogrifier, WebSys>) {
        let widget_context = Context::new(context.channels, context.frontend);
        request_animation_frame(
            widget_context,
            context.state.redraw_already_requested.clone(),
        );
    }
}
/// Schedules `draw_frame` via `requestAnimationFrame`, coalescing requests:
/// `fetch_or(true)` returns the previous flag value, so only the first
/// caller while a frame is pending actually schedules one.
fn request_animation_frame(context: Context<Canvas>, already_requested: Arc<AtomicBool>) {
    if !already_requested.fetch_or(true, Ordering::SeqCst) {
        let cb = Closure::once_into_js(move || {
            // Clear the flag before drawing so a redraw requested while
            // rendering schedules a fresh frame.
            already_requested.store(false, Ordering::SeqCst);
            draw_frame(context);
        });
        web_sys::window()
            .unwrap()
            .request_animation_frame(cb.dyn_ref().unwrap())
            .unwrap();
    }
}
/// Renders one frame: resizes the canvas's backing store to its client size
/// (scaled by the device pixel ratio), then hands a fresh `BrowserRenderer`
/// to the widget's renderable.
fn draw_frame(context: Context<Canvas>) {
    context.map_mut(|canvas, context| {
        let widget = context.widget().registration().unwrap().id().clone();
        if let Some(canvas_element) = canvas_element(&widget) {
            // Pixel scale from the browser's device pixel ratio; the
            // additional (points) scale is 1.
            let scale = DisplayScale::new(
                Scale::new(web_sys::window().unwrap().device_pixel_ratio() as f32),
                Scale::new(1.),
            );
            // Clamp the client size to non-negative before the unsigned cast.
            let size = Size::<_, Pixels>::new(
                canvas_element.client_width(),
                canvas_element.client_height(),
            )
            .max(&Size::default())
            .cast::<u32>();
            canvas_element.set_width(size.width);
            canvas_element.set_height(size.height);
            let size = size.cast::<f32>().to_scaled(&scale);
            // Renderer initially clipped to the full canvas.
            let renderer = BrowserRenderer {
                widget,
                clip: SizedRect::from(size.cast::<f64>()),
                theme: context.frontend().theme(),
                scale,
            };
            canvas.renderable.render(
                CanvasRenderer::BrowserRenderer(renderer),
                &ContentArea {
                    size: ContentSize {
                        content: size,
                        ..ContentSize::default()
                    },
                    location: Point::default(),
                },
            );
        }
    });
}
impl gooey::core::Transmogrifier<WebSys> for CanvasTransmogrifier {
    type State = State;
    type Widget = Canvas;
    /// Handles widget commands by redrawing. The irrefutable `let` below
    /// only compiles because `Refresh` is `Command`'s single variant.
    fn receive_command(
        &self,
        command: Command,
        context: &mut TransmogrifierContext<'_, Self, WebSys>,
    ) {
        let Command::Refresh = command;
        self.redraw(context);
    }
}
impl WebSysTransmogrifier for CanvasTransmogrifier {
    /// Creates the `<canvas>` element, stores its generated CSS in widget
    /// state, installs a window `resize` listener that requests a redraw,
    /// and draws the first frame.
    fn transmogrify(
        &self,
        mut context: TransmogrifierContext<'_, Self, WebSys>,
    ) -> Option<web_sys::HtmlElement> {
        let element = create_element::<HtmlCanvasElement>("canvas");
        let css = self
            .initialize_widget_element(&element, &context)
            .unwrap_or_default();
        context.state.css = Some(css);
        // Setup a refresh-on-resize callback.
        let widget_context = Context::from(&context);
        let already_requested = context.state.redraw_already_requested.clone();
        // `into_js_value` leaks the closure to JS, keeping it alive for the
        // lifetime of the page.
        let onresize = Closure::wrap(Box::new(move || {
            request_animation_frame(widget_context.clone(), already_requested.clone());
        }) as Box<dyn Fn()>)
        .into_js_value();
        web_sys::window()
            .unwrap()
            .add_event_listener_with_callback("resize", &Function::from(onresize))
            .unwrap();
        // Initialize the canvas by drawing a frame.
        self.redraw(&mut context);
        Some(element.unchecked_into())
    }
    /// The canvas always fills its parent, on top of the standard style
    /// conversion.
    fn convert_style_to_css(&self, style: &Style, css: CssBlockBuilder) -> CssBlockBuilder {
        self.convert_standard_components_to_css(style, css)
            .with_css_statement("width: 100%")
            .with_css_statement("height: 100%")
    }
}
/// Renderer implementation that uses [`CanvasRenderingContext2d`].
///
/// ## User interface scaling (Points)
///
/// The renderer uses
/// [`Window::device_pixel_ratio()`](web_sys::Window::device_pixel_ratio) to
/// scale between [`Points`] and [`Pixels`].
#[derive(Debug)]
pub struct BrowserRenderer {
    // The widget whose <canvas> element this renderer draws into.
    widget: WidgetId,
    // Current clip rectangle in scaled coordinates (see `clip_to`).
    clip: SizedRect<f64, Scaled>,
    theme: SystemTheme,
    scale: DisplayScale<f32>,
}
impl Renderer for BrowserRenderer {
    fn theme(&self) -> SystemTheme {
        self.theme
    }

    fn size(&self) -> Size<f32, Scaled> {
        self.clip.size.cast::<f32>()
    }

    /// Returns a renderer restricted to the intersection of the current
    /// clip and `bounds`; a disjoint `bounds` yields an empty clip.
    fn clip_to(&self, bounds: Rect<f32, Scaled>) -> Self {
        Self {
            widget: self.widget.clone(),
            clip: self
                .clip
                .intersection(&bounds.cast())
                .unwrap_or_default()
                .as_sized(),
            theme: self.theme,
            scale: self.scale,
        }
    }

    fn clip_bounds(&self) -> Rect<f32, Scaled> {
        Rect::from(self.clip.cast())
    }

    fn scale(&self) -> DisplayScale<f32> {
        self.scale
    }

    /// Draws `text` with its baseline at `baseline_origin`, clipped to this
    /// renderer's region. No-op when no 2d context is available.
    fn render_text(
        &self,
        text: &str,
        baseline_origin: impl Displayable<f32, Pixels = Point<f32, Pixels>>,
        options: &TextOptions,
    ) {
        if let Some(context) = self.rendering_context() {
            let baseline_origin = baseline_origin.to_pixels(&self.scale);
            context.save();
            self.clip(&context);
            context.set_fill_style(&JsValue::from_str(&options.color.as_css_string()));
            context
                .fill_text(text, baseline_origin.x as f64, baseline_origin.y as f64)
                .unwrap();
            context.restore();
        }
    }

    /// Measures `text` with the canvas' current font, returning zeroed
    /// metrics when no 2d context is available.
    fn measure_text(&self, text: &str, _options: &TextOptions) -> TextMetrics<Scaled> {
        if let Some(context) = self.rendering_context() {
            // TODO handle text options
            let metrics = ExtendedTextMetrics::from(context.measure_text(text).unwrap());
            TextMetrics {
                width: Figure::new(metrics.width() as f32),
                ascent: Figure::new(metrics.actual_bounding_box_ascent() as f32),
                descent: Figure::new(metrics.actual_bounding_box_descent() as f32),
                line_gap: Figure::default(),
            }
        } else {
            TextMetrics::default()
        }
    }

    /// Strokes the outline of `rect` using the color and line width from
    /// `options`.
    fn stroke_rect(
        &self,
        rect: &impl Displayable<f32, Pixels = Rect<f32, Pixels>>,
        options: &StrokeOptions,
    ) {
        if let Some(context) = self.rendering_context() {
            let rect = rect.to_pixels(&self.scale);
            context.save();
            self.clip(&context);
            context.set_stroke_style(&JsValue::from_str(&options.color.as_css_string()));
            context.set_line_width(options.line_width.get() as f64);
            let rect = rect.cast::<f64>().as_sized();
            context.stroke_rect(
                rect.origin.x,
                rect.origin.y,
                rect.size.width,
                rect.size.height,
            );
            context.restore();
        }
    }

    /// Fills `rect` with `color`.
    fn fill_rect(&self, rect: &impl Displayable<f32, Pixels = Rect<f32, Pixels>>, color: Color) {
        if let Some(context) = self.rendering_context() {
            context.save();
            self.clip(&context);
            context.set_fill_style(&JsValue::from_str(&color.as_css_string()));
            let rect = rect.to_pixels(&self.scale).cast::<f64>().as_sized();
            context.fill_rect(
                rect.origin.x,
                rect.origin.y,
                rect.size.width,
                rect.size.height,
            );
            context.restore();
        }
    }

    /// Strokes a line segment from `point_a` to `point_b` using the color
    /// and line width from `options`.
    fn stroke_line<P: Displayable<f32, Pixels = Point<f32, Pixels>>>(
        &self,
        point_a: P,
        point_b: P,
        options: &StrokeOptions,
    ) {
        if let Some(context) = self.rendering_context() {
            context.save();
            self.clip(&context);
            context.set_stroke_style(&JsValue::from_str(&options.color.as_css_string()));
            // Fix: apply the requested line width, matching `stroke_rect`.
            // Previously `options.line_width` was silently ignored here.
            context.set_line_width(options.line_width.get() as f64);
            context.begin_path();
            let point_a = point_a.to_pixels(&self.scale).cast::<f64>();
            context.move_to(point_a.x, point_a.y);
            let point_b = point_b.to_pixels(&self.scale).cast::<f64>();
            context.line_to(point_b.x, point_b.y);
            context.stroke();
            context.restore();
        }
    }

    /// Draws `image` with its top-left corner at `location`, resolving the
    /// backing `<img>` element by CSS id. A missing context, id, or element
    /// results in a no-op.
    fn draw_image(
        &self,
        image: &Image,
        location: impl Displayable<f32, Pixels = Point<f32, Pixels>>,
    ) {
        if let Some(context) = self.rendering_context() {
            if let Some(css_id) = image.css_id() {
                if let Some(element) = window_document().get_element_by_id(&css_id) {
                    let element = element.unchecked_into::<HtmlImageElement>();
                    context.save();
                    self.clip(&context);
                    let location = location.to_pixels(&self.scale).cast::<f64>();
                    context
                        .draw_image_with_html_image_element(&element, location.x, location.y)
                        .unwrap();
                    context.restore();
                }
            }
        }
    }
}
impl From<CanvasTransmogrifier> for RegisteredTransmogrifier {
    /// Boxes the transmogrifier so it can be stored in the frontend's
    /// transmogrifier registry alongside other widget kinds.
    fn from(transmogrifier: CanvasTransmogrifier) -> Self {
        Self(Box::new(transmogrifier))
    }
}
use wasm_bindgen::prelude::*;
// Bindings for `TextMetrics` getters (actualBoundingBox*) that are not
// exposed by the `web_sys::TextMetrics` type this crate builds against.
#[wasm_bindgen]
extern "C" {
    pub type ExtendedTextMetrics;
    #[wasm_bindgen(method, getter, js_name = actualBoundingBoxAscent)]
    pub fn actual_bounding_box_ascent(this: &ExtendedTextMetrics) -> f64;
    #[wasm_bindgen(method, getter, js_name = actualBoundingBoxDescent)]
    pub fn actual_bounding_box_descent(this: &ExtendedTextMetrics) -> f64;
    #[wasm_bindgen(method, getter, js_name = actualBoundingBoxLeft)]
    pub fn actual_bounding_box_left(this: &ExtendedTextMetrics) -> f64;
    #[wasm_bindgen(method, getter, js_name = actualBoundingBoxRight)]
    pub fn actual_bounding_box_right(this: &ExtendedTextMetrics) -> f64;
    #[wasm_bindgen(method, getter)]
    pub fn width(this: &ExtendedTextMetrics) -> f64;
}
impl From<web_sys::TextMetrics> for ExtendedTextMetrics {
    /// Reinterprets a `TextMetrics` as the extended binding. This is a JS
    /// cast with no runtime check — both types wrap the same underlying
    /// JS `TextMetrics` object.
    fn from(tm: web_sys::TextMetrics) -> Self {
        tm.unchecked_into()
    }
}
impl ExtendedTextMetrics {
    /// Total glyph height: ascent above plus descent below the baseline.
    pub fn height(&self) -> f64 {
        self.actual_bounding_box_ascent() + self.actual_bounding_box_descent()
    }
}
/// Per-widget transmogrifier state for the canvas.
#[derive(Debug, Default)]
pub struct State {
    // True while a redraw animation frame is already queued, so resize
    // events don't queue duplicates.
    redraw_already_requested: Arc<AtomicBool>,
    // CSS rules created for the widget element; dropped with the state.
    css: Option<CssRules>,
}
|
pub mod config;
pub mod resources;
pub(crate) mod dropout;
pub(crate) mod activations;
pub(crate) mod linear;
pub use config::Config; |
use draco::{Lazy, Mailbox, VNode};
use wasm_bindgen::{JsCast, UnwrapThrowExt};
use wasm_bindgen_test::*;
use web_sys as web;
wasm_bindgen_test_configure!(run_in_browser);
// `Lazy` must key its cache on the render function as well as the argument:
// the same argument (1) rendered through a different closure has to produce
// the new output after patching, not the cached text of the first render.
#[wasm_bindgen_test]
fn t_same_arg_but_different_function() {
    let mailbox = Mailbox::new(|_| ());
    // First render: Lazy(1, x -> x.to_string()) materializes a text node "1".
    let mut node_1: VNode<()> = Lazy::new(1, |&x| x.to_string().into()).into();
    let web_node = node_1.create(&mailbox);
    assert_eq!(
        web_node
            .dyn_into::<web::Text>()
            .unwrap_throw()
            .text_content()
            .unwrap_throw(),
        "1"
    );
    // Same argument, different function: the patch must re-render to "2".
    let mut node_2: VNode<()> = Lazy::new(1, |&x| (x + 1).to_string().into()).into();
    let web_node = node_2.patch(&mut node_1, &mailbox);
    assert_eq!(
        web_node
            .dyn_into::<web::Text>()
            .unwrap_throw()
            .text_content()
            .unwrap_throw(),
        "2"
    );
}
|
// This file is part of syslog2. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/syslog2/master/COPYRIGHT. No part of syslog2, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2016 The developers of syslog2. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/syslog2/master/COPYRIGHT.
extern crate process;
use self::process::Process;
use rfc5424::TruncatedUsAsciiPrintableString;
use SyslogRfc;
/// RFC 5424 NILVALUE, substituted for empty header fields.
const NILVALUE: &'static str = "-";
/// Selects which syslog wire format to construct.
///
/// NOTE: the enum discriminant doubles as the maximum APP-NAME length used
/// by `new` (`*self as usize`): 32 for RFC 3164, 48 for RFC 5424.
#[derive(Debug, Copy, Clone)]
pub enum SyslogRfcConstructor
{
	Rfc3164 = 32,
	Rfc5424 = 48,
}
impl SyslogRfcConstructor
{
pub fn new(&self, process: &Process, messageId: &str) -> SyslogRfc
{
let programName = &process.programName;
let truncatedPrintableUsAsciiProgramName = TruncatedUsAsciiPrintableString::new(if programName.is_empty()
{
NILVALUE
}
else
{
programName
}, *self as usize);
let truncatedPrintableProcessId = TruncatedUsAsciiPrintableString::new(&process.pid.to_string(), 128);
match *self
{
SyslogRfcConstructor::Rfc3164 =>
{
let hostNameWithoutDomain = &process.hostNameWithoutDomain;
let truncatedPrintableUsAsciiHostNameWithoutDomain = TruncatedUsAsciiPrintableString::new(if hostNameWithoutDomain.is_empty()
{
NILVALUE
}
else
{
hostNameWithoutDomain
}, 255);
SyslogRfc::Rfc3164
{
hostNameWithoutDomain: truncatedPrintableUsAsciiHostNameWithoutDomain,
appName: truncatedPrintableUsAsciiProgramName,
processId: truncatedPrintableProcessId,
}
},
SyslogRfcConstructor::Rfc5424 =>
{
let hostName = &process.hostName;
let truncatedPrintableUsAsciiHostName = TruncatedUsAsciiPrintableString::new(if hostName.is_empty()
{
NILVALUE
}
else
{
hostName
}, 255);
SyslogRfc::Rfc5424
{
hostName: truncatedPrintableUsAsciiHostName,
appName: truncatedPrintableUsAsciiProgramName,
processId: truncatedPrintableProcessId,
messageId: TruncatedUsAsciiPrintableString::new(if messageId.is_empty()
{
NILVALUE
}
else
{
messageId
}, 32),
}
},
}
}
} |
use futures::{future::BoxFuture, task::SpawnError};
use std::io;
use std::net::SocketAddr;
use std::pin::Pin;
use std::time::{Duration, Instant};
/// The Native runtime.
///
/// On wasm only task spawning is available; every networking and timer
/// capability panics at the call site.
#[derive(Debug)]
pub struct Native;
impl runtime_raw::Runtime for Native {
    /// Spawns `fut` onto the browser's task queue; spawning never fails.
    fn spawn_boxed(&self, fut: BoxFuture<'static, ()>) -> Result<(), SpawnError> {
        wasm_bindgen_futures::spawn_local(fut);
        Ok(())
    }
    // The remaining capabilities have no wasm implementation yet and panic
    // unconditionally.
    fn connect_tcp_stream(
        &self,
        _addr: &SocketAddr,
    ) -> BoxFuture<'static, io::Result<Pin<Box<dyn runtime_raw::TcpStream>>>> {
        panic!("Connecting TCP streams is currently not supported in wasm");
    }
    fn bind_tcp_listener(
        &self,
        _addr: &SocketAddr,
    ) -> io::Result<Pin<Box<dyn runtime_raw::TcpListener>>> {
        panic!("Binding TCP listeners is currently not supported in wasm");
    }
    fn bind_udp_socket(
        &self,
        _addr: &SocketAddr,
    ) -> io::Result<Pin<Box<dyn runtime_raw::UdpSocket>>> {
        panic!("Binding UDP sockets is currently not supported in wasm");
    }
    fn new_delay(&self, _dur: Duration) -> Pin<Box<dyn runtime_raw::Delay>> {
        panic!("Timers are currently not supported in wasm");
    }
    fn new_delay_at(&self, _at: Instant) -> Pin<Box<dyn runtime_raw::Delay>> {
        panic!("Timers are currently not supported in wasm");
    }
    fn new_interval(&self, _dur: Duration) -> Pin<Box<dyn runtime_raw::Interval>> {
        panic!("Timers are currently not supported in wasm");
    }
}
|
use std::error::Error as StdError;
use std::fmt;
use std::io;
/// A type alias for `Result<T, tplink::Error>`.
pub type Result<T> = std::result::Result<T, Error>;
/// Errors that may occur while interacting with a device.
///
/// The concrete failure is carried by [`ErrorKind`] and exposed through
/// [`Error::kind`].
#[derive(Debug)]
pub struct Error {
    // Kept private so new kinds can be added without breaking callers.
    kind: ErrorKind,
}
impl Error {
/// Creates a new `Error` for a given type of this error.
pub(crate) fn new(kind: ErrorKind) -> Error {
Error { kind }
}
/// Returns the specific type of this error.
pub fn kind(&self) -> &ErrorKind {
&self.kind
}
}
/// The specific type of an error.
#[derive(Debug)]
pub enum ErrorKind {
    /// An I/O error that occurred while interacting with a device.
    Io(io::Error),
    /// An error of this kind occurs when performing automatic
    /// serialization/deserialization with serde.
    Json(serde_json::Error),
    /// An error of this kind occurs when an operation requested by
    /// the client is not supported by the device.
    UnsupportedOperation(String),
    /// An error of this kind occurs when a valid operation is
    /// requested by the client with an invalid parameter.
    InvalidParameter(String),
    // Hidden never-constructed variant that forces downstream matches to
    // include a wildcard arm (pre-`#[non_exhaustive]` idiom).
    #[doc(hidden)]
    __NonExhaustive,
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the wrapped error where one exists; otherwise render a
        // short human-readable description of the failure.
        match &self.kind {
            ErrorKind::Io(e) => e.fmt(f),
            ErrorKind::Json(e) => e.fmt(f),
            ErrorKind::UnsupportedOperation(op) => write!(f, "unsupported operation: {}", op),
            ErrorKind::InvalidParameter(param) => write!(f, "invalid parameter: {}", param),
            // `__NonExhaustive` is never constructed.
            _ => unreachable!(),
        }
    }
}
impl StdError for Error {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        // Only the variants that wrap another error expose a source.
        match &self.kind {
            ErrorKind::Io(e) => Some(e),
            ErrorKind::Json(e) => Some(e),
            _ => None,
        }
    }
}
impl From<io::Error> for Error {
    /// Wraps an I/O error, enabling `?` on I/O results in this crate.
    fn from(e: io::Error) -> Error {
        Error::new(ErrorKind::Io(e))
    }
}
/// Wraps a serde_json error in a crate [`Error`].
pub(crate) fn json(e: serde_json::Error) -> Error {
    Error::new(ErrorKind::Json(e))
}
/// Builds an [`ErrorKind::UnsupportedOperation`] error naming `op`.
pub(crate) fn unsupported_operation(op: &str) -> Error {
    Error::new(ErrorKind::UnsupportedOperation(op.into()))
}
/// Builds an [`ErrorKind::InvalidParameter`] error naming `param`.
pub(crate) fn invalid_parameter(param: &str) -> Error {
    Error::new(ErrorKind::InvalidParameter(param.into()))
}
|
use std::cell::RefCell;
use std::sync::Arc;
use crate::global::*;
/// A grid point in the simulation, pairing screen coordinates with a shared
/// temperature cell.
pub struct Point{
    // SDL point holding the (x, y) screen coordinates.
    pub(crate) inner: sdl2::rect::Point,
    // Flattened index into the global grid: x * HEIGHT + y.
    pub(crate) index: usize,
    // Shared, interior-mutable temperature cell for this point.
    pub(crate) temperature: Arc<RefCell<f64>>,
    // Raw pointer into `temperature`'s cell, cached by `new` so reads can
    // bypass RefCell borrow bookkeeping; valid while `temperature` is alive.
    pub(crate) ptr: *const f64
}
// SAFETY(review): these impls claim cross-thread use is sound, but `ptr`
// reads the `RefCell<f64>` without any synchronization — a concurrent writer
// would be a data race. TODO confirm readers and writers never overlap.
unsafe impl Send for Point {}
unsafe impl Sync for Point {}
impl Point {
pub fn new(x: i32, y: i32) -> Self {
let mut p = Point {
inner: sdl2::rect::Point::new(x, y),
temperature: Arc::new(RefCell::new(0.0)),
index: x as usize * *HEIGHT + y as usize,
ptr: std::ptr::null()
};
p.ptr = p.temperature.as_ptr() as _;
p
}
pub fn to_sdl(&self) -> sdl2::rect::Point {
return self.inner;
}
pub fn temperature(&self) -> f64 {
unsafe {
*self.ptr
}
}
} |
use crate::{gui_component::*, prelude::*};
use raylib::prelude::*;
/// Returns `true` when `mouse_position` lies strictly inside the
/// axis-aligned rectangle whose top-left corner is `position` and whose
/// size is `dimensions` (points exactly on an edge are outside).
pub fn is_inside(position: Point, dimensions: Dimensions, mouse_position: Point) -> bool {
    // The previous corner array duplicated every comparison (opposite
    // corners share coordinates); testing the four edges once is identical.
    let right = position.0 + dimensions.0;
    let bottom = position.1 + dimensions.1;
    mouse_position.0 > position.0
        && mouse_position.0 < right
        && mouse_position.1 > position.1
        && mouse_position.1 < bottom
}
/// Allows for the various components in `ptgui` to be contained within a single
/// collection rather than having seperate collections for each component that
/// can be drawn by the `GuiHandler<T>`.
#[derive(PartialEq)]
pub enum DrawableType {
    Button(Button),
    Slider(Slider),
    Dropdown(Dropdown),
    // Labels are display-only; see `is_clicked`, which ignores them.
    Label(Label),
}
impl DrawableType {
    /// Draws the wrapped component with `draw_handler`.
    pub fn draw(&mut self, draw_handler: &mut RaylibDrawHandle) {
        match self {
            DrawableType::Button(b) => b.draw(draw_handler),
            DrawableType::Slider(s) => s.draw(draw_handler),
            DrawableType::Dropdown(d) => d.draw(draw_handler),
            DrawableType::Label(l) => l.draw(draw_handler),
        }
    }
    /// Forwards a click/drag test to the wrapped component, collecting any
    /// resulting action strings into `actions`.
    ///
    /// Buttons and dropdowns react to a left-button *press*, sliders to the
    /// button being *held* (so dragging keeps updating). Labels are not
    /// interactive and fall through to the `_` arm.
    pub fn is_clicked(
        &mut self,
        mouse_position: Point,
        actions: &mut Vec<String>,
        draw_handler: &RaylibDrawHandle,
    ) {
        match self {
            DrawableType::Button(b) => actions.push(b.is_clicked(
                mouse_position,
                draw_handler.is_mouse_button_pressed(MouseButton::MOUSE_LEFT_BUTTON),
            )),
            DrawableType::Slider(s) => s.is_clicked(
                mouse_position,
                draw_handler.is_mouse_button_down(MouseButton::MOUSE_LEFT_BUTTON),
            ),
            DrawableType::Dropdown(d) => {
                d.is_clicked(
                    mouse_position,
                    draw_handler.is_mouse_button_pressed(MouseButton::MOUSE_LEFT_BUTTON),
                );
                // Dropdowns queue their actions internally; drain them into
                // the shared action list.
                actions.append(&mut d.actions)
            },
            // Labels do nothing when clicked.
            _ => (),
        }
    }
    /// Returns the component's top-left position.
    pub fn get_position(&self) -> Point {
        match self {
            DrawableType::Button(b) => b.position,
            DrawableType::Slider(s) => s.position,
            DrawableType::Dropdown(d) => d.position,
            DrawableType::Label(l) => l.position,
        }
    }
    /// Returns the component's width and height.
    pub fn get_dimensions(&self) -> Dimensions {
        match self {
            DrawableType::Button(b) => b.dimensions,
            DrawableType::Slider(s) => s.dimensions,
            DrawableType::Dropdown(d) => d.dimensions,
            DrawableType::Label(l) => l.dimensions,
        }
    }
    /// Resizes the wrapped component to `new_dimensions`.
    pub fn resize(&mut self, new_dimensions: Dimensions) {
        match self {
            DrawableType::Button(b) => b.resize(new_dimensions),
            DrawableType::Slider(s) => s.resize(new_dimensions),
            DrawableType::Dropdown(d) => d.resize(new_dimensions),
            DrawableType::Label(l) => l.resize(new_dimensions),
        }
    }
    /// Moves the component to `new_x_pos` if it is not already there.
    ///
    /// NOTE(review): sliders delegate to their own `move_x` — presumably so
    /// the handle moves with the track — while the other variants mutate
    /// `position.0` directly; confirm that asymmetry is intentional.
    pub fn move_x(&mut self, new_x_pos: i32) {
        match self {
            DrawableType::Button(b) =>
                if b.position.0 != new_x_pos {
                    b.position.0 = new_x_pos;
                },
            DrawableType::Slider(s) =>
                if s.position.0 != new_x_pos {
                    s.move_x(new_x_pos);
                },
            DrawableType::Dropdown(d) =>
                if d.position.0 != new_x_pos {
                    d.position.0 = new_x_pos;
                },
            DrawableType::Label(l) =>
                if l.position.0 != new_x_pos {
                    l.position.0 = new_x_pos;
                },
        }
    }
}
|
use super::*;
// Minimal entity for the storage tests below; `id` is the storage key.
#[derive(Clone, Debug, PartialEq, Ord, PartialOrd, Eq)]
struct MyData {
    pub id: u32,
    pub data: u32,
}
impl Entity<u32> for MyData {
    fn get_id(&self) -> u32 {
        self.id
    }
}
// Saving stores the entity in the backing map, keyed by its id.
#[test]
fn should_save_entity_in_storage() {
    // given
    let mut storage = InMemoryStorage::new();
    let data = MyData { id: 8, data: 42 };
    // when
    storage.save(&data).unwrap();
    // then: the internal map is inspected directly, keyed by the entity id.
    assert_eq!(&data, storage.db.get(&8).unwrap());
}
// A stored entity is retrievable by its id.
#[test]
fn should_find_by_id() {
    // given
    let data = MyData { id: 42, data: 42 };
    let storage = InMemoryStorage::from(&vec![data.clone()]);
    // when
    let result = storage.find_by_id(&42u32).unwrap();
    // then
    assert_eq!(data, result);
}
// Looking up a missing id yields `EntityNotFound` carrying the id as text.
#[test]
fn should_return_not_found() {
    // given
    let storage: InMemoryStorage<u32, MyData> = InMemoryStorage::new();
    // when
    let result = storage.find_by_id(&123u32);
    // then
    match result {
        Err(InMemoryStorageError::EntityNotFound { entity_id: id }) => {
            assert_eq!(id, "123".to_owned())
        }
        // panic! gives a diagnostic instead of the opaque `assert!(false)`.
        _ => panic!("expected InMemoryStorageError::EntityNotFound"),
    }
}
// Descending sort with a one-element page returns only the highest id.
#[test]
fn should_find_and_paginate() {
    // given
    let data = vec![
        MyData { id: 1, data: 42 },
        MyData { id: 2, data: 42 },
        MyData { id: 3, data: 42 },
    ];
    let storage = InMemoryStorage::from(&data);
    // when — leftover `dbg!` debug output removed.
    let result = storage
        .find_all_with_page_and_sort(&Page::new(0, 1), &Sort::DESCENDING)
        .unwrap();
    // then
    assert_eq!(1, result.len());
    assert_eq!(3u32, result.first().unwrap().id);
}
// Updating replaces the stored entity that shares the same id.
#[test]
fn should_update_entity() {
    // given
    let mut storage: InMemoryStorage<u32, MyData> =
        InMemoryStorage::from(&vec![MyData { id: 1, data: 42 }]);
    let updated = MyData { id: 1, data: 24 };
    // when
    storage.update(&updated).unwrap();
    // then
    assert_eq!(&updated, storage.db.get(&1u32).unwrap());
}
// Removing by id empties a single-entity store.
#[test]
fn should_remove_entity_by_id() {
    // given
    let mut storage = InMemoryStorage::from(&MyData { id: 1, data: 42 });
    // when
    storage.remove_by_id(&1).unwrap();
    // then
    assert!(storage.db.is_empty());
}
// Removing by value (resolved through `Entity::get_id`) empties the store.
#[test]
fn should_remove_entity() {
    // given
    let entity = MyData { id: 1, data: 42 };
    let mut storage = InMemoryStorage::from(&entity);
    // when
    storage.remove(&entity).unwrap();
    // then
    assert!(storage.db.is_empty());
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated WinRT bindings (windows-rs) for
// Windows.ApplicationModel.Resources.Management. Keep hand edits to
// comments only; regenerate rather than patch.
#[repr(transparent)]
#[doc(hidden)]
pub struct IIndexedResourceCandidate(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IIndexedResourceCandidate {
    type Vtable = IIndexedResourceCandidate_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x0e619ef3_faec_4414_a9d7_54acd5953f29);
}
// Raw vtable for IIndexedResourceCandidate: slots 0-2 are IUnknown
// (QueryInterface/AddRef/Release), 3-5 IInspectable, the rest the
// interface's own methods; feature-gated slots collapse to `usize` padding.
#[repr(C)]
#[doc(hidden)]
pub struct IIndexedResourceCandidate_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut IndexedResourceType) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    #[cfg(feature = "Foundation_Collections")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation_Collections"))] usize,
    #[cfg(feature = "Foundation_Collections")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation_Collections"))] usize,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, qualifiername: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
// Generated interface + raw vtable for IIndexedResourceQualifier
// (slots 0-5 are IUnknown/IInspectable; 6-7 the getters).
#[repr(transparent)]
#[doc(hidden)]
pub struct IIndexedResourceQualifier(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IIndexedResourceQualifier {
    type Vtable = IIndexedResourceQualifier_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xdae3bb9b_d304_497f_a168_a340042c8adb);
}
#[repr(C)]
#[doc(hidden)]
pub struct IIndexedResourceQualifier_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
// Generated interface + raw vtable for IResourceIndexer.
#[repr(transparent)]
#[doc(hidden)]
pub struct IResourceIndexer(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IResourceIndexer {
    type Vtable = IResourceIndexer_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x2d4cf9a5_e32f_4ab2_8748_96350a016da3);
}
#[repr(C)]
#[doc(hidden)]
pub struct IResourceIndexer_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, filepath: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    #[cfg(all(feature = "Foundation", feature = "Foundation_Collections"))] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, file: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(all(feature = "Foundation", feature = "Foundation_Collections")))] usize,
);
// Generated activation-factory interface + raw vtable for ResourceIndexer.
#[repr(transparent)]
#[doc(hidden)]
pub struct IResourceIndexerFactory(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IResourceIndexerFactory {
    type Vtable = IResourceIndexerFactory_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xb8de3f09_31cd_4d97_bd30_8d39f742bc61);
}
#[repr(C)]
#[doc(hidden)]
pub struct IResourceIndexerFactory_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, projectroot: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
// Generated second activation factory (adds an extension-dll parameter).
#[repr(transparent)]
#[doc(hidden)]
pub struct IResourceIndexerFactory2(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IResourceIndexerFactory2 {
    type Vtable = IResourceIndexerFactory2_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x6040f18d_d5e5_4b60_9201_cd279cbcfed9);
}
#[repr(C)]
#[doc(hidden)]
pub struct IResourceIndexerFactory2_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, projectroot: ::windows::core::RawPtr, extensiondllpath: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
// Generated projection of the IndexedResourceCandidate runtime class.
// Each method dispatches through a numbered vtable slot of
// IIndexedResourceCandidate_abi (slot 6 onward; 0-5 are IUnknown/IInspectable).
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IndexedResourceCandidate(pub ::windows::core::IInspectable);
impl IndexedResourceCandidate {
    pub fn Type(&self) -> ::windows::core::Result<IndexedResourceType> {
        let this = self;
        unsafe {
            let mut result__: IndexedResourceType = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<IndexedResourceType>(result__)
        }
    }
    #[cfg(feature = "Foundation")]
    pub fn Uri(&self) -> ::windows::core::Result<super::super::super::Foundation::Uri> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::Uri>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn Metadata(&self) -> ::windows::core::Result<super::super::super::Foundation::Collections::IMapView<::windows::core::HSTRING, ::windows::core::HSTRING>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::Collections::IMapView<::windows::core::HSTRING, ::windows::core::HSTRING>>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn Qualifiers(&self) -> ::windows::core::Result<super::super::super::Foundation::Collections::IVectorView<IndexedResourceQualifier>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::Collections::IVectorView<IndexedResourceQualifier>>(result__)
        }
    }
    pub fn ValueAsString(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
    pub fn GetQualifierValue<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, qualifiername: Param0) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).11)(::core::mem::transmute_copy(this), qualifiername.into_param().abi(), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
}
// Generated boilerplate for IndexedResourceCandidate: runtime identity
// (RuntimeType/Interface/RuntimeName) plus the From/IntoParam conversions
// to IUnknown and IInspectable required by the windows-rs ABI.
unsafe impl ::windows::core::RuntimeType for IndexedResourceCandidate {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.ApplicationModel.Resources.Management.IndexedResourceCandidate;{0e619ef3-faec-4414-a9d7-54acd5953f29})");
}
unsafe impl ::windows::core::Interface for IndexedResourceCandidate {
    type Vtable = IIndexedResourceCandidate_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x0e619ef3_faec_4414_a9d7_54acd5953f29);
}
impl ::windows::core::RuntimeName for IndexedResourceCandidate {
    const NAME: &'static str = "Windows.ApplicationModel.Resources.Management.IndexedResourceCandidate";
}
impl ::core::convert::From<IndexedResourceCandidate> for ::windows::core::IUnknown {
    fn from(value: IndexedResourceCandidate) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&IndexedResourceCandidate> for ::windows::core::IUnknown {
    fn from(value: &IndexedResourceCandidate) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IndexedResourceCandidate {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IndexedResourceCandidate {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<IndexedResourceCandidate> for ::windows::core::IInspectable {
    fn from(value: IndexedResourceCandidate) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IndexedResourceCandidate> for ::windows::core::IInspectable {
    fn from(value: &IndexedResourceCandidate) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for IndexedResourceCandidate {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a IndexedResourceCandidate {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
unsafe impl ::core::marker::Send for IndexedResourceCandidate {}
unsafe impl ::core::marker::Sync for IndexedResourceCandidate {}
// Generated projection of the IndexedResourceQualifier runtime class
// (vtable slots 6 and 7 of IIndexedResourceQualifier_abi).
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IndexedResourceQualifier(pub ::windows::core::IInspectable);
impl IndexedResourceQualifier {
    pub fn QualifierName(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
    pub fn QualifierValue(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
}
// Generated boilerplate for IndexedResourceQualifier: runtime identity plus
// From/IntoParam conversions to IUnknown and IInspectable.
unsafe impl ::windows::core::RuntimeType for IndexedResourceQualifier {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.ApplicationModel.Resources.Management.IndexedResourceQualifier;{dae3bb9b-d304-497f-a168-a340042c8adb})");
}
unsafe impl ::windows::core::Interface for IndexedResourceQualifier {
    type Vtable = IIndexedResourceQualifier_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xdae3bb9b_d304_497f_a168_a340042c8adb);
}
impl ::windows::core::RuntimeName for IndexedResourceQualifier {
    const NAME: &'static str = "Windows.ApplicationModel.Resources.Management.IndexedResourceQualifier";
}
impl ::core::convert::From<IndexedResourceQualifier> for ::windows::core::IUnknown {
    fn from(value: IndexedResourceQualifier) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&IndexedResourceQualifier> for ::windows::core::IUnknown {
    fn from(value: &IndexedResourceQualifier) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IndexedResourceQualifier {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IndexedResourceQualifier {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<IndexedResourceQualifier> for ::windows::core::IInspectable {
    fn from(value: IndexedResourceQualifier) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IndexedResourceQualifier> for ::windows::core::IInspectable {
    fn from(value: &IndexedResourceQualifier) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for IndexedResourceQualifier {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a IndexedResourceQualifier {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
unsafe impl ::core::marker::Send for IndexedResourceQualifier {}
unsafe impl ::core::marker::Sync for IndexedResourceQualifier {}
// WinRT enum projected as a transparent newtype over its i4 (i32) backing
// value, so unknown values round-trip through the ABI without data loss.
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct IndexedResourceType(pub i32);
impl IndexedResourceType {
    pub const String: IndexedResourceType = IndexedResourceType(0i32);
    pub const Path: IndexedResourceType = IndexedResourceType(1i32);
    pub const EmbeddedData: IndexedResourceType = IndexedResourceType(2i32);
}
impl ::core::convert::From<i32> for IndexedResourceType {
    fn from(value: i32) -> Self {
        Self(value)
    }
}
// repr(transparent) over i32 makes the type its own ABI representation.
unsafe impl ::windows::core::Abi for IndexedResourceType {
    type Abi = Self;
}
unsafe impl ::windows::core::RuntimeType for IndexedResourceType {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"enum(Windows.ApplicationModel.Resources.Management.IndexedResourceType;i4)");
}
impl ::windows::core::DefaultType for IndexedResourceType {
    type DefaultType = Self;
}
// --- Machine-generated windows-rs projection of ResourceIndexer ---
// Transparent wrapper over IInspectable; methods dispatch through raw vtable
// slots (the `.6` / `.7` indices below are fixed by the interface definition).
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct ResourceIndexer(pub ::windows::core::IInspectable);
impl ResourceIndexer {
    // Indexes a single file path; vtable slot 6 of IResourceIndexer.
    #[cfg(feature = "deprecated")]
    #[cfg(feature = "Foundation")]
    pub fn IndexFilePath<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::Uri>>(&self, filepath: Param0) -> ::windows::core::Result<IndexedResourceCandidate> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), filepath.into_param().abi(), &mut result__).from_abi::<IndexedResourceCandidate>(result__)
        }
    }
    // Async variant returning a vector view of candidates; vtable slot 7.
    #[cfg(feature = "deprecated")]
    #[cfg(all(feature = "Foundation", feature = "Foundation_Collections"))]
    pub fn IndexFileContentsAsync<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::Uri>>(&self, file: Param0) -> ::windows::core::Result<super::super::super::Foundation::IAsyncOperation<super::super::super::Foundation::Collections::IVectorView<IndexedResourceCandidate>>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), file.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::IAsyncOperation<super::super::super::Foundation::Collections::IVectorView<IndexedResourceCandidate>>>(result__)
        }
    }
    // Activation-factory constructor (IResourceIndexerFactory::CreateResourceIndexer).
    #[cfg(feature = "deprecated")]
    #[cfg(feature = "Foundation")]
    pub fn CreateResourceIndexer<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::Uri>>(projectroot: Param0) -> ::windows::core::Result<ResourceIndexer> {
        Self::IResourceIndexerFactory(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), projectroot.into_param().abi(), &mut result__).from_abi::<ResourceIndexer>(result__)
        })
    }
    // Factory constructor with an extension DLL (IResourceIndexerFactory2).
    #[cfg(feature = "deprecated")]
    #[cfg(feature = "Foundation")]
    pub fn CreateResourceIndexerWithExtension<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::Uri>, Param1: ::windows::core::IntoParam<'a, super::super::super::Foundation::Uri>>(projectroot: Param0, extensiondllpath: Param1) -> ::windows::core::Result<ResourceIndexer> {
        Self::IResourceIndexerFactory2(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), projectroot.into_param().abi(), extensiondllpath.into_param().abi(), &mut result__).from_abi::<ResourceIndexer>(result__)
        })
    }
    // NOTE(review): `static mut` factory caches are generated code; the
    // generator relies on FactoryCache's own internal synchronization.
    pub fn IResourceIndexerFactory<R, F: FnOnce(&IResourceIndexerFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<ResourceIndexer, IResourceIndexerFactory> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
    pub fn IResourceIndexerFactory2<R, F: FnOnce(&IResourceIndexerFactory2) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<ResourceIndexer, IResourceIndexerFactory2> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
unsafe impl ::windows::core::RuntimeType for ResourceIndexer {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.ApplicationModel.Resources.Management.ResourceIndexer;{2d4cf9a5-e32f-4ab2-8748-96350a016da3})");
}
unsafe impl ::windows::core::Interface for ResourceIndexer {
    type Vtable = IResourceIndexer_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x2d4cf9a5_e32f_4ab2_8748_96350a016da3);
}
impl ::windows::core::RuntimeName for ResourceIndexer {
    const NAME: &'static str = "Windows.ApplicationModel.Resources.Management.ResourceIndexer";
}
impl ::core::convert::From<ResourceIndexer> for ::windows::core::IUnknown {
    fn from(value: ResourceIndexer) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&ResourceIndexer> for ::windows::core::IUnknown {
    fn from(value: &ResourceIndexer) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for ResourceIndexer {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a ResourceIndexer {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<ResourceIndexer> for ::windows::core::IInspectable {
    fn from(value: ResourceIndexer) -> Self {
        value.0
    }
}
impl ::core::convert::From<&ResourceIndexer> for ::windows::core::IInspectable {
    fn from(value: &ResourceIndexer) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for ResourceIndexer {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a ResourceIndexer {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
unsafe impl ::core::marker::Send for ResourceIndexer {}
unsafe impl ::core::marker::Sync for ResourceIndexer {}
|
/// Ownership/borrowing walkthrough (learning exercise).
///
/// BUG FIX: the original printed `s1` after `let s2 = s1;`, which is a
/// compile error (E0382, use of moved value) — `String` owns heap data and
/// is not `Copy`, so assignment moves it. The print now uses `s2` and the
/// move is explained in comments instead of left as a build breaker.
fn main() {
    // Data move
    // let s1 = String::from("Hello"); // Data is on heap
    // let s2 = s1; // Data is on heap - Shallow copy
    // // print!("{}", s1); //This is an error
    // print!("{}", s2);
    // Data clone
    // let s1 = String::from("hello");
    // let s2 = s1.clone(); // Data is on heap - Deep copy
    // let s3 = String::new("hi"); Not possible
    let s1 = String::from("Hello");
    let s2 = s1; // `s1` is MOVED into `s2`; `s1` is invalid from here on.
    print!("{}", s2); // was `s1` — printing a moved-out value does not compile
    let a = "hello";
    let b = a; // `&str` is Copy (pointer + length), so `a` stays usable
    print!("{}", a);
    let _ = b;
    let x: i32 = 5;
    let y: i32 = x; // i32 is Copy: `x` is duplicated, not moved
    println!("x = {}, y = {}", x, y); // This works
    let mut s = String::from("hello"); // s comes into scope
    // takes_ownership(&mut s); // s's value moves into the function...
    // ... and so is no longer valid here
    println!("{}", s);
    let x = 5; // x comes into scope
    // makes_copy(x); // x would move into the function,
    // but i32 is Copy, so it’s okay to still
    // use x afterward
    println!("{}", x);
    let mut i = String::from("hi ");
    let j = &mut i; // passed a mutable reference to a mutable variable j
    println!("j : {}", *j);
    // let k = &mut i;  // second &mut while `j` is live: not allowed
    // j.push_str("welcome"); // modified j
    // println!("j : {}", j);
    // println!("i {}", k);
    // println!("i {}", j);
    // let k = &i; // shared borrow while `j` (a &mut) is still live: not allowed
    // k.push_str(" again welcome");
    // println!("k : {} ",k);
    // println!("j : {} ",j);
    let mut s = String::from("hello");
    let r1 = &s; // no problem
    let r2 = &s; // no problem
    println!("{} and {}", r1, r2);
    // r1 and r2 are no longer used after this point (non-lexical lifetimes),
    // so a mutable borrow is now permitted.
    let r3 = &mut s; // no problem
    println!("{}", r3);
    let s = String::from("hello");
    let len = s.len();
    let _ = len;
    // let slice = &s[0..len];
    let slice = &s[..]; // full-range slice borrows the whole string
    println!("{}", slice);
    // let l = &mut i;
    // let m = &mut i; // two simultaneous &mut: not allowed
    // println!("{} {} {}", i, j,k);
    // println!("{}",l);
    // j.push_str(" again welcome");
    // let m = &mut i;
    // j.push_str("welcome");
}
/// Appends `"sample"` to the borrowed string, echoes the result to stdout,
/// and returns an owned copy of the updated contents. The caller keeps
/// ownership of the original `String`, which now ends in `"sample"`.
fn takes_ownership(some_string: &mut String) -> String {
    some_string.push_str("sample");
    println!("{}", some_string);
    let owned_copy = some_string.clone();
    owned_copy
}
/// Prints an `i32` received by value. `i32` is `Copy`, so the caller's
/// variable remains valid after the call; nothing is dropped here.
fn makes_copy(some_integer: i32) {
    let echoed = some_integer;
    println!("{}", echoed);
}
|
// svd2rust-generated read/write proxies for the 32-bit PLL3DIVR register.
#[doc = "Reader of register PLL3DIVR"]
pub type R = crate::R<u32, super::PLL3DIVR>;
#[doc = "Writer for register PLL3DIVR"]
pub type W = crate::W<u32, super::PLL3DIVR>;
#[doc = "Register PLL3DIVR `reset()`'s with value 0x0101_0280"]
impl crate::ResetValue for super::PLL3DIVR {
    type Type = u32;
    // Hardware reset value: DIVR3=1, DIVQ3=1, DIVP3=1, DIVN3=0x80.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0101_0280
    }
}
// Field reader/writer proxies. Each writer masks the value to the field
// width and shifts it to the field's bit offset within the register.
#[doc = "Reader of field `DIVN3`"]
pub type DIVN3_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `DIVN3`"]
pub struct DIVN3_W<'a> {
    w: &'a mut W,
}
impl<'a> DIVN3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // `unsafe`: the SVD declares no safe value range for DIVN3, so callers
    // must guarantee the 9-bit value is valid for the hardware.
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Bits 0:8 (mask 0x01ff).
        self.w.bits = (self.w.bits & !0x01ff) | ((value as u32) & 0x01ff);
        self.w
    }
}
#[doc = "Reader of field `DIVP3`"]
pub type DIVP3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DIVP3`"]
pub struct DIVP3_W<'a> {
    w: &'a mut W,
}
impl<'a> DIVP3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Bits 9:15 (7-bit field at offset 9).
        self.w.bits = (self.w.bits & !(0x7f << 9)) | (((value as u32) & 0x7f) << 9);
        self.w
    }
}
#[doc = "Reader of field `DIVQ3`"]
pub type DIVQ3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DIVQ3`"]
pub struct DIVQ3_W<'a> {
    w: &'a mut W,
}
impl<'a> DIVQ3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Bits 16:22 (7-bit field at offset 16).
        self.w.bits = (self.w.bits & !(0x7f << 16)) | (((value as u32) & 0x7f) << 16);
        self.w
    }
}
#[doc = "Reader of field `DIVR3`"]
pub type DIVR3_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DIVR3`"]
pub struct DIVR3_W<'a> {
    w: &'a mut W,
}
impl<'a> DIVR3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Bits 24:30 (7-bit field at offset 24).
        self.w.bits = (self.w.bits & !(0x7f << 24)) | (((value as u32) & 0x7f) << 24);
        self.w
    }
}
impl R {
    // NOTE(review): the SVD doc text says "PLL1 VCO" but this register is
    // PLL3DIVR — likely a copy/paste typo in the vendor SVD; verify upstream.
    #[doc = "Bits 0:8 - Multiplication factor for PLL1 VCO"]
    #[inline(always)]
    pub fn divn3(&self) -> DIVN3_R {
        DIVN3_R::new((self.bits & 0x01ff) as u16)
    }
    #[doc = "Bits 9:15 - PLL DIVP division factor"]
    #[inline(always)]
    pub fn divp3(&self) -> DIVP3_R {
        DIVP3_R::new(((self.bits >> 9) & 0x7f) as u8)
    }
    #[doc = "Bits 16:22 - PLL DIVQ division factor"]
    #[inline(always)]
    pub fn divq3(&self) -> DIVQ3_R {
        DIVQ3_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
    #[doc = "Bits 24:30 - PLL DIVR division factor"]
    #[inline(always)]
    pub fn divr3(&self) -> DIVR3_R {
        DIVR3_R::new(((self.bits >> 24) & 0x7f) as u8)
    }
}
impl W {
    // NOTE(review): "PLL1 VCO" below is almost certainly a vendor-SVD typo
    // for PLL3 (this is the PLL3 divider register); verify upstream.
    #[doc = "Bits 0:8 - Multiplication factor for PLL1 VCO"]
    #[inline(always)]
    pub fn divn3(&mut self) -> DIVN3_W {
        DIVN3_W { w: self }
    }
    #[doc = "Bits 9:15 - PLL DIVP division factor"]
    #[inline(always)]
    pub fn divp3(&mut self) -> DIVP3_W {
        DIVP3_W { w: self }
    }
    #[doc = "Bits 16:22 - PLL DIVQ division factor"]
    #[inline(always)]
    pub fn divq3(&mut self) -> DIVQ3_W {
        DIVQ3_W { w: self }
    }
    #[doc = "Bits 24:30 - PLL DIVR division factor"]
    #[inline(always)]
    pub fn divr3(&mut self) -> DIVR3_W {
        DIVR3_W { w: self }
    }
}
|
use csv::{ReaderBuilder, StringRecord, WriterBuilder};
use futures::future::join_all;
use tokio::sync::Semaphore;
use std::path::Path;
use std::sync::Mutex;
use indicatif::ProgressBar;
use reqwest::Client;
use std::iter::Iterator;
use std::error::Error;
use std::sync::Arc;
use serde_json::Value;
use std::fmt::{Formatter, Display};
use std::time::Duration;
/// Column-oriented CSV table specialized for address geocoding.
///
/// Data is stored column-major (`data[col][row]`). Address-related columns
/// are tracked by index into `data`; `lat`/`lng` are pulled OUT of `data`
/// (and out of `headers`) into their own `f64` vectors.
#[derive(Default, Clone)]
pub struct DataFrame {
    path: String,                     // source CSV path
    headers: Vec<String>,             // column names, excluding lat/lng
    // (columns, rows). NOTE(review): `shape.0` is set from the ORIGINAL
    // header count in `from_path`, which may include removed lat/lng
    // columns — confirm before using it to index `data`.
    pub shape: (usize, usize),
    delimiter: char,                  // detected field delimiter (',' or '|')
    pub prefix: String,               // prefix applied to output header names
    // The DATA
    data: Vec<Vec<String>>,
    // Indexes for matching, fetching, etc (index for data)
    id: Option<usize>,
    addr1: Option<usize>,
    addr2: Option<usize>,
    city: Option<usize>,
    state: Option<usize>,
    zipcode: Option<usize>,
    // Columns (because lat and lng have different type) Excluded from headers
    lat: Option<Vec<f64>>,
    lng: Option<Vec<f64>>,
    // Additional Output columns
    pub output_cols: Vec<usize>,
    compare_cols: Vec<usize>
}
impl Display for DataFrame {
    /// Renders the frame's configuration — path, prefix, detected special
    /// column indexes, lat/lng presence, and the output/compare column
    /// names — in a brace-delimited, tab-indented layout.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Render an optional column index as its number or "None".
        let idx = |v: &Option<usize>| v.map_or("None".to_string(), |e| e.to_string());
        writeln!(f, "{{")?;
        writeln!(f, "\tpath:\t{}", self.path)?;
        writeln!(f, "\tprefix:\t{}\n", self.prefix)?;
        writeln!(f, "\taddr1:\t\t{}", idx(&self.addr1))?;
        writeln!(f, "\taddr2:\t\t{}", idx(&self.addr2))?;
        writeln!(f, "\tcity:\t\t{}", idx(&self.city))?;
        writeln!(f, "\tstate:\t\t{}", idx(&self.state))?;
        writeln!(f, "\tzipcode:\t{}\n", idx(&self.zipcode))?;
        writeln!(f, "\tlat:\t{}", if self.lat.is_some() { "Found" } else { "Not Found" })?;
        writeln!(f, "\tlng:\t{}\n", if self.lng.is_some() { "Found" } else { "Not Found" })?;
        writeln!(f, "\toutput_cols: {{")?;
        for &col in &self.output_cols {
            writeln!(f, "\t\t{}", self.headers[col])?;
        }
        writeln!(f, "\t}}")?;
        writeln!(f, "\tcompare_cols: {{")?;
        for &col in &self.compare_cols {
            writeln!(f, "\t\t{}", self.headers[col])?;
        }
        writeln!(f, "\t}}")?;
        writeln!(f, "}}")?;
        Ok(())
    }
}
impl DataFrame {
    // CONSTRUCTORS
    /// Loads a CSV (or pipe-delimited) file into a column-major DataFrame,
    /// auto-detecting the delimiter and mapping well-known header names
    /// ("addr1", "city", "lat", ...) to their column indexes. Any lat/lng
    /// columns are parsed to f64 and removed from `headers`/`data`.
    ///
    /// Panics if the file cannot be opened or read (all reader results are
    /// unwrapped).
    pub fn from_path(path: &str) -> DataFrame {
        // Try to guess delimiter based on number of headers returned
        let comma_count = {
            let mut reader = ReaderBuilder::new()
                .delimiter(b',')
                .from_path(path)
                .unwrap();
            reader.headers().unwrap().iter().count()
        };
        let pipe_count = {
            let mut reader = ReaderBuilder::new()
                .delimiter(b'|')
                .from_path(path)
                .unwrap();
            reader.headers().unwrap().iter().count()
        };
        // Whichever delimiter splits the header row into more fields wins;
        // ties go to comma.
        let delimiter = if pipe_count > comma_count {'|'} else {','};
        // Read in the file for further analysis
        let (mut headers, width, height) = {
            let mut reader = ReaderBuilder::new()
                .delimiter(delimiter as u8)
                .from_path(path)
                .unwrap();
            // Get headers and size information
            let headers = reader.headers().unwrap()
                .iter()
                .map(|e| e.to_string())
                .collect::<Vec<String>>();
            let width = headers.len();
            let height = reader.records().count();
            (headers, width, height)
        };
        // Map headers to special column values
        let mut id = None;
        let mut addr1 = None;
        let mut addr2 = None;
        let mut city = None;
        let mut state = None;
        let mut zipcode = None;
        let mut lat = None;
        let mut lng = None;
        for (index, header) in headers.iter().enumerate() {
            // Normalize: lowercase, trimmed, spaces removed — so
            // "Postal Code" matches "postalcode".
            let lower = header.to_lowercase();
            let trim = lower.trim();
            let match_str = trim.replace(" ", "");
            match match_str.as_str() {
                "id" => {
                    id = Some(index);
                }
                "addr1" | "address" | "addr" => {
                    addr1 = Some(index);
                }
                "addr2" | "address2" => {
                    addr2 = Some(index);
                }
                "city" => {
                    city = Some(index);
                }
                "state" => {
                    state = Some(index);
                }
                "zipcode" | "zip" | "postalcode" => {
                    zipcode = Some(index);
                }
                "lat" | "latitude" => {
                    lat = Some(index);
                }
                "lng" | "longitude" => {
                    lng = Some(index);
                }
                _ => {}
            }
        }
        // Modify headers removing lat and lng columns
        if lat.is_some() && lng.is_some() {
            let lat = lat.unwrap();
            let lng = lng.unwrap();
            // Remove the higher index last so the first removal doesn't
            // shift it; hence the `-1` adjustment.
            if lat > lng {
                headers.remove(lng);
                headers.remove(lat-1);
            } else {
                headers.remove(lat);
                headers.remove(lng-1);
            }
        } else if let Some(index) = lat {
            headers.remove(index);
        } else if let Some(index) = lng {
            headers.remove(index);
        }
        // Create empty data vec with capacity for data
        let mut data = Vec::with_capacity(headers.len());
        for _ in 0..headers.len() {
            data.push(Vec::with_capacity(height));
        }
        let mut data_frame = DataFrame {
            path: path.to_string(),
            headers,
            // NOTE(review): `width` still counts any removed lat/lng
            // columns, so shape.0 can exceed data.len(). Confirm intended.
            shape: (width, height),
            delimiter,
            id,
            addr1,
            addr2,
            city,
            state,
            zipcode,
            data,
            ..DataFrame::default()
        };
        // Read all data into dataframe
        let mut reader = ReaderBuilder::new()
            .delimiter(delimiter as u8)
            .from_path(path)
            .unwrap();
        // Create vectors for lat/lng if needed
        if lat.is_some() {
            data_frame.lat = Some(Vec::with_capacity(data_frame.shape.1));
        }
        if lng.is_some() {
            data_frame.lng = Some(Vec::with_capacity(data_frame.shape.1));
        }
        // Add all data to correct vector
        for record in reader.records() {
            // `offset` counts lat/lng columns already skipped on this row,
            // so remaining fields land in the right (shifted) data column.
            let mut offset=0;
            let record = record.unwrap();
            for (col, item) in record.iter().enumerate() {
                if lat.is_some() && col==lat.unwrap() {
                    // Unparseable coordinates become NaN rather than errors.
                    data_frame.lat.as_mut().unwrap().push(item.parse::<f64>().unwrap_or(f64::NAN));
                    offset += 1;
                } else if lng.is_some() && col==lng.unwrap() {
                    data_frame.lng.as_mut().unwrap().push(item.parse::<f64>().unwrap_or(f64::NAN));
                    offset += 1;
                } else {
                    data_frame.data[col-offset].push(item.to_string());
                }
            }
        }
        data_frame
    }
    /// Builds an empty frame with `width` pre-allocated data columns and
    /// lat/lng vectors reserved for `height` rows. Headers, shape, and
    /// column indexes are left at their defaults.
    pub fn with_capacity(width: usize, height: usize) -> DataFrame {
        let mut data = Vec::with_capacity(width);
        for _ in 0..width {
            data.push(Vec::with_capacity(height));
        }
        DataFrame {
            data,
            lat: Some(Vec::with_capacity(height)),
            lng: Some(Vec::with_capacity(height)),
            ..DataFrame::default()
        }
    }
    /// Resolves a header name to its column index (exact match), or an
    /// error naming the missing column.
    fn get_col_index(&self, col: &str) -> Result<usize, Box<dyn Error>> {
        let col_option = self.headers.iter()
            .enumerate()
            .find(|e| e.1.eq(col));
        if let Some((index, _)) = col_option {
            Ok(index)
        } else {
            return Err(format!("No column named {}", col))?;
        }
    }
    // BOOLEAN CHECKS
    /// True when addr1, city, and state columns were all identified —
    /// the minimum needed to build geocoding queries.
    pub fn ready_to_fetch(&self) -> bool {
        self.addr1.is_some() &&
        self.city.is_some() &&
        self.state.is_some()
    }
    /// True when both coordinate vectors are present.
    pub fn ready_to_match(&self) -> bool {
        self.lat.is_some() &&
        self.lng.is_some()
    }
    // GETTERS
    pub fn get_headers(&self) -> &Vec<String> {
        &self.headers
    }
    pub fn set_headers(&mut self, headers: Vec<String>) {
        self.headers = headers;
    }
    // Special Columns
    // Each getter returns the column's data, or None if that column was
    // never identified. NOTE(review): these index `data` with the stored
    // index — they assume `data` has not been reordered since detection.
    pub fn id(&self) -> Option<&Vec<String>> {
        if self.id.is_none() {return None;}
        Some(&self.data[self.id.unwrap()])
    }
    pub fn addr1(&self) -> Option<&Vec<String>> {
        if self.addr1.is_none() {return None;}
        Some(&self.data[self.addr1.unwrap()])
    }
    pub fn addr2(&self) -> Option<&Vec<String>> {
        if self.addr2.is_none() {return None;}
        Some(&self.data[self.addr2.unwrap()])
    }
    pub fn city(&self) -> Option<&Vec<String>> {
        if self.city.is_none() {return None;}
        Some(&self.data[self.city.unwrap()])
    }
    pub fn state(&self) -> Option<&Vec<String>> {
        if self.state.is_none() {return None;}
        Some(&self.data[self.state.unwrap()])
    }
    pub fn zipcode(&self) -> Option<&Vec<String>> {
        if self.zipcode.is_none() {return None;}
        Some(&self.data[self.zipcode.unwrap()])
    }
    pub fn lat(&self) -> Option<&Vec<f64>> {
        if self.lat.is_none() {return None;}
        Some(self.lat.as_ref().unwrap())
    }
    pub fn lat_mut(&mut self) -> Option<&mut Vec<f64>> {
        if self.lat.is_none() {return None;}
        Some(self.lat.as_mut().unwrap())
    }
    pub fn lng(&self) -> Option<&Vec<f64>> {
        if self.lng.is_none() {return None;}
        Some(self.lng.as_ref().unwrap())
    }
    pub fn lng_mut(&mut self) ->Option<&mut Vec<f64>> {
        if self.lng.is_none() {return None;}
        Some(self.lng.as_mut().unwrap())
    }
    pub fn data(&self) -> &Vec<Vec<String>> {
        &self.data
    }
    pub fn data_mut(&mut self) -> &mut Vec<Vec<String>> {
        &mut self.data
    }
    // SETTERS
    /// Registers an existing column (by name) as an output column.
    pub fn add_output_column(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.output_cols.push(self.get_col_index(col)?);
        Ok(())
    }
    /// Registers an existing column (by name) as a comparison column.
    pub fn add_compare_column(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.compare_cols.push(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_prefix(&mut self, prefix: &str) {
        self.prefix = prefix.to_string();
    }
    // Special columns
    // Manual overrides for the auto-detected column indexes; each fails if
    // the named header does not exist.
    pub fn set_id(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.id = Some(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_addr1(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.addr1 = Some(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_addr2(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.addr2 = Some(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_city(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.city = Some(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_state(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.state = Some(self.get_col_index(col)?);
        Ok(())
    }
    pub fn set_zipcode(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        self.zipcode = Some(self.get_col_index(col)?);
        Ok(())
    }
    /// Converts a string column into the lat vector, removing it from
    /// `data`/`headers`.
    /// NOTE(review): `parse().unwrap()` panics on any non-numeric cell —
    /// unlike `from_path`, which maps bad cells to NaN. Consider aligning.
    pub fn set_lat(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        let index = self.get_col_index(col)?;
        let mut column = self.data.remove(index);
        self.lat = Some(column.iter_mut().map(|e| e.parse::<f64>().unwrap()).collect());
        self.headers.remove(index);
        Ok(())
    }
    /// Converts a string column into the lng vector, removing it from
    /// `data`/`headers`. Same panic caveat as `set_lat`.
    pub fn set_lng(&mut self, col: &str) -> Result<(), Box<dyn Error>> {
        let index = self.get_col_index(col)?;
        let mut column = self.data.remove(index);
        self.lng = Some(column.iter_mut().map(|e| e.parse::<f64>().unwrap()).collect());
        self.headers.remove(index);
        Ok(())
    }
    /// Geocodes every row via the Google Geocoding API (rate-limited and
    /// connection-capped), fills `lat`/`lng` plus a new "norm_address"
    /// column, and writes the augmented table to `<stem>_coords.csv`.
    pub async fn fetch(&mut self, key: String) -> Result<(), Box<dyn Error>> {
        println!("Fetching {} coords for {}:", self.shape.1, self.path);
        // collect addresses into a vec
        // NOTE(review): `addresses` is built but never used — the loop
        // below calls get_address(row) again. Dead work; candidate removal.
        let mut addresses = Vec::with_capacity(self.shape.1);
        for row in 0..self.shape.1 {
            addresses.push(self.get_address(row));
        }
        // Google's geocoding api will block us if we exceed 50 requests per second
        let requests_per_second: usize = 30;
        let dur = Duration::from_secs_f64(1.0/(requests_per_second as f64));
        let mut clock = tokio::time::interval(dur);
        // Semaphore to make sure we don't max out open http connections
        let sem = Arc::new(Semaphore::new(30));
        // Collection of async tasks which we will join on
        let mut tasks = Vec::with_capacity(self.shape.1);
        // Progress bar to track fetching progress
        let bar = Arc::new(Mutex::new(ProgressBar::new(self.shape.1 as u64)));
        // Shared client for http requests
        let client = Arc::new(Client::new());
        for row in 0..self.shape.1 {
            let bar_clone = bar.clone();
            let client_clone = client.clone();
            let addr = self.get_address(row);
            let key_clone = key.clone();
            let sem_clone = sem.clone();
            // Rate limit
            // Ticking here throttles how fast tasks are SPAWNED, which
            // bounds the request rate.
            clock.tick().await;
            tasks.push(tokio::spawn(async move {
                if addr.is_none() {
                    // Incomplete address: record NaN coords, no API call.
                    bar_clone.lock().unwrap().inc(1);
                    return (f64::NAN, f64::NAN, "".to_string());
                }
                let _permit = sem_clone.acquire().await.unwrap();
                let res = fetch_single(&client_clone, addr.unwrap().as_str(), key_clone.as_str()).await.unwrap();
                bar_clone.lock().unwrap().inc(1);
                res
            }));
        }
        // Tasks were spawned in row order, so join_all yields results in
        // row order too.
        let results = join_all(tasks).await;
        bar.lock().unwrap().finish();
        // Add lat and lng rows
        self.lat = Some(Vec::with_capacity(self.shape.1));
        self.lng = Some(Vec::with_capacity(self.shape.1));
        // Add row for normalized address
        self.headers.push("norm_address".to_string());
        self.data.push(Vec::with_capacity(self.shape.1));
        let addr_row = self.data.last_mut().unwrap();
        for result in results {
            let (lat, lng, addr) = result.unwrap();
            self.lat.as_mut().unwrap().push(lat);
            self.lng.as_mut().unwrap().push(lng);
            addr_row.push(addr);
        }
        // Output File
        // Written to the current working directory, not the source dir.
        let path = Path::new(self.path.as_str());
        let path = format!("{}_coords.csv", path.file_stem().unwrap().to_str().unwrap());
        println!("Writing output to {}.", path);
        let mut writer = WriterBuilder::new()
            .delimiter(self.delimiter as u8)
            .from_path(path)?;
        // Print Headers
        let mut new_headers = StringRecord::new();
        for header in self.headers.iter() {
            new_headers.push_field(header);
        }
        new_headers.push_field("lat");
        new_headers.push_field("lng");
        writer.write_record(&new_headers)?;
        let width = self.data.len();
        let height = self.data[0].len();
        // Print data with lat, lng pairs
        for row in 0..height {
            let mut record = StringRecord::new();
            for col in 0..width {
                record.push_field(self.data[col][row].as_str());
            }
            record.push_field(self.lat.as_ref().unwrap()[row].to_string().as_str());
            record.push_field(self.lng.as_ref().unwrap()[row].to_string().as_str());
            writer.write_record(&record)?;
        }
        writer.flush()?;
        Ok(())
    }
    /// Assembles "addr1 [addr2] city state [zipcode]" for one row, or None
    /// if any required part is blank.
    /// NOTE(review): unwraps addr1/city/state indexes — panics unless
    /// `ready_to_fetch()` is true. Callers must check first.
    fn get_address(&self, row: usize) -> Option<String> {
        let addr1 = self.data[self.addr1.unwrap()][row].as_str();
        let city = self.data[self.city.unwrap()][row].as_str();
        let state = self.data[self.state.unwrap()][row].as_str();
        let mut parts = vec![addr1, city, state];
        if parts.iter().map(|e| e.trim()).any(|e| e.is_empty()) {
            return None;
        }
        if let Some(zipcode) = self.zipcode {
            let zipcode = self.data[zipcode][row].as_str();
            parts.push(zipcode);
        }
        if let Some(addr2) = self.addr2 {
            let addr2 = self.data[addr2][row].as_str();
            parts.insert(1, addr2);
        }
        Some(parts.join(" "))
    }
    /// Output column names, each prefixed with `prefix_` when a prefix is set.
    pub fn output_headers(&self) -> Vec<String> {
        let mut headers = Vec::new();
        for col in self.output_cols.iter() {
            if self.prefix.is_empty() {
                headers.push(self.headers[*col].clone())
            } else {
                headers.push(format!("{}_{}", self.prefix, self.headers[*col].clone()));
            }
        }
        headers
    }
    /// Values of the output columns for one row (cloned).
    pub fn output_row(&self, row: usize) -> Vec<String> {
        let mut output_row = Vec::new();
        for col in self.output_cols.iter() {
            output_row.push(self.data[*col][row].clone());
        }
        output_row
    }
    /// Values of the comparison columns for one row (cloned).
    pub fn compare_row(&self, row: usize) -> Vec<String> {
        let mut compare_row = Vec::new();
        for col in self.compare_cols.iter() {
            compare_row.push(self.data[*col][row].clone());
        }
        compare_row
    }
    /// Deletes one row across every column (including lat/lng when present)
    /// and decrements the row count. O(rows) per column due to Vec::remove.
    pub fn remove_row(&mut self, row: usize) {
        if let Some(lat) = &mut self.lat {
            lat.remove(row);
        }
        if let Some(lng) = &mut self.lng {
            lng.remove(row);
        }
        for col in self.data.iter_mut() {
            col.remove(row);
        }
        self.shape.1 -= 1;
    }
}
/// Geocodes a single address via the Google Geocoding API.
///
/// Returns `(lat, lng, formatted_address)`. Any lookup failure (no results,
/// quota exceeded, unparseable body) yields `(NaN, NaN, "")` instead of an
/// error, so one bad address cannot abort a whole batch; transport errors
/// still propagate via `?`.
async fn fetch_single(client: &Client, addr: &str, key: &str) -> Result<(f64, f64, String), Box<dyn Error>> {
    let params = [("address", addr), ("key", key)];
    let res = client.get("https://maps.googleapis.com/maps/api/geocode/json")
        // BUG FIX: was the mojibake `¶ms` (corrupted `&params`), which
        // does not compile.
        .query(&params)
        .send()
        .await?;
    if !res.status().is_success() {
        println!("error fetching {}", addr);
    }
    let text = res.text().await?;
    // BUG FIX: was `.unwrap()` — a non-JSON body (proxy error page, etc.)
    // would panic the task; now it propagates as an error.
    let json: Value = serde_json::from_str(text.as_str())?;
    let lat = json["results"][0]["geometry"]["location"]["lat"].as_f64();
    let lng = json["results"][0]["geometry"]["location"]["lng"].as_f64();
    let addr = json["results"][0]["formatted_address"].as_str();
    // BUG FIX: was `lat.is_some() || lng.is_some()` followed by unwrapping
    // BOTH values — a response with exactly one coordinate panicked.
    if let (Some(lat), Some(lng)) = (lat, lng) {
        let addr = addr.unwrap_or("").to_string();
        Ok((lat, lng, addr))
    } else {
        println!("{}", json);
        if let Some(status) = json["status"].as_str() {
            if status == "OVER_QUERY_LIMIT" {
                println!("\nMaxed Out API KEY\n");
            }
        }
        Ok((f64::NAN, f64::NAN, "".to_string()))
    }
}
|
// Crate module layout: each `pub mod` maps to a sibling source file.
pub mod schema;
pub mod method;
pub mod file;
pub mod cli;
pub mod function_signature;
use diesel::prelude::*;
use diesel::pg::PgConnection;
use serde::{
Serialize,
Deserialize,
};
use crate::schema::logs;
use chrono::NaiveDateTime;
use crate::server::errors::{
Result,
ApiError,
};
use std::str::FromStr;
/// Severity of a persisted log note.
///
/// Mapped by diesel-derive-enum onto the Postgres enum type
/// `logging_level` (see `#[PgType]`); variant names/order must stay in
/// sync with the database type.
#[derive(DbEnum, Debug, PartialEq, Serialize, Deserialize, Clone)]
#[PgType = "logging_level"]
#[DieselType = "Logging_level"]
pub enum LoggingLevel {
    Debug,
    Info,
    Warning,
    Error,
}
impl FromStr for LoggingLevel {
    type Err = ApiError;
    /// Parses the exact (case-sensitive) variant names "Debug", "Info",
    /// "Warning", "Error"; any other input yields a 500 ValueError.
    fn from_str(input: &str) -> std::result::Result<LoggingLevel, Self::Err> {
        let level = match input {
            "Debug" => Self::Debug,
            "Info" => Self::Info,
            "Warning" => Self::Warning,
            "Error" => Self::Error,
            _ => {
                return Err(ApiError {
                    code: 500,
                    message: "Wrong value of logging level".to_string(),
                    error_type: crate::server::errors::ErrorType::ValueError,
                })
            }
        };
        Ok(level)
    }
}
/// A log entry as stored in (and loaded from) the `logs` table.
#[derive(Serialize, Deserialize, Clone, Queryable, Identifiable)]
#[table_name = "logs"]
#[primary_key(id)]
pub struct LogNote {
    pub id: i32,                    // primary key
    pub message: String,            // log text
    pub level: LoggingLevel,        // severity (Postgres enum)
    pub datetime: NaiveDateTime,    // timestamp, naive (no timezone)
}
/// Insertable form of a log entry: `id` and `datetime` are omitted and
/// filled in by the database on insert.
#[derive(Serialize, Deserialize, Clone, Queryable, Insertable)]
#[table_name = "logs"]
pub struct NewLogNote {
    pub message: String,
    pub level: LoggingLevel,
}
impl LogNote {
    // NOTE(review): these fns are `async` but diesel's `execute`/`load` on
    // `&PgConnection` are synchronous/blocking calls — if run on an async
    // executor they will block the worker thread. Confirm this is intended
    // (e.g. callers wrap them in spawn_blocking).
    /// Inserts a new log note; returns the number of inserted rows.
    pub async fn new(
        instance: &NewLogNote,
        conn: &PgConnection,
    ) -> Result<usize> {
        Ok(diesel::insert_into(logs::table)
            .values(instance)
            .execute(conn)?
        )
    }
    /// Loads every log note, newest first.
    pub async fn get(
        conn: &PgConnection,
    ) -> Result<Vec<Self>> {
        logs::table
            .order_by(logs::datetime.desc())
            .load(conn)
            .map_err(|e| e.into())
    }
    /// Loads notes of one level within [from_time, to_time], newest first.
    pub async fn get_range(
        from_time: NaiveDateTime,
        to_time: NaiveDateTime,
        logging_level: LoggingLevel,
        conn: &PgConnection,
    ) -> Result<Vec<Self>> {
        logs::table
            .filter(logs::level.eq(logging_level))
            .filter(logs::datetime.between(from_time, to_time))
            .order_by(logs::datetime.desc())
            .load(conn)
            .map_err(|e| e.into())
    }
}
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we can resolve nested projection types. Issue #20666.
// pretty-expanded FIXME #23616
use std::slice;
// Compiler regression test (issue #20666): the exact shape of these items
// is the point of the test — `foo`'s where-clause projects through TWO
// associated types (Iter, then Item). Do not "clean up" this file.
trait Bound {}
impl<'a> Bound for &'a i32 {}
// Deliberately shadows std's IntoIterator with a local definition.
trait IntoIterator {
    type Iter: Iterator;
    fn into_iter(self) -> Self::Iter;
}
impl<'a, T> IntoIterator for &'a [T; 3] {
    type Iter = slice::Iter<'a, T>;
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.iter()
    }
}
// Nested projection: <<X as IntoIterator>::Iter as Iterator>::Item.
fn foo<X>(x: X) where
    X: IntoIterator,
    <<X as IntoIterator>::Iter as Iterator>::Item: Bound,
{
}
// Same constraint expressed via an equality bound on Iter.
fn bar<T, I, X>(x: X) where
    T: Bound,
    I: Iterator<Item=T>,
    X: IntoIterator<Iter=I>,
{
}
fn main() {
    foo(&[0, 1, 2]);
    bar(&[0, 1, 2]);
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::collections::btree_map::BTreeMap;
use lazy_static::lazy_static;
use super::super::qlib::common::*;
use super::super::qlib::limits::*;
use super::super::runc::oci::*;
//use super::super::qlib::linux::limits::*;
lazy_static! {
    // Maps Linux rlimit resource NAMES (as they appear in OCI specs) to the
    // internal LimitType. Note: RLIMIT_NICE maps to Nice, etc.; resources
    // absent here are unsupported.
    pub static ref FROM_LINUX_SOURCE : BTreeMap<&'static str, LimitType> = [
        ("RLIMIT_AS", LimitType::AS),
        ("RLIMIT_CORE", LimitType::Core),
        ("RLIMIT_CPU", LimitType::CPU),
        ("RLIMIT_DATA", LimitType::Data),
        ("RLIMIT_FSIZE", LimitType::FileSize),
        ("RLIMIT_LOCKS", LimitType::Locks),
        ("RLIMIT_MEMLOCK", LimitType::MemoryLocked),
        ("RLIMIT_MSGQUEUE", LimitType::MessageQueueBytes),
        ("RLIMIT_NICE", LimitType::Nice),
        ("RLIMIT_NOFILE", LimitType::NumberOfFiles),
        ("RLIMIT_NPROC", LimitType::ProcessCount),
        ("RLIMIT_RSS", LimitType::Rss),
        ("RLIMIT_RTPRIO", LimitType::RealTimePriority),
        ("RLIMIT_RTTIME", LimitType::Rttime),
        ("RLIMIT_SIGPENDING", LimitType::SignalsPending),
        ("RLIMIT_STACK", LimitType::Stack),
    ].iter().cloned().collect();
    // Sandbox-wide default limits, computed once on first use.
    pub static ref DEFAULT_LIMITS : LimitSet = DefaultLimits();
}
/// Reverse lookup: returns the "RLIMIT_*" name for a limit type, or
/// "unknown" when the type has no entry in FROM_LINUX_SOURCE.
pub fn FindName(lt: LimitType) -> &'static str {
    FROM_LINUX_SOURCE
        .iter()
        .find(|(_, v)| **v == lt)
        .map(|(k, _)| *k)
        .unwrap_or("unknown")
}
/// Builds the default limit set for a sandboxed container, mirroring what
/// containers get by default (ex: `docker run --rm debian prlimit`), then
/// lowers/overrides FSIZE and NOFILE from the host's own rlimits.
///
/// Panics if getrlimit(2) fails, or if a queried resource is missing from
/// FROM_LINUX_RESOURCE (both indicate a broken host/environment, so there
/// is no caller-recoverable state).
pub fn DefaultLimits() -> LimitSet {
    let ls = NewLinuxLimitSet();
    // Set default limits based on what containers get by default, ex:
    // $ docker run --rm debian prlimit
    ls.SetUnchecked(LimitType::AS, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::Core, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::CPU, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::Data, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::FileSize, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::Locks, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::MemoryLocked, Limit{Cur: 65536, Max: 65536});
    ls.SetUnchecked(LimitType::MessageQueueBytes, Limit{Cur: 819200, Max: 819200});
    ls.SetUnchecked(LimitType::Nice, Limit{Cur: 0, Max: 0});
    ls.SetUnchecked(LimitType::NumberOfFiles, Limit{Cur: 1048576, Max: 1048576});
    ls.SetUnchecked(LimitType::ProcessCount, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::Rss, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::RealTimePriority, Limit{Cur: 0, Max: 0});
    ls.SetUnchecked(LimitType::Rttime, Limit{Cur: INFINITY, Max: INFINITY});
    ls.SetUnchecked(LimitType::SignalsPending, Limit{Cur: 0, Max: 0});
    ls.SetUnchecked(LimitType::Stack, Limit{Cur: 8388608, Max: INFINITY});
    // Read host limits that directly affect the sandbox and adjust the defaults
    // based on them. Only FSIZE and NOFILE are consulted here.
    for res in [libc::RLIMIT_FSIZE, libc::RLIMIT_NOFILE].iter() {
        let mut hl = libc::rlimit {
            rlim_cur: 0,
            rlim_max: 0,
        };
        // SAFETY: `hl` is a valid, writable libc::rlimit; getrlimit only
        // writes into it and does not retain the pointer.
        let ret = unsafe {
            libc::getrlimit(*res, &mut hl)
        };
        // Check the syscall result before using `hl` or the resource id.
        if ret < 0 {
            panic!("Getrlimit fail with err {}", errno::errno().0)
        }
        let res = *res as i32;
        // unwrap_or_else: avoid building the panic message on the success
        // path (expect(&format!(..)) allocates it unconditionally).
        let lt = FROM_LINUX_RESOURCE
            .get(&res)
            .unwrap_or_else(|| panic!("unknown rlimit type {}", res));
        let hostLimit = Limit {
            Cur: FromLinux(hl.rlim_cur),
            Max: FromLinux(hl.rlim_max),
        };
        let defaultLimit = ls.Get(*lt);
        // Warn when the host soft limit is below what we would otherwise set.
        if hostLimit.Cur != INFINITY && hostLimit.Cur < defaultLimit.Cur {
            error!("Host limit is lower than recommended, resource: {}, host: {}, recommended: {}",
                FindName(*lt), hostLimit.Cur, defaultLimit.Cur);
        }
        // Any difference from the default means the host value wins.
        if hostLimit.Cur != defaultLimit.Cur || hostLimit.Max != defaultLimit.Max {
            info!("Setting limit from host, resource: {} {{soft: {}, hard: {}}}", FindName(*lt), hostLimit.Cur, hostLimit.Max);
            ls.SetUnchecked(*lt, hostLimit);
        }
    }
    return ls
}
pub fn CreateLimitSet(spec: &Spec) -> Result<LimitSet> {
let ls = DEFAULT_LIMITS.GetCopy();
for rl in &spec.process.rlimits {
let lt = match FROM_LINUX_RESOURCE.get(&(rl.typ as i32)) {
None => return Err(Error::Common(format!("unknown resource {:?}", rl.typ))),
Some(lt) => *lt,
};
ls.SetUnchecked(lt, Limit {
Cur: rl.soft,
Max: rl.hard,
})
}
return Ok(ls)
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.