text stringlengths 8 4.13M |
|---|
mod hs100;
pub mod timer;
pub use self::hs100::{Location, HS100};
use self::timer::{Rule, RuleList, Timer};
use crate::cloud::{Cloud, CloudInfo};
use crate::config::Config;
use crate::device::Device;
use crate::emeter::{DayStats, Emeter, MonthStats, RealtimeStats};
use crate::error::Result;
use crate::sys::Sys;
use crate::sysinfo::SysInfo;
use crate::time::{DeviceTime, DeviceTimeZone, Time};
use crate::wlan::{AccessPoint, Wlan};
use std::fmt;
use std::net::IpAddr;
use std::time::Duration;
/// A TP-Link Smart Plug.
///
/// # Examples
///
/// ```no_run
/// fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
///
/// plug.turn_on()?;
/// assert_eq!(plug.is_on()?, true);
///
/// plug.turn_off()?;
/// assert_eq!(plug.is_on()?, false);
///
/// Ok(())
/// }
/// ```
pub struct Plug<T> {
    // Underlying device implementation; every public method delegates to it.
    device: T,
}
impl<T: Device> Plug<T> {
/// Turns on the plug.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.turn_on()?;
/// assert_eq!(plug.is_on()?, true);
/// # Ok(())
/// # }
/// ```
pub fn turn_on(&mut self) -> Result<()> {
self.device.turn_on()
}
/// Turns off the plug.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.turn_off()?;
/// assert_eq!(plug.is_on()?, false);
/// # Ok(())
/// # }
/// ```
pub fn turn_off(&mut self) -> Result<()> {
self.device.turn_off()
}
}
impl<T: Sys> Plug<T> {
/// Reboots the plug after the given duration. In case when the delay
/// duration is not provided, the plug is set to reboot after a default
/// delay of 1 second.
///
/// # Examples
/// Reboots the plug after a delay of 3 seconds.
///
/// ```no_run
/// use std::time::Duration;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.reboot(Some(Duration::from_secs(3)))?;
/// # Ok(())
/// # }
/// ```
///
/// Reboots the plug after 1 second.
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.reboot(None)?;
/// # Ok(())
/// # }
/// ```
pub fn reboot(&mut self, delay: Option<Duration>) -> Result<()> {
self.device.reboot(delay)
}
/// Factory resets the plug after the given duration. In case when the delay
/// duration is not provided, the plug is set to reset after a default delay
/// of 1 second.
///
/// # Examples
/// Factory resets the plug after a delay of 3 seconds.
///
/// ```no_run
/// use std::time::Duration;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.factory_reset(Some(Duration::from_secs(3)))?;
/// # Ok(())
/// # }
/// ```
///
/// Factory resets the plug after 1 second.
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// plug.factory_reset(None)?;
/// # Ok(())
/// # }
/// ```
pub fn factory_reset(&mut self, delay: Option<Duration>) -> Result<()> {
self.device.factory_reset(delay)
}
}
impl<T: Time> Plug<T> {
/// Returns the current date and time of the device without the timezone.
/// To get the device timezone, use [`timezone`] method.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// let time = plug.time()?; // e.g. `2020-04-09 22:32:01`
/// # Ok(())
/// # }
/// ```
///
/// [`timezone`]: #method.timezone
pub fn time(&mut self) -> Result<DeviceTime> {
self.device.time()
}
/// Returns the current timezone of the device.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// let timezone = plug.timezone()?;
/// # Ok(())
/// # }
/// ```
pub fn timezone(&mut self) -> Result<DeviceTimeZone> {
self.device.timezone()
}
}
impl<T: Timer> Plug<T> {
    /// Returns the timer rules currently configured on the device.
    pub fn get_timer_rules(&mut self) -> Result<RuleList> {
        self.device.get_timer_rules()
    }
    /// Adds a new timer rule, returning the id the device assigned to it.
    pub fn add_timer_rule(&mut self, rule: Rule) -> Result<String> {
        self.device.add_timer_rule(rule)
    }
    /// Replaces the timer rule identified by `id` with `rule`.
    pub fn edit_timer_rule(&mut self, id: &str, rule: Rule) -> Result<()> {
        self.device.edit_timer_rule(id, rule)
    }
    /// Deletes the timer rule identified by `id`.
    pub fn delete_timer_rule_with_id(&mut self, id: &str) -> Result<()> {
        self.device.delete_timer_rule_with_id(id)
    }
    /// Deletes every timer rule on the device.
    pub fn delete_all_timer_rules(&mut self) -> Result<()> {
        self.device.delete_all_timer_rules()
    }
}
impl<T: Cloud> Plug<T> {
    /// Returns the device's TP-Link cloud account/connection info.
    pub fn get_cloud_info(&mut self) -> Result<CloudInfo> {
        self.device.get_cloud_info()
    }
    /// Binds the device to the given TP-Link cloud account.
    pub fn bind(&mut self, username: &str, password: &str) -> Result<()> {
        self.device.bind(username, password)
    }
    /// Unbinds the device from its current cloud account.
    pub fn unbind(&mut self) -> Result<()> {
        self.device.unbind()
    }
    /// Returns the list of firmware images the cloud offers for this device.
    pub fn get_firmware_list(&mut self) -> Result<Vec<String>> {
        self.device.get_firmware_list()
    }
    /// Points the device at a different cloud server URL.
    pub fn set_server_url(&mut self, url: &str) -> Result<()> {
        self.device.set_server_url(url)
    }
}
impl<T: Wlan> Plug<T> {
    /// Returns the access points the device can see.
    ///
    /// `refresh` asks the device to perform a fresh scan rather than return
    /// cached results; `timeout` bounds how long to wait for the scan.
    pub fn get_scan_info(
        &mut self,
        refresh: bool,
        timeout: Option<Duration>,
    ) -> Result<Vec<AccessPoint>> {
        self.device.get_scan_info(refresh, timeout)
    }
}
impl<T: Emeter> Plug<T> {
    /// Returns the energy meter's instantaneous readings.
    pub fn get_emeter_realtime(&mut self) -> Result<RealtimeStats> {
        self.device.get_emeter_realtime()
    }
    /// Returns per-month usage statistics for the given `year`.
    pub fn get_emeter_month_stats(&mut self, year: u32) -> Result<MonthStats> {
        self.device.get_emeter_month_stats(year)
    }
    /// Returns per-day usage statistics for the given `month` of `year`.
    pub fn get_emeter_day_stats(&mut self, month: u32, year: u32) -> Result<DayStats> {
        self.device.get_emeter_day_stats(month, year)
    }
    /// Erases all accumulated energy meter statistics on the device.
    pub fn erase_emeter_stats(&mut self) -> Result<()> {
        self.device.erase_emeter_stats()
    }
}
impl<T: SysInfo> Plug<T> {
/// Returns the plug's system information.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
/// let sysinfo = plug.sysinfo()?;
/// # Ok(())
/// # }
/// ```
pub fn sysinfo(&mut self) -> Result<T::Info> {
self.device.sysinfo()
}
}
impl Plug<HS100> {
    /// Creates a new Plug instance from the given local address.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let plug = tplink::Plug::new([192, 168, 1, 100]);
    /// ```
    pub fn new<A>(host: A) -> Plug<HS100>
    where
        A: Into<IpAddr>,
    {
        Plug {
            device: HS100::new(host),
        }
    }
    /// Creates a new Plug instance driven by the given `Config` (address,
    /// and whatever transport settings `Config` carries).
    pub fn with_config(config: Config) -> Plug<HS100> {
        Plug {
            device: HS100::with_config(config),
        }
    }
    /// Returns the software version of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let sw_ver = plug.sw_ver()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn sw_ver(&mut self) -> Result<String> {
        self.device.sw_ver()
    }
    /// Returns the hardware version of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let hw_ver = plug.hw_ver()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn hw_ver(&mut self) -> Result<String> {
        self.device.hw_ver()
    }
    /// Returns the model of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let model = plug.model()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn model(&mut self) -> Result<String> {
        self.device.model()
    }
    /// Returns the name (alias) of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let alias = plug.alias()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn alias(&mut self) -> Result<String> {
        self.device.alias()
    }
    /// Returns the mac address of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let mac_address = plug.mac_address()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn mac_address(&mut self) -> Result<String> {
        self.device.mac_address()
    }
    /// Returns the Wi-Fi signal strength (rssi) of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let rssi = plug.rssi()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn rssi(&mut self) -> Result<i64> {
        self.device.rssi()
    }
    /// Returns the location of the device.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let location = plug.location()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn location(&mut self) -> Result<Location> {
        self.device.location()
    }
    /// Returns whether the device is currently switched on.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let is_on = plug.is_on()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn is_on(&mut self) -> Result<bool> {
        self.device.is_on()
    }
    /// Returns whether the device LED is currently switched on.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// let is_led_on = plug.is_led_on()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn is_led_on(&mut self) -> Result<bool> {
        self.device.is_led_on()
    }
    /// Turns on the device's LED.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// plug.turn_on_led()?;
    /// assert_eq!(plug.is_led_on()?, true);
    /// # Ok(())
    /// # }
    /// ```
    pub fn turn_on_led(&mut self) -> Result<()> {
        self.device.turn_on_led()
    }
    /// Turns off the device's LED.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut plug = tplink::Plug::new([192, 168, 1, 100]);
    /// plug.turn_off_led()?;
    /// assert_eq!(plug.is_led_on()?, false);
    /// # Ok(())
    /// # }
    /// ```
    pub fn turn_off_led(&mut self) -> Result<()> {
        self.device.turn_off_led()
    }
    /// Returns whether the device has an energy meter.
    // NOTE(review): delegates to `HS100::has_emeter`; presumably derived
    // from the device's sysinfo — confirm against the hs100 module.
    pub fn has_emeter(&mut self) -> Result<bool> {
        self.device.has_emeter()
    }
}
impl<T: fmt::Debug> fmt::Debug for Plug<T> {
    /// Formats exactly like the wrapped device.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.device, f)
    }
}
|
use ws::{connect, CloseCode};
use std::rc::Rc;
use std::cell::Cell;
use serde_json::{Value};
use crate::api::config::Config;
use crate::api::message::amount::Amount;
use crate::api::message::local_sign_tx::{LocalSignTx};
use crate::base::local_sign::sign_tx::{SignTx};
use crate::base::misc::util::{
downcast_to_string,
check_address, check_secret, check_amount,
};
use crate::api::utils::cast::get_account_sequence;
use crate::api::set_fee_rate::data::{
SetBrokerageTx,
SetBrokerageTxJson,
FeeRateResponse,
SetBrokerageSideKick
};
// Handle for setting an account's brokerage fee rate over the node's
// websocket API.
pub struct FeeRate {
    pub config : Config,
    // Account whose fee rate is being set.
    pub account : String,
    // Signing secret for `account`.
    pub secret : String,
    // Account that receives the brokerage fee.
    pub fee_account: String,
}
impl FeeRate {
pub fn with_params(config: Config, account: String, secret: String, fee_account: String) -> Self {
if check_address(&account).is_none() {
panic!("invalid account.");
}
if check_secret(&secret).is_none() {
panic!("invalid secret");
}
if check_secret(&fee_account).is_none() {
panic!("invalid fee_account");
}
FeeRate {
config: config,
account: account,
secret: secret,
fee_account: fee_account,
}
}
pub fn set_rate<F>(&self, den: u64, num: u64, amount: Amount, op: F)
where F: Fn(Result<FeeRateResponse, SetBrokerageSideKick>) {
if num <= 0 {
panic!("invalid num.");
}
if check_amount(&amount) == false {
panic!("invalid Amount.");
}
let info = Rc::new(Cell::new("".to_string()));
let account_rc = Rc::new(Cell::new(String::from(self.account.as_str())));
let secret_rc = Rc::new(Cell::new(String::from(self.secret.as_str())));
let fee_account_rc = Rc::new(Cell::new(String::from(self.fee_account.as_str())));
let den_rc = Rc::new(Cell::new( den ));
let num_rc = Rc::new(Cell::new( num ));
let amount_rc = Rc::new(Cell::new(amount));
// Get Account Seq
let account_seq = get_account_sequence(&self.config, self.account.clone());
connect(self.config.addr, |out| {
let copy = info.clone();
let account = account_rc.clone();
let secret = secret_rc.clone();
let fee_account = fee_account_rc.clone();
let den = den_rc.clone();
let num = num_rc.clone();
let amount = amount_rc.clone();
let account = account.take();
let tx_json = SetBrokerageTxJson::new(account, fee_account.take(), account_seq, den.take(), num.take(), amount.take());
if self.config.local_sign {
let blob = SignTx::with_params(account_seq, &secret.take()).set_rate(&tx_json);
if let Ok(command) = LocalSignTx::new(blob).to_string() {
out.send(command).unwrap()
}
} else {
if let Ok(command) = SetBrokerageTx::new(secret.take(), tx_json).to_string() {
out.send(command).unwrap()
}
}
move |msg: ws::Message| {
let c = msg.as_text()?;
copy.set(c.to_string());
out.close(CloseCode::Normal)
}
}).unwrap();
let resp = downcast_to_string(info);
if let Ok(x) = serde_json::from_str(&resp) as Result<Value, serde_json::error::Error> {
if let Some(status) = x["status"].as_str() {
if status == "success" {
let x: String = x["result"].to_string();
if let Ok(v) = serde_json::from_str(&x) as Result<FeeRateResponse, serde_json::error::Error> {
op(Ok(v))
}
} else {
if let Ok(v) = serde_json::from_str(&x.to_string()) as Result<SetBrokerageSideKick, serde_json::error::Error> {
op(Err(v))
}
}
}
}
}
}
|
use axum::{extract::Query, routing::get, Router};
use serde::Deserialize;
use std::net::SocketAddr;
// Query-string parameters accepted by `/sample`; both are optional.
#[derive(Debug, Deserialize)]
struct Param {
    keyword: Option<String>,
    size: Option<usize>,
}
// Serves two GET routes on 127.0.0.1:8080 until the process is killed.
#[tokio::main]
async fn main() {
    let router = Router::new()
        .route("/", get(handler))
        .route("/sample", get(params_handler));
    let addr = SocketAddr::from(([127, 0, 0, 1], 8080));
    let server = axum::Server::bind(&addr).serve(router.into_make_service());
    server.await.unwrap();
}
/// Root route: always answers with a static "ok".
async fn handler() -> &'static str {
    "ok"
}
/// `/sample` route: logs and echoes the parsed query parameters,
/// defaulting a missing keyword to "" and a missing size to 0.
async fn params_handler(Query(param): Query<Param>) -> String {
    println!("{:?}", param);
    // `as_deref().unwrap_or("")` borrows instead of allocating a fresh
    // String on every request when `keyword` is present or absent
    // (clippy::or_fun_call — `unwrap_or("".to_string())` allocated eagerly).
    format!(
        "ok:{}, {}",
        param.keyword.as_deref().unwrap_or(""),
        param.size.unwrap_or(0)
    )
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use failure::{format_err, Error, ResultExt};
use std::fs;
use std::io;
use std::path::Path;
use fidl;
use fidl::endpoints::ServiceMarker;
use fidl_fuchsia_netemul_environment::{
EnvironmentOptions, LaunchService, LoggerOptions, ManagedEnvironmentMarker, VirtualDevice,
};
use fidl_fuchsia_netemul_network::{
DeviceProxy_Marker, EndpointBacking, EndpointConfig, EndpointManagerMarker,
NetworkContextMarker,
};
use fidl_fuchsia_netemul_sync::SyncManagerMarker;
use fidl_fuchsia_netstack::NetstackMarker;
use fidl_fuchsia_sys::{
ComponentControllerEvent, ComponentControllerEventStream, ComponentControllerMarker,
LaunchInfo, LauncherMarker, TerminationReason,
};
use fuchsia_async as fasync;
use fuchsia_component::client;
use futures::TryStreamExt;
use structopt::StructOpt;
const EP_NAME: &str = "ep0";
const EP_MOUNT: &str = "class/ethernet/ep0";
const MY_PACKAGE: &str = "fuchsia-pkg://fuchsia.com/netemul_sandbox_test#meta/svc_list.cmx";
const NETSTACK_URL: &str = "fuchsia-pkg://fuchsia.com/netstack#meta/netstack.cmx";
const SKIP_DIRS: &'static [&str] = &["/data", "/pkg"];
const FAKE_SVC_NAME: &str = "fuchsia.some.fake.Service";
const FAKE_SVC_URL: &str = "fuchsia-pkg://fuchsia.com/fake#meta/fake.cmx";
/// Recursively prints every path under `dir`, abbreviating mount points in
/// `SKIP_DIRS` to keep the logs readable.
fn visit_dirs(dir: &Path) -> io::Result<()> {
    let strpath = dir.to_str().unwrap();
    if SKIP_DIRS.contains(&strpath) {
        // skip some of the entries to avoid clogging the logs
        println!("{}/[...]", strpath);
    } else if dir.is_dir() {
        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();
            println!("{}", path.to_str().expect("paths need strings?"));
            // `entry.path()` is already prefixed with `dir`. The previous
            // `visit_dirs(dir.join(path).as_path())` only worked because the
            // walk starts at "/" and `Path::join` discards its receiver when
            // given an absolute path; for a relative root it would have
            // produced `dir/dir/entry`. Recurse on the entry path directly.
            visit_dirs(&path)?;
        }
    }
    Ok(())
}
/// Waits for the child component to terminate and mirrors its exit status:
/// `Ok` only for a clean exit with code 0.
async fn wait_for_component(
    component_events: &mut ComponentControllerEventStream,
) -> Result<(), Error> {
    loop {
        let event = component_events
            .try_next()
            .await
            .context("wait for child component to exit")?
            .ok_or_else(|| format_err!("Child didn't exit cleanly"))?;
        // Ignore every event except termination.
        if let ComponentControllerEvent::OnTerminated {
            return_code: code,
            termination_reason: reason,
        } = event
        {
            println!("Child exited with code {}, reason {}", code, reason as u32);
            return if code != 0 || reason != TerminationReason::Exited {
                Err(format_err!(
                    "Child exited with code {}, reason {}",
                    code,
                    reason as u32
                ))
            } else {
                Ok(())
            };
        }
    }
}
// this is the main body of our test, which
// runs in an executor
/// Root-test body: builds a child managed environment containing a virtual
/// ethernet device and a netstack, launches this same package inside it with
/// `-c`, and mirrors the child's exit status.
async fn run_test() -> Result<(), Error> {
    // connect to NetworkContext and ManagedEnvironment services
    let netctx = client::connect_to_service::<NetworkContextMarker>()?;
    let env = client::connect_to_service::<ManagedEnvironmentMarker>()?;
    // get the endpoint manager
    let (epm, epmch) = fidl::endpoints::create_proxy::<EndpointManagerMarker>()?;
    netctx.get_endpoint_manager(epmch)?;
    // ethertap-backed endpoint, no fixed MAC, standard ethernet MTU
    let mut cfg = EndpointConfig { backing: EndpointBacking::Ethertap, mac: None, mtu: 1500 };
    // create a network endpoint
    let (_, ep) = epm.create_endpoint(EP_NAME, &mut cfg).await?;
    let ep = ep.unwrap().into_proxy()?;
    // get the endpoint proxy to pass to child environment
    let (ep_proxy_client, ep_proxy_server) =
        fidl::endpoints::create_endpoints::<DeviceProxy_Marker>()?;
    ep.get_proxy_(ep_proxy_server)?;
    // prepare a child managed environment
    let (child_env, child_env_server) =
        fidl::endpoints::create_proxy::<ManagedEnvironmentMarker>()?;
    let env_options = EnvironmentOptions {
        name: Some(String::from("child_env")),
        // serve a netstack inside the child environment
        services: Some(vec![LaunchService {
            name: String::from(NetstackMarker::NAME),
            url: String::from(NETSTACK_URL),
            arguments: None,
        }]),
        // pass the endpoint's proxy to create a virtual device
        devices: Some(vec![VirtualDevice {
            path: String::from(EP_MOUNT),
            device: ep_proxy_client,
        }]),
        inherit_parent_launch_services: Some(false),
        logger_options: Some(LoggerOptions {
            enabled: Some(true),
            klogs_enabled: Some(false),
            filter_options: None,
            syslog_output: None,
        }),
    };
    // launch the child env
    env.create_child_environment(child_env_server, env_options)?;
    // launch as a process in the created environment.
    let (launcher, launcher_req) = fidl::endpoints::create_proxy::<LauncherMarker>()?;
    child_env.get_launcher(launcher_req)?;
    // launch info is our own package
    // plus the command line argument to run the child proc
    let mut linfo = LaunchInfo {
        url: String::from(MY_PACKAGE),
        arguments: Some(vec![String::from("-c")]),
        additional_services: None,
        directory_request: None,
        err: None,
        out: None,
        flat_namespace: None,
    };
    let (comp_controller, comp_controller_req) =
        fidl::endpoints::create_proxy::<ComponentControllerMarker>()?;
    // take the event stream before launching so no event can be missed
    let mut component_events = comp_controller.take_event_stream();
    launcher.create_component(&mut linfo, Some(comp_controller_req))?;
    wait_for_component(&mut component_events).await
}
/// Succeeds iff `service` is mounted under `/svc`.
fn check_service(service: &str) -> Result<(), Error> {
    let fs_path = format!("/svc/{}", service);
    if !Path::new(&fs_path).exists() {
        return Err(format_err!("Service {} does not exist", service));
    }
    Ok(())
}
/// Succeeds iff the virtual device `vdev` is mounted under `/vdev`.
fn check_virtual_device(vdev: &str) -> Result<(), Error> {
    let fs_path = format!("/vdev/{}", vdev);
    match Path::new(&fs_path).exists() {
        true => Ok(()),
        false => Err(format_err!("Virtual device {} does not exist", vdev)),
    }
}
/// Succeeds iff `/vdata` is the expected virtual filesystem (detected via
/// its marker file).
fn check_vdata() -> Result<(), Error> {
    let marker = Path::new("/vdata/.THIS_IS_A_VIRTUAL_FS");
    if marker.exists() {
        Ok(())
    } else {
        Err(format_err!("/vdata does not exist"))
    }
}
/// Child-test body: creates a grandchild environment that inherits the
/// parent's launch services plus one fake service, then launches this same
/// package inside it with `-g` and mirrors its exit status.
async fn launch_grandchild() -> Result<(), Error> {
    let env = client::connect_to_service::<ManagedEnvironmentMarker>()?;
    let env_options = EnvironmentOptions {
        name: Some(String::from("grandchild_env")),
        // add some arbitrary service to the grandchild environment
        services: Some(vec![LaunchService {
            name: String::from(FAKE_SVC_NAME),
            url: String::from(FAKE_SVC_URL),
            arguments: None,
        }]),
        devices: None,
        // inherit parent configuration to check if netstack flows through
        // this won't be the same netstack *instance*, though. But it should be
        // launched with the same url as the "child" environment
        inherit_parent_launch_services: Some(true),
        logger_options: Some(LoggerOptions {
            enabled: Some(true),
            klogs_enabled: Some(false),
            filter_options: None,
            syslog_output: None,
        }),
    };
    let (child_env, child_env_server) =
        fidl::endpoints::create_proxy::<ManagedEnvironmentMarker>()?;
    // launch the grandchild env
    env.create_child_environment(child_env_server, env_options)?;
    // launch info is our own package
    // plus the command line argument to run the grandchild proc
    let mut linfo = LaunchInfo {
        url: String::from(MY_PACKAGE),
        arguments: Some(vec![String::from("-g")]),
        additional_services: None,
        directory_request: None,
        err: None,
        out: None,
        flat_namespace: None,
    };
    // launch myself as a process in the created environment.
    let (launcher, launcher_req) = fidl::endpoints::create_proxy::<LauncherMarker>()?;
    child_env.get_launcher(launcher_req)?;
    let (comp_controller, comp_controller_req) =
        fidl::endpoints::create_proxy::<ComponentControllerMarker>()?;
    // take the event stream before launching so no event can be missed
    let mut component_events = comp_controller.take_event_stream();
    launcher.create_component(&mut linfo, Some(comp_controller_req))?;
    wait_for_component(&mut component_events).await
}
/// Entry point. The same binary plays three roles, selected by flags:
/// root test (no flag), child (`-c`), grandchild (`-g`).
fn main() -> Result<(), Error> {
    // make sure all services exist!
    check_vdata()?;
    check_service(NetworkContextMarker::NAME)?;
    check_service(ManagedEnvironmentMarker::NAME)?;
    check_service(SyncManagerMarker::NAME)?;
    #[derive(StructOpt, Debug)]
    struct Opt {
        #[structopt(short = "c")]
        is_child: bool,
        #[structopt(short = "g")]
        is_grandchild: bool,
    }
    let opt = Opt::from_args();
    // the same binary is used for the root test
    // and the test in child envs
    // a flag is passed on the command line to change
    // the code path
    if opt.is_child {
        let mut executor = fasync::Executor::new().context("Error creating executor")?;
        println!("Running as child");
        // print whole namespace to console (for manual testing)
        visit_dirs(Path::new("/"))?;
        // check that the virtual ethernet device is there
        check_virtual_device(EP_MOUNT)?;
        // check that netstack was served
        check_service(NetstackMarker::NAME)?;
        // launch grandchild service to test environment inheritance
        executor.run_singlethreaded(launch_grandchild())
    } else if opt.is_grandchild {
        println!("Running as grandchild");
        // assert that netstack was served (should be present due to inheritance)
        check_service(NetstackMarker::NAME)?;
        // and the fake service:
        check_service(FAKE_SVC_NAME)?;
        Ok(())
    } else {
        let mut executor = fasync::Executor::new().context("Error creating executor")?;
        executor.run_singlethreaded(run_test())
    }
}
|
use crate::vec2::{Vec2, vec2};
use crate::Framebuffer;
use crate::data::DataDef;
mod player;
// One entity slot: shared per-entity state plus the kind-specific variant.
pub struct Entity {
    pub data: EntityData,
    pub kind: EntityKind
}
// Fixed pool of 32 entity slots; empty slots have kind `EntityKind::None`.
pub struct EntitySet {
    pub inner: [Entity; 32],
}
// Compact spawn record from level data: tile coordinates plus the numeric
// entity kind consumed by `Entity::init`.
pub struct EntityEntry {
    pub x: u8,
    pub y: u8,
    pub kind: u8
}
// One drawable layer of an entity: a 32x32 cell taken from `source` at
// index `frame`, drawn at `offset` relative to the entity's visual position.
pub struct Sprite {
    active: bool,
    source: DataDef,
    offset: Vec2<i32>,
    frame: i32
}
// Kind-independent per-entity state.
pub struct EntityData {
    pub state: EntityState,
    // Position/velocity appear to be fixed point with 8 fractional bits
    // per pixel (`visual_pos` shifts right by 8) — confirm before relying
    // on exact units.
    pub pos: Vec2<i32>,
    pub vel: Vec2<i32>,
    pub on_ground: bool,
    // Raw pointers into the foreground tilemap, written by `physics`.
    pub sensors: [*mut u8; 6],
    pub sensor_pos: [Vec2<i32>; 6],
    // Horizontal/vertical mirroring flags for rendering and facing.
    pub flip: Vec2<bool>,
    pub next_pos: Vec2<i32>,
    // Collision half-extents, in the same fixed-point units as `pos`.
    pub radius: Vec2<i32>,
    pub sprite: Sprite,
    pub sprites: [Sprite; 4],
    pub anim_timer: i32,
}
// `repr(u8)` is load-bearing: `EntityKind::init_kind` writes the
// discriminant byte directly through a raw pointer.
#[repr(u8)]
pub enum EntityKind {
    None,
    Player(player::Player),
    Tomato,
    Bee,
    Snail,
    Platform
}
// Lifecycle state driving the branch taken in `EntityKind::process`.
pub enum EntityState {
    Alive,
    Squished,
    Stunned,
    Kicked,
    Dead
}
impl Entity {
    /// Initializes this slot for entity type `id`. Order matters: the
    /// discriminant byte is written first so the kind-specific `init` sees
    /// the correct variant.
    pub fn init(&mut self, id: u8) {
        self.kind.init_kind(id);
        self.kind.init(&mut self.data);
    }
}
impl EntityKind {
    /// Overwrites this enum's discriminant byte with `id`.
    ///
    /// SAFETY: relies on `#[repr(u8)]` placing the tag in the first byte.
    /// NOTE(review): `id` must be a valid discriminant, and any payload of
    /// the previously-stored variant is neither dropped nor re-initialized
    /// here — callers are expected to follow up with `init`. Confirm that
    /// every call site upholds this.
    pub fn init_kind(&mut self, id: u8) {
        unsafe { (self as *mut _ as *mut u8).write(id); }
    }
    /// Kind-specific spawn setup: seeds velocity, hitbox radius and the
    /// sprite slots for the freshly-assigned variant.
    pub fn init(&mut self, data: &mut EntityData) {
        match self {
            EntityKind::None => {},
            EntityKind::Player(p) => {
                p.init();
                data.vel = vec2(0,0);
                data.radius = vec2(0x400, 0xE00);
                // Player uses two sprite layers from the same sheet.
                data.sprites[0].active = true;
                data.sprites[0].source = crate::data::TOOTHPASTE;
                data.sprites[0].offset = vec2(-16, -16);
                data.sprites[0].frame = 0x10;
                data.sprites[1].active = true;
                data.sprites[1].source = crate::data::TOOTHPASTE;
                data.sprites[1].offset = vec2(-16, -16);
                data.sprites[1].frame = 0;
            },
            EntityKind::Tomato => {
                data.radius = vec2(0x400, 0x400);
                // Start walking left.
                data.flip.x = true;
                data.sprites[0].active = true;
                data.sprites[0].source = crate::data::ENTITIES;
                data.sprites[0].offset = vec2(-16, -26);
                data.sprites[0].frame = 0;
            },
            _ => {}
        }
    }
    /// Per-frame update. Entities far outside the camera window are skipped;
    /// entities that fall below the level are despawned.
    pub fn process(&mut self, data: &mut EntityData) {
        // Cull: skip entities outside a window around the camera.
        let camera = crate::state().as_level().camera;
        let x_delta = data.pos.x - camera.x * 0x100;
        if x_delta < -0x2000 || x_delta > 0x16000 { return; }
        // Despawn anything that has fallen below the level.
        let fg = &crate::state().as_level().fg;
        let level_h = fg.height() as i32 * 16 * 256;
        if data.pos.y > level_h { *self = EntityKind::None; return; }
        match data.state {
            EntityState::Squished => {
                // Count down the squish animation, then free the slot.
                data.anim_timer -= 1;
                if data.anim_timer == 0 { *self = EntityKind::None; data.pos = vec2(0,0); }
            }
            EntityState::Dead => {
                // Dead entities fall with gravity, drawn upside down.
                data.pos += data.vel;
                data.vel.y += 0x30;
                data.flip.y = true;
            }
            EntityState::Alive => match self {
                EntityKind::None => {},
                EntityKind::Player(p) => p.run(data),
                EntityKind::Tomato => {
                    // Walk in the facing direction; gravity while airborne.
                    if data.flip.x {
                        data.vel.x = -0x60;
                    } else {
                        data.vel.x = 0x60;
                    }
                    if data.on_ground {
                        data.vel.y = 0;
                    } else {
                        data.vel.y += 0x30;
                    }
                    // 4-phase walk cycle (frames 0,1,2,1), 10 ticks a phase.
                    data.anim_timer += 0x1;
                    data.sprites[0].frame = match data.anim_timer / 10 & 3 {
                        0 => 0,
                        1 => 1,
                        2 => 2,
                        3 => 1,
                        _ => panic!()
                    };
                    data.physics();
                    // `physics` zeroes x velocity on a wall hit: turn around.
                    if data.vel.x == 0 { data.flip.x = !data.flip.x; }
                }
                _ => {}
            }
            _ => {}
        }
    }
}
impl EntitySet {
    /// Clears every slot: kind `None`, zeroed data.
    pub fn init(&mut self) {
        for i in self.inner.iter_mut() {
            i.kind = EntityKind::None;
            i.data.init();
        }
    }
    /// Spawns entities from a level entry list; slots beyond the list's
    /// length are left empty.
    pub fn init_with(&mut self, list: &[EntityEntry]) {
        for idx in 0..32 {
            self.inner[idx].data.init();
            let e = &mut self.inner[idx];
            if let Some(c) = list.get(idx) {
                e.init(c.kind);
                // Tile coords -> fixed-point world position: centered in the
                // tile horizontally, resting on the tile floor given the
                // hitbox radius set by `init`.
                e.data.pos = vec2(c.x as i32, c.y as i32) * 0x1000 + vec2(0x800, 0xF00-e.data.radius.y);
            } else {
                e.kind = EntityKind::None;
            }
        }
    }
    /// Steps every slot for one frame (empty slots no-op inside `process`).
    pub fn process(&mut self) {
        for i in self.inner.iter_mut() {
            i.kind.process(&mut i.data);
        }
    }
    /// Draws every occupied slot.
    pub fn render(&self, camera: Vec2<i32>, fb: &mut Framebuffer) {
        for i in self.inner.iter() {
            if !matches!(i.kind, EntityKind::None) {
                i.data.render(camera, fb);
            }
        }
    }
}
impl EntityData {
    /// Zero-initializes every field.
    ///
    /// Relies on all-zero bytes being valid for every field (the raw sensor
    /// pointers become null and must not be dereferenced before `physics`
    /// refills them).
    pub fn init(&mut self) {
        *self = unsafe { core::mem::zeroed() };
    }
    /// Screen-space position: `pos` with its 8 fractional bits dropped.
    pub fn visual_pos(&self) -> Vec2<i32> {
        self.pos >> 8
    }
    /// Axis-aligned bounding-box overlap test against another entity.
    pub fn intersects(&self, other: &EntityData) -> bool {
        let dist = (self.pos - other.pos).map(|c| c.abs());
        let rad = self.radius + other.radius;
        dist.x < rad.x && dist.y < rad.y
    }
    /// Draws every active sprite layer as a 32x32 cell, honoring the flip
    /// flags; palette index 0 is transparent.
    pub fn render(&self, camera: Vec2<i32>, fb: &mut Framebuffer) {
        for spr in self.sprites.iter().filter(|c| c.active) {
            let data = spr.source.data();
            let pal = spr.source.pal();
            for x in 0..32 {
                for y in 0..32 {
                    let pos = self.visual_pos() + vec2(x as i32, y as i32) - camera + spr.offset;
                    // Mirror the texel lookup, not the screen position.
                    // BUGFIX: the old code did `x = 31 - x` on the loop
                    // counters in place; the mutation of `x` persisted across
                    // inner-loop iterations, corrupting mirrored sprites on
                    // alternating rows/columns. Flipped indices are now
                    // computed into locals.
                    let sx = if self.flip.x { 31 - x } else { x };
                    let sy = if self.flip.y { 31 - y } else { y };
                    let px = data[sx + sy * 32 + spr.frame as usize * 32 * 32];
                    if px != 0 {
                        let px = pal[px as usize];
                        fb.pixel(pos).map(|c| *c = px);
                    }
                }
            }
            /* debug visualization, kept for reference:
            for x in -self.radius.x/256..self.radius.x/256+1 {
                for y in -self.radius.y/256..self.radius.y/256+1 {
                    let mut pos = self.visual_pos() + vec2(x as i32, y as i32) - camera;
                    fb.pixel(pos).map(|c| *c = *c & 0x7FFFFFFF);
                }
            }
            let pos = self.visual_pos() - camera;
            fb.pixel(pos).map(|c| *c = 0xFFFF2020);
            for i in self.sensor_pos.iter() {
                let pos = *i / 256 - camera;
                fb.pixel(pos).map(|c| *c = 0xFF0000FF);
            }*/
        }
    }
    /// Axis-separated tile collision: advances `pos` by `vel`, probing three
    /// sensors per axis against the foreground tilemap. Updates `on_ground`,
    /// `sensors` and `sensor_pos` as side effects; zeroes the velocity
    /// component on a solid hit.
    pub fn physics(&mut self) {
        let fg = &mut crate::state().as_level().fg;
        let mut next_pos = self.pos;
        let last_on_ground = self.on_ground;
        self.on_ground = false;
        // axis 0 = x, axis 1 = y; resolved independently.
        for axis in 0..2 {
            next_pos[axis] += self.vel[axis];
            let direction = self.vel[axis] >= 0;
            // Probe at the leading edge of the hitbox.
            let mut sensor_pos = if direction {
                self.radius[axis]
            } else {
                -self.radius[axis]
            };
            // Extra downward reach while grounded keeps the entity glued to
            // the floor across small steps.
            if axis == 1 && last_on_ground { sensor_pos += 256; }
            for i in -1..=1 {
                let p = axis * 3 + (i + 1) as usize;
                let mut offset = self.radius * i;
                offset[axis] = sensor_pos;
                self.sensor_pos[p] = ((next_pos + offset) & !0xFFF) + vec2(0x800, 0x800);
                let sensor = (next_pos + offset) / 16 / 256;
                if !fg.in_bounds(sensor) { continue; }
                let block = unsafe { &mut *fg.mut_ptr_at(sensor) };
                self.sensors[p] = block;
                let coll = crate::foreground::collision(*block);
                if coll.is_semisolid() {
                    // Semisolid: only lands when moving down near the tile top.
                    if direction && axis == 1 && (next_pos.y + sensor_pos) & 0xFFF < 0x400 {
                        self.on_ground = true;
                        next_pos.y = ((next_pos.y + sensor_pos) & !0xFFF) - self.radius.y - 0x100;
                    }
                } else if coll.is_solid() {
                    if direction && axis == 1 {
                        // Landing on a solid tile: snap to its surface.
                        self.on_ground = true;
                        next_pos.y = ((next_pos.y + sensor_pos) & !0xFFF) - self.radius.y - 0x100;
                    } else {
                        // Wall/ceiling: cancel the movement on this axis.
                        next_pos[axis] = self.pos[axis]; self.vel[axis] = 0;
                    }
                }
            }
        }
        self.pos = next_pos;
    }
}
|
mod computer;
use computer::Wire;
/// Parses one wire per input line.
#[aoc_generator(day3)]
fn generator(input: &str) -> Vec<Wire> {
    // The function can be passed directly instead of wrapping it in a
    // closure (clippy::redundant_closure).
    input.lines().map(Wire::from_str).collect()
}
/// Part 1: Manhattan distance from the origin to the closest intersection
/// of the two wires.
///
/// # Panics
/// Panics when `input` has fewer than two wires or they never intersect.
#[aoc(day3, part1)]
fn part_one(input: &[Wire]) -> i32 {
    // Borrow the wires instead of cloning them wholesale — the clones were
    // pure overhead (clippy::redundant_clone).
    let first_wire = &input[0];
    let second_wire = &input[1];
    let intersections = first_wire.find_intersections(second_wire);
    intersections
        .into_iter()
        .map(|intersection| intersection.0.abs() + intersection.1.abs())
        .min()
        .unwrap()
}
/// Part 2: minimum combined number of steps both wires take to reach an
/// intersection.
///
/// # Panics
/// Panics when `input` has fewer than two wires or they never intersect.
#[aoc(day3, part2)]
fn part_two(input: &[Wire]) -> usize {
    // Borrow instead of cloning whole wires (clippy::redundant_clone).
    let first_wire = &input[0];
    let second_wire = &input[1];
    let intersections = first_wire.find_intersections(second_wire);
    intersections
        .into_iter()
        .map(|intersection| first_wire.steps_to(intersection) + second_wire.steps_to(intersection))
        .min()
        .unwrap()
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): the `\` after the opening quote strips the first
    // newline (and following whitespace), but the newline between the two
    // wire lines is literal. If those lines are indented in the source
    // file, the indentation becomes part of the input — confirm
    // `Wire::from_str` tolerates leading whitespace, or keep these lines
    // flush-left.
    #[test]
    fn sample_one() {
        let result = part_one(&generator("\
U7,R6,D4,L4
R8,U5,L5,D3"));
        assert_eq!(6, result);
    }
    #[test]
    fn sample_two() {
        let result = part_two(&generator("\
U7,R6,D4,L4
R8,U5,L5,D3"));
        assert_eq!(30, result);
    }
}
#![allow(unused_imports, unused_qualifications, unused_extern_crates)]
extern crate chrono;
use serde::{Serialize, Deserialize};
use serde::ser::Serializer;
use std::collections::HashMap;
use std::string::ParseError;
/// API error payload carrying a human-readable message.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature = "conversion", derive(LabelledGeneric))]
pub struct Error {
    #[serde(rename = "message")]
    pub message: String,
}
impl Error {
    /// Builds an `Error` carrying `message`.
    // Field-init shorthand and a cleaned-up signature (the generated
    // trailing comma after the parameter list is dropped).
    pub fn new(message: String) -> Error {
        Error { message }
    }
}
/// Context returned when starting host-key initialization.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature = "conversion", derive(LabelledGeneric))]
pub struct HostKeyInitContext {
    #[serde(rename = "suggested_key_name")]
    pub suggested_key_name: String,
    #[serde(rename = "account_id")]
    #[serde(skip_serializing_if="Option::is_none")]
    pub account_id: Option<String>,
}
impl HostKeyInitContext {
    /// Builds a context with the suggested key name; `account_id` starts
    /// out unset.
    pub fn new(suggested_key_name: String) -> HostKeyInitContext {
        HostKeyInitContext {
            suggested_key_name,
            account_id: None,
        }
    }
}
/// Payload used to finalize host-key initialization.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature = "conversion", derive(LabelledGeneric))]
pub struct HostKeyInitFinalizationContext {
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "private")]
    pub private: String,
}
impl HostKeyInitFinalizationContext {
    /// Builds the finalization payload from the request id and private key.
    pub fn new(id: String, private: String) -> HostKeyInitFinalizationContext {
        HostKeyInitFinalizationContext { id, private }
    }
}
/// Wrapper around a single key string.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature = "conversion", derive(LabelledGeneric))]
pub struct KeyData {
    #[serde(rename = "key")]
    pub key: String,
}
impl KeyData {
    /// Wraps `key` in a `KeyData`.
    pub fn new(key: String) -> KeyData {
        KeyData { key }
    }
}
/// Newtype over `String` identifying a key request; the conversions below
/// make it interchangeable with plain strings at API boundaries.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize, Deserialize)]
#[cfg_attr(feature = "conversion", derive(LabelledGeneric))]
pub struct KeyRequestId(String);
impl ::std::convert::From<String> for KeyRequestId {
    fn from(x: String) -> Self {
        KeyRequestId(x)
    }
}
impl std::str::FromStr for KeyRequestId {
    // `std::string::ParseError` is uninhabited, so parsing never fails.
    type Err = ParseError;
    fn from_str(x: &str) -> Result<Self, Self::Err> {
        Ok(KeyRequestId(x.to_string()))
    }
}
impl ::std::convert::From<KeyRequestId> for String {
    fn from(x: KeyRequestId) -> Self {
        x.0
    }
}
impl ::std::ops::Deref for KeyRequestId {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}
impl ::std::ops::DerefMut for KeyRequestId {
    fn deref_mut(&mut self) -> &mut String {
        &mut self.0
    }
}
|
//! A lot of this module is copied from LD44.
use amethyst::renderer::sprite::SpriteSheetHandle;
use json::JsonValue;
use crate::components::prelude::*;
/// Generate an Animation from the given properties.
///
/// Reads the `animation_sprite_ids` and `animation_delays_ms` tile
/// properties (comma-separated lists) and builds an `Animation` from them.
/// Returns `None` when neither property is present.
///
/// # Panics
/// Panics when only one of the two properties is given, or when a list
/// entry fails to parse as a number.
pub fn animation_from(
    spritesheet_handle: SpriteSheetHandle,
    properties: &JsonValue,
) -> Option<Animation> {
    // Parse the two comma-separated property strings into parallel vectors
    // of sprite ids and per-frame delays.
    fn sprites_and_delays_from(
        str_sprite_ids: &str,
        str_delays_ms: &str,
    ) -> (Vec<usize>, Vec<u64>) {
        (
            str_sprite_ids
                .split(',')
                .map(|str_id| {
                    // `unwrap_or_else` avoids building the panic message on
                    // the happy path (clippy: expect_fun_call).
                    str_id.trim().parse::<usize>().unwrap_or_else(|_| {
                        panic!(
                            "Couldn't parse string to usize '{}' in '{}' \
                             (animation_sprite_ids)",
                            str_id, str_sprite_ids
                        )
                    })
                })
                .collect(),
            str_delays_ms
                .split(',')
                .map(|str_ms| {
                    str_ms.trim().parse::<u64>().unwrap_or_else(|_| {
                        panic!(
                            "Couldn't parse string to u64 '{}' in '{}' \
                             (animation_delays_ms)",
                            str_ms, str_delays_ms
                        )
                    })
                })
                .collect(),
        )
    }

    match (
        properties["animation_sprite_ids"].as_str(),
        properties["animation_delays_ms"].as_str(),
    ) {
        // Having only one of the two properties is a content error.
        (Some(_), None) | (None, Some(_)) => panic!(
            "Tile with animation needs both properties `animation_sprite_ids` \
             and `animation_delays_ms`"
        ),
        (Some(str_sprite_ids), Some(str_delays_ms)) => {
            let (sprite_ids, delays_ms) =
                sprites_and_delays_from(str_sprite_ids, str_delays_ms);
            Some(
                Animation::new()
                    .default_sprite_sheet_handle(spritesheet_handle)
                    .sprite_ids(sprite_ids)
                    .delays_ms(delays_ms)
                    .build(),
            )
        }
        (None, None) => None,
    }
}
/// Generate an `AnimationsContainer` from the given animations ron file.
///
/// Every animation listed in the file is inserted into the container; the
/// optional `current` entry selects the initially active animation.
pub fn animations_container_from_file<T>(
    file: T,
    spritesheet_handle: SpriteSheetHandle,
) -> AnimationsContainer
where
    T: ToString,
{
    let config = load_animations_container_config(file);
    let mut container = AnimationsContainer::new();

    for anim_conf in config.animations {
        let mut builder = Animation::new()
            .default_sprite_sheet_handle(spritesheet_handle.clone());
        if let Some(default_delay) = anim_conf.default_delay_ms {
            builder = builder.default_delay_ms(default_delay);
        }
        if let Some(delays) = anim_conf.delays_ms {
            builder = builder.delays_ms(delays);
        }
        builder = builder.sprite_ids(anim_conf.sprite_ids);
        container = container.insert(anim_conf.name, builder.build());
    }

    if let Some(current) = config.current {
        container = container.current(current);
    }

    container.build()
}
/// Configuration for a single animation, as read from the ron file.
#[derive(Deserialize)]
struct AnimationConfig {
    // Name under which the animation is registered in the container.
    pub name: String,
    // Spritesheet sprite indices making up the animation's frames.
    pub sprite_ids: Vec<usize>,
    // Optional explicit per-frame delays.
    pub delays_ms: Option<Vec<u64>>,
    // Optional delay applied as the animation's default frame delay.
    pub default_delay_ms: Option<u64>,
}
/// Top-level structure of the animations ron file.
#[derive(Deserialize)]
struct AnimationsContainerConfig {
    // All animations to insert into the container.
    pub animations: Vec<AnimationConfig>,
    // Optional name of the animation to start with.
    pub current: Option<String>,
}
/// Load and parse the animations-container config from the given ron file.
///
/// # Panics
/// Panics when the file cannot be opened or fails to parse; the panic
/// message names the offending file and the underlying error.
fn load_animations_container_config<T>(filepath: T) -> AnimationsContainerConfig
where
    T: ToString,
{
    use std::fs::File;
    let path = filepath.to_string();
    // `unwrap_or_else` defers building the message to the failure path and
    // lets us include the path and the I/O / parse error for diagnosis.
    let file = File::open(&path)
        .unwrap_or_else(|e| panic!("Couldn't open animations file '{}': {}", path, e));
    ron::de::from_reader(file)
        .unwrap_or_else(|e| panic!("Failed parsing animations file '{}': {}", path, e))
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
// Pull in the `windows` import library so the linker can resolve any system
// symbols referenced by this (generated) bindings module; the extern block
// itself is intentionally empty.
#[link(name = "windows")]
extern "system" {}
/// Generated Windows API constant wrapper; the inner `i32` is the ABI value
/// identifying an alternate text-normalization form.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct AlternateNormalizationFormat(pub i32);

impl AlternateNormalizationFormat {
    pub const NotNormalized: Self = Self(0i32);
    pub const Number: Self = Self(1i32);
    pub const Currency: Self = Self(3i32);
    pub const Date: Self = Self(4i32);
    pub const Time: Self = Self(5i32);
}
// Opaque handles to WinRT objects from generated bindings — presumably raw
// COM interface pointers; all interaction goes through the Windows runtime.
pub type AlternateWordForm = *mut ::core::ffi::c_void;
pub type SelectableWordSegment = *mut ::core::ffi::c_void;
pub type SelectableWordSegmentsTokenizingHandler = *mut ::core::ffi::c_void;
pub type SelectableWordsSegmenter = *mut ::core::ffi::c_void;
pub type SemanticTextQuery = *mut ::core::ffi::c_void;
pub type TextConversionGenerator = *mut ::core::ffi::c_void;
pub type TextPhoneme = *mut ::core::ffi::c_void;
pub type TextPredictionGenerator = *mut ::core::ffi::c_void;
/// Generated Windows API flags wrapper over a `u32` bit mask selecting which
/// kinds of text-prediction results are requested.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct TextPredictionOptions(pub u32);

impl TextPredictionOptions {
    pub const None: Self = Self(0u32);
    pub const Predictions: Self = Self(1u32);
    pub const Corrections: Self = Self(2u32);
}
// Opaque handle to a WinRT object (generated binding; raw pointer).
pub type TextReverseConversionGenerator = *mut ::core::ffi::c_void;
/// A span of text expressed as a start offset plus length; `#[repr(C)]`
/// keeps the field layout identical to the Windows `TextSegment` ABI.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct TextSegment {
    pub StartPosition: u32,
    pub Length: u32,
}
/// Generated Windows API constant wrapper for the Unicode general category
/// of a code point; the inner `i32` is the ABI value.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct UnicodeGeneralCategory(pub i32);

impl UnicodeGeneralCategory {
    pub const UppercaseLetter: Self = Self(0i32);
    pub const LowercaseLetter: Self = Self(1i32);
    pub const TitlecaseLetter: Self = Self(2i32);
    pub const ModifierLetter: Self = Self(3i32);
    pub const OtherLetter: Self = Self(4i32);
    pub const NonspacingMark: Self = Self(5i32);
    pub const SpacingCombiningMark: Self = Self(6i32);
    pub const EnclosingMark: Self = Self(7i32);
    pub const DecimalDigitNumber: Self = Self(8i32);
    pub const LetterNumber: Self = Self(9i32);
    pub const OtherNumber: Self = Self(10i32);
    pub const SpaceSeparator: Self = Self(11i32);
    pub const LineSeparator: Self = Self(12i32);
    pub const ParagraphSeparator: Self = Self(13i32);
    pub const Control: Self = Self(14i32);
    pub const Format: Self = Self(15i32);
    pub const Surrogate: Self = Self(16i32);
    pub const PrivateUse: Self = Self(17i32);
    pub const ConnectorPunctuation: Self = Self(18i32);
    pub const DashPunctuation: Self = Self(19i32);
    pub const OpenPunctuation: Self = Self(20i32);
    pub const ClosePunctuation: Self = Self(21i32);
    pub const InitialQuotePunctuation: Self = Self(22i32);
    pub const FinalQuotePunctuation: Self = Self(23i32);
    pub const OtherPunctuation: Self = Self(24i32);
    pub const MathSymbol: Self = Self(25i32);
    pub const CurrencySymbol: Self = Self(26i32);
    pub const ModifierSymbol: Self = Self(27i32);
    pub const OtherSymbol: Self = Self(28i32);
    pub const NotAssigned: Self = Self(29i32);
}
/// Generated Windows API constant wrapper for the Unicode numeric type of a
/// code point; the inner `i32` is the ABI value.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct UnicodeNumericType(pub i32);

impl UnicodeNumericType {
    pub const None: Self = Self(0i32);
    pub const Decimal: Self = Self(1i32);
    pub const Digit: Self = Self(2i32);
    pub const Numeric: Self = Self(3i32);
}
// Opaque handles to WinRT objects from generated bindings — presumably raw
// COM interface pointers.
pub type WordSegment = *mut ::core::ffi::c_void;
pub type WordSegmentsTokenizingHandler = *mut ::core::ffi::c_void;
pub type WordsSegmenter = *mut ::core::ffi::c_void;
|
//! Build configuration
extern crate tinyrick;
/// Run clippy over the crate.
fn clippy() {
    tinyrick::exec!("cargo", &["clippy"]);
}
/// Generate rustdoc documentation.
fn doc() {
    tinyrick::exec!("cargo", &["doc"]);
}
/// Static code validation: depends on `doc` and `clippy`.
fn lint() {
    tinyrick::deps(doc);
    tinyrick::deps(clippy);
}
/// Lint, and then install artifacts.
fn install() {
    tinyrick::deps(lint);
    // --force overwrites a previously installed binary of the same name.
    tinyrick::exec!("cargo", &["install", "--force", "--path", "."]);
}
/// Uninstall artifacts.
fn uninstall() {
    tinyrick::exec!("cargo", &["uninstall"]);
}
/// Lint, and then run unit tests.
fn unit_test() {
    tinyrick::deps(lint);
    tinyrick::exec!("cargo", &["test"]);
}
/// Install (which lints), then exercise the installed binary end to end.
fn integration_test() {
    tinyrick::deps(install);
    // `add_two -n 2` must print "4"; a bare invocation must exit non-zero.
    assert!(tinyrick::exec_stdout_utf8!("add_two", &["-n", "2"]) == "4\n");
    assert!(!tinyrick::exec_status!("add_two").success());
}
/// Lint, and then run unit and integration tests.
fn test() {
    tinyrick::deps(unit_test);
    tinyrick::deps(integration_test);
}
/// Lint, test, and build debug binaries.
fn build_debug() {
    tinyrick::deps(test);
    tinyrick::exec!("cargo", &["build"]);
}
/// Lint, test, and build release binaries.
fn build_release() {
    tinyrick::deps(test);
    tinyrick::exec!("cargo", &["build", "--release"]);
}
/// Lint, test, and then build debug and release binaries.
fn build() {
    tinyrick::deps(build_debug);
    tinyrick::deps(build_release);
}
/// Show the crate name/version banner.
fn banner() {
    // Name and version are baked in at compile time from Cargo metadata.
    println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"));
}
/// Publish to the crate repository.
fn publish() {
    tinyrick::exec!("cargo", &["publish"]);
}
/// Run cargo clean.
fn clean_cargo() {
    tinyrick::exec!("cargo", &["clean"]);
}
/// Clean workspaces.
fn clean() {
    tinyrick::deps(clean_cargo);
}
/// CLI entrypoint: registers all tasks with tinyrick.
fn main() {
    // Tasks that leave no artifact on disk — presumably marked phony so
    // they always re-run (mirrors make's .PHONY; semantics defined by
    // tinyrick).
    tinyrick::phony!(
        uninstall,
        clean_cargo,
        clean
    );
    // Registers the task list; `build` (before the `;`) is the default task.
    tinyrick::wubba_lubba_dub_dub!(
        build;
        clippy,
        lint,
        doc,
        install,
        uninstall,
        unit_test,
        integration_test,
        test,
        build_debug,
        build_release,
        build,
        banner,
        publish,
        clean_cargo,
        clean
    );
}
|
mod control_handle;
mod control_base;
mod window;
mod button;
mod check_box;
mod radio_button;
mod text_input;
mod label;
mod image_frame;
#[cfg(feature = "textbox")]
mod text_box;
#[cfg(feature = "rich-textbox")]
mod rich_text_box;
#[cfg(feature = "rich-textbox")]
mod rich_label;
#[cfg(feature = "status-bar")]
mod status_bar;
#[cfg(feature = "tooltip")]
mod tooltip;
#[cfg(feature = "trackbar")]
mod track_bar;
#[cfg(feature = "menu")]
mod menu;
#[cfg(feature = "timer")]
mod timer;
#[cfg(feature = "animation-timer")]
mod animation_timer;
#[cfg(feature = "notice")]
mod notice;
#[cfg(feature = "combobox")]
mod combo_box;
#[cfg(feature = "listbox")]
mod list_box;
#[cfg(feature = "datetime-picker")]
mod date_picker;
#[cfg(feature = "progress-bar")]
mod progress_bar;
#[cfg(feature = "tabs")]
mod tabs;
#[cfg(feature = "tree-view")]
mod treeview;
#[cfg(all(feature = "tree-view-iterator", feature = "tree-view") )]
mod treeview_iterator;
#[cfg(feature = "tray-notification")]
mod tray_notification;
#[cfg(feature = "message-window")]
mod message_window;
#[cfg(feature = "list-view")]
mod list_view;
#[cfg(feature = "number-select")]
mod number_select;
#[cfg(feature = "extern-canvas")]
mod extern_canvas;
#[cfg(feature = "frame")]
mod frame;
#[cfg(feature = "scroll-bar")]
mod scroll_bar;
#[cfg(feature = "plotting")]
mod plotters;
mod handle_from_control;
pub use control_handle::ControlHandle;
pub use control_base::{ControlBase, HwndBuilder, TimerBuilder as BaseTimerBuilder, OtherBuilder};
pub use window::{Window, WindowBuilder, WindowFlags};
pub use button::{Button, ButtonBuilder, ButtonFlags};
pub use check_box::{CheckBox, CheckBoxBuilder, CheckBoxState, CheckBoxFlags};
pub use radio_button::{RadioButton, RadioButtonBuilder, RadioButtonState, RadioButtonFlags};
pub use text_input::{TextInput, TextInputBuilder, TextInputFlags};
pub use label::{Label, LabelBuilder, LabelFlags};
pub use image_frame::{ImageFrame, ImageFrameBuilder, ImageFrameFlags};
#[cfg(feature = "textbox")]
pub use text_box::{TextBox, TextBoxBuilder, TextBoxFlags};
#[cfg(feature = "rich-textbox")]
pub use rich_text_box::*;
#[cfg(feature = "rich-textbox")]
pub use rich_label::*;
#[cfg(feature = "status-bar")]
pub use status_bar::{StatusBar, StatusBarBuilder};
#[cfg(feature = "tooltip")]
pub use tooltip::{Tooltip, TooltipBuilder, TooltipIcon};
#[cfg(feature = "trackbar")]
pub use track_bar::{TrackBar, TrackBarBuilder, TrackBarFlags};
#[cfg(feature = "menu")]
pub use menu::{Menu, MenuBuilder, MenuItem, MenuSeparator, MenuItemBuilder, PopupMenuFlags};
#[cfg(feature = "menu")]
pub use control_base::HmenuBuilder;
#[cfg(feature = "timer")]
#[allow(deprecated)]
pub use timer::{Timer, TimerBuilder};
#[cfg(feature = "animation-timer")]
#[allow(deprecated)]
pub use animation_timer::{AnimationTimer, AnimationTimerBuilder};
#[cfg(feature = "notice")]
pub use notice::{Notice, NoticeSender, NoticeBuilder};
#[cfg(feature = "combobox")]
pub use combo_box::{ComboBox, ComboBoxFlags, ComboBoxBuilder};
#[cfg(feature = "listbox")]
pub use list_box::{ListBox, ListBoxFlags, ListBoxBuilder};
#[cfg(feature = "datetime-picker")]
pub use date_picker::{DatePicker, DatePickerValue, DatePickerFlags, DatePickerBuilder};
#[cfg(feature = "progress-bar")]
pub use progress_bar::{ProgressBar, ProgressBarState, ProgressBarFlags, ProgressBarBuilder};
#[cfg(feature = "tabs")]
pub use tabs::{TabsContainer, Tab, TabsContainerFlags, TabsContainerBuilder, TabBuilder};
#[cfg(feature = "tree-view")]
pub use treeview::{TreeView, TreeViewBuilder, TreeItem, TreeInsert, TreeItemAction, ExpandState, TreeItemState, TreeViewFlags};
#[cfg(all(feature = "tree-view-iterator", feature = "tree-view") )]
pub use treeview_iterator::TreeViewIterator;
#[cfg(feature = "tray-notification")]
pub use tray_notification::{TrayNotificationFlags, TrayNotification, TrayNotificationBuilder};
#[cfg(feature = "message-window")]
pub use message_window::{MessageWindow, MessageWindowBuilder};
#[cfg(feature = "list-view")]
pub use list_view::{ListView, ListViewStyle, ListViewBuilder, ListViewFlags, ListViewExFlags, InsertListViewItem, ListViewItem, InsertListViewColumn, ListViewColumn, ListViewColumnSortArrow, ListViewColumnFlags};
#[cfg(all(feature="list-view", feature="image-list"))]
pub use list_view::ListViewImageListType;
#[cfg(feature = "number-select")]
pub use number_select::{NumberSelect, NumberSelectBuilder, NumberSelectFlags, NumberSelectData};
#[cfg(feature = "extern-canvas")]
pub use extern_canvas::{ExternCanvas, ExternCanvasBuilder, ExternCanvasFlags};
#[cfg(feature = "frame")]
pub use frame::{Frame, FrameBuilder, FrameFlags};
#[cfg(feature = "scroll-bar")]
pub use scroll_bar::{ScrollBar, ScrollBarBuilder, ScrollBarFlags};
#[cfg(feature = "plotting")]
pub use self::plotters::{Plotters, PlottersBuilder, PlottersDrawingArea, PlottersBackend, PlottersError};
pub use handle_from_control::*;
|
// Host functions provided by the waPC runtime. The guest calls these to
// read the incoming request and to hand back its response and log output.
#[link(wasm_import_module = "wapc")]
extern {
    // Send `len` bytes starting at `ptr` to the host's console log.
    fn __console_log(ptr: *const u8, len: usize);
    // Ask the host to copy the operation name into `op_ptr` and the request
    // payload into `ptr`; buffer sizes come from the lengths the host passed
    // to `__guest_call`.
    fn __guest_request(op_ptr: *mut u8, ptr: *mut u8);
    // Hand the response bytes back to the host.
    fn __guest_response(ptr: *const u8, len: usize);
}
/// waPC guest entry point, invoked by the host for every call.
///
/// `op_len` and `req_len` are the sizes of the operation-name and
/// request-payload buffers the host will fill via `__guest_request`.
/// Responds with `"result-<payload>"`, logs the call, and returns 1
/// (success) to the host.
#[no_mangle]
extern fn __guest_call(op_len: i32, req_len: i32) -> i32 {
    let mut op_buf = vec![0; op_len as _];
    let mut req_buf = vec![0; req_len as _];
    // SAFETY: both buffers are exactly the sizes the host reported, so the
    // host's writes stay in bounds.
    unsafe {
        __guest_request(op_buf.as_mut_ptr(), req_buf.as_mut_ptr());
    }
    // `unwrap_or_default` instead of `unwrap_or(String::default())`: same
    // empty-string fallback on invalid UTF-8, without eagerly constructing
    // the default on the success path (clippy: or_fun_call).
    let op = String::from_utf8(op_buf).unwrap_or_default();
    let req = String::from_utf8(req_buf).unwrap_or_default();
    let res = format!("result-{}", req);
    let log = format!("called func={}, payload={}", op, req);
    // SAFETY: the pointer/length pairs describe live, initialized string
    // data for the duration of these calls.
    unsafe {
        __guest_response(res.as_ptr(), res.len());
        __console_log(log.as_ptr(), log.len());
    }
    1
}
|
#[cfg(test)]
#[path = "./game_test.rs"]
pub mod game_test;
use std::fmt;
use std::rc::Rc;
use bat::SuperBat;
use map::{gen_rand_valid_path_from, is_adj, RoomNum};
use message::Message;
use pit::BottomlessPit;
use player::{Action, Player};
use util::*;
use wumpus::Wumpus;
/// Maximum number of rooms a crooked arrow may traverse in a single shot.
pub const MAX_TRAVERSABLE: usize = 5;
/// A room hazard (wumpus, bottomless pit, or super bat) that can react to
/// the game state and warn a nearby player.
pub trait Hazzard {
    // Gives the hazard a chance to act on the current state; `None` means
    // nothing happened this turn.
    fn try_update(&self, state: &State) -> Option<UpdateResult>;
    // Warning text shown when the player is close enough to sense the hazard.
    fn try_warn(&self, player_room: RoomNum) -> Option<&str>;
}
/// Source of room numbers (e.g. for random relocation).
pub trait RoomProvider {
    fn get_room(&self) -> RoomNum;
}
/// Outcome of a single hazard update.
#[derive(Debug, PartialEq)]
pub enum UpdateResult {
    // The player died; carries how the run ended.
    Death(RunResult),
    // A super bat snatched the player into the given room.
    SnatchTo(RoomNum),
    // Bumped into the wumpus and survived.
    BumpAndLive,
    // Bumped into the wumpus and was killed.
    BumpAndDie
}
/// Terminal result of a complete game run.
#[derive(Debug, PartialEq)]
pub enum RunResult {
    Quit,
    Win,
    Suicide,
    RanOutOfArrows,
    KilledByPit,
    KilledByWumpus
}
/// Snapshot of every entity's room plus player resources; passed to hazards
/// so they can react without borrowing the whole `Game`.
#[derive(Clone, PartialEq, Default, Debug)]
pub struct State {
    pub player: RoomNum,
    pub wumpus: RoomNum,
    pub pit1: RoomNum,
    pub pit2: RoomNum,
    pub bat1: RoomNum,
    pub bat2: RoomNum,
    pub arrow_count: u8,
    // When true, the full room layout is printed each turn.
    pub is_cheating: bool
}
/// The running game: the player, the wumpus, the static hazard rooms, and
/// the polymorphic hazard list that drives per-turn updates.
pub struct Game {
    pub player: Box<Player>,
    pub wumpus: Rc<Wumpus>,
    pub pit1_room: RoomNum,
    pub pit2_room: RoomNum,
    pub bat1_room: RoomNum,
    pub bat2_room: RoomNum,
    // Wumpus, two pits, two bats — polled in this order each turn.
    hazzards: Vec<Rc<dyn Hazzard>>,
    is_cheating: bool
}
impl Game {
    /// Creates a game with randomly generated, mutually distinct rooms for
    /// the player, wumpus, both pits, and both bats.
    pub fn new(is_cheating: bool) -> Self {
        let (player, wumpus, pit1, pit2, bat1, bat2) = gen_unique_rooms();
        Game::new_using(&State {
            player,
            wumpus,
            pit1,
            pit2,
            bat1,
            bat2,
            is_cheating,
            // arrow_count comes from State's Default impl.
            ..Default::default()
        })
    }
    /// Creates a game from an explicit initial state (also used by `new`).
    /// The wumpus is shared via `Rc` between the `wumpus` field and the
    /// hazard list.
    pub fn new_using(s: &State) -> Self {
        let player = box Player::new(s.player);
        let wumpus = Rc::new(Wumpus::new(s.wumpus));
        let wumpus_clone = Rc::clone(&wumpus);
        // Hazard polling order: wumpus first, then pits, then bats.
        let hazzards: Vec<Rc<dyn Hazzard>> = vec![
            wumpus_clone,
            Rc::new(BottomlessPit { room: s.pit1 }),
            Rc::new(BottomlessPit { room: s.pit2 }),
            Rc::new(SuperBat::new(s.bat1)),
            Rc::new(SuperBat::new(s.bat2)),
        ];
        Game {
            player,
            wumpus,
            hazzards,
            pit1_room: s.pit1,
            pit2_room: s.pit2,
            bat1_room: s.bat1,
            bat2_room: s.bat2,
            is_cheating: s.is_cheating
        }
    }
    /// Main game loop: update hazards, print warnings, take the player's
    /// action, and repeat until a terminal `RunResult` is produced.
    pub fn run(&mut self) -> RunResult {
        loop {
            if self.is_cheating {
                // Cheating mode reveals all room positions each turn.
                println!("{}", self);
            }
            if let Some(run_result) = self.update() {
                return run_result;
            }
            self.print_any_hazzard_warnings();
            let action = self.player.get_action(&self.get_state());
            if let Some(run_result) = self.process(&action) {
                return run_result;
            }
        }
    }
    /// Snapshots the current game state (room positions, arrows, flags).
    pub fn get_state(&self) -> State {
        State {
            player: self.player.room.get(),
            wumpus: self.wumpus.room.get(),
            pit1: self.pit1_room,
            pit2: self.pit2_room,
            bat1: self.bat1_room,
            bat2: self.bat2_room,
            arrow_count: self.player.arrow_count.get(),
            is_cheating: self.is_cheating
        }
    }
    // Prints the warning line of every hazard near the player's room.
    fn print_any_hazzard_warnings(&self) {
        self.hazzards
            .iter()
            .filter_map(|h| h.try_warn(self.player.room.get()))
            .for_each(|warning| println!("{}", warning));
    }
    // Applies one player action; `Some` means the game ended.
    fn process(&mut self, action: &Action) -> Option<RunResult> {
        match *action {
            // Moving is only legal into an adjacent room (guard enforces it).
            Action::Move(next_room) if is_adj(self.player.room.get(), next_room) => {
                self.player.room.set(next_room);
                None
            }
            Action::Quit => Some(RunResult::Quit),
            Action::Shoot(ref rooms) => shoot(rooms, &self.get_state()),
            // Anything else indicates a bug in action generation upstream.
            _ => panic!("illegal action: {:?}", action)
        }
    }
    // Runs hazard updates, repeating while bats keep snatching the player
    // to new rooms; `Some` means the player died.
    fn update(&mut self) -> Option<RunResult> {
        loop {
            let mut is_snatched = false;
            if let Some(update_result) = self.try_update() {
                match update_result {
                    UpdateResult::Death(run_result) => {
                        return Some(run_result);
                    }
                    UpdateResult::SnatchTo(new_room) => {
                        // The new room may itself hold a hazard, so loop.
                        self.player.room.set(new_room);
                        is_snatched = true;
                        println!("{}", Message::BAT_SNATCH);
                    }
                    UpdateResult::BumpAndLive => {
                        println!("{}", Message::WUMPUS_BUMP);
                    }
                    UpdateResult::BumpAndDie => {
                        println!("{}", Message::WUMPUS_BUMP);
                        return Some(RunResult::KilledByWumpus);
                    }
                }
            }
            if !is_snatched {
                break;
            }
        }
        None
    }
    // First hazard reaction to the current state, if any; hazard order
    // gives the wumpus priority over pits and bats.
    fn try_update(&mut self) -> Option<UpdateResult> {
        let state = self.get_state();
        self.hazzards
            .iter()
            .filter_map(|h| h.try_update(&state))
            .next()
    }
}
impl fmt::Display for Game {
    /// Cheat-mode dump of every entity's room number.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "rooms: player {}, wumpus {}, pit1 {}, pit2 {}, bat1 {}, bat2 {}.",
            self.player.room.get(),
            self.wumpus.room.get(),
            self.pit1_room,
            self.pit2_room,
            self.bat1_room,
            self.bat2_room
        )
    }
}
impl fmt::Display for RunResult {
    /// End-of-game text for each outcome; quitting prints nothing.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let msg = match *self {
            RunResult::KilledByPit => format!("{}\n{}\n", Message::FELL_IN_PIT, Message::LOSE),
            RunResult::KilledByWumpus => {
                format!("{}\n{}\n", Message::WUMPUS_GOT_YOU, Message::LOSE)
            }
            RunResult::Suicide => format!("{}\n{}\n", Message::ARROW_GOT_YOU, Message::LOSE),
            RunResult::RanOutOfArrows => format!("{}\n{}\n", Message::OUT_OF_ARROWS, Message::LOSE),
            RunResult::Win => format!("{}\n", Message::WIN),
            RunResult::Quit => String::from("")
        };
        write!(f, "{}", msg)
    }
}
// Readability aliases for the payload of `ShootResult::Remaining`.
type NumRemaining = usize;
type LastTraversed = usize;
/// Outcome of traversing the crooked arrow's requested path.
#[derive(PartialEq, Debug)]
enum ShootResult {
    Miss,
    Hit,
    Suicide,
    // The path broke at a non-adjacent hop: number of rooms left to fly,
    // plus the last room the arrow actually reached.
    Remaining(NumRemaining, LastTraversed)
}
// Resolves a shot along `rooms` (player's room first); `Some` means the
// game ended (win, suicide, or out of arrows).
fn shoot(rooms: &[RoomNum], s: &State) -> Option<RunResult> {
    // rooms length must contain the player and at least one other room to
    // traverse.
    if rooms.len() < 2 || rooms.len() > MAX_TRAVERSABLE + 1 {
        panic!(
            "shoot function called with a length out of bounds: {}",
            rooms.len()
        );
    }
    match traverse(rooms, s) {
        ShootResult::Hit => Some(RunResult::Win),
        ShootResult::Suicide => Some(RunResult::Suicide),
        ShootResult::Miss => {
            println!("{}", String::from(Message::MISSED));
            // The state already reflects the spent arrow; zero left ends
            // the run.
            if s.arrow_count == 0 {
                Some(RunResult::RanOutOfArrows)
            } else {
                None
            }
        }
        ShootResult::Remaining(remaining, last_traversed) => {
            // When the requested path breaks, the arrow continues on a
            // randomly generated valid path for the remaining rooms.
            let remaining_rooms = gen_rand_valid_path_from(remaining, last_traversed);
            shoot(&remaining_rooms, s) // recursive call at most once.
        }
    }
}
/// Traverse crooked arrow across rooms starting from the player's room.
/// Each room entered is printed; hitting the player or the wumpus ends the
/// flight immediately.
fn traverse(rooms: &[RoomNum], s: &State) -> ShootResult {
    // Walk adjacent pairs: w[0] is where the arrow is, w[1] where it goes.
    for (num_traversed, w) in rooms.windows(2).enumerate() {
        let a = w[0];
        let b = w[1];
        if !is_adj(a, b) {
            // Path breaks here; report how many rooms remain and where the
            // arrow stopped so the caller can re-route randomly.
            return ShootResult::Remaining(rooms.len() - num_traversed, a);
        }
        println!("{}", b);
        if b == s.player {
            return ShootResult::Suicide;
        }
        if b == s.wumpus {
            return ShootResult::Hit;
        }
    }
    ShootResult::Miss
}
|
#[doc = "Reader of register ITLINE8"]
pub type R = crate::R<u32, super::ITLINE8>;
#[doc = "Reader of field `UCPD1`"]
pub type UCPD1_R = crate::R<bool, bool>;
#[doc = "Reader of field `UCPD2`"]
pub type UCPD2_R = crate::R<bool, bool>;
// NOTE(review): this looks svd2rust-generated; each accessor extracts one
// interrupt-line status bit from the raw 32-bit register value.
impl R {
    #[doc = "Bit 0 - UCPD1"]
    #[inline(always)]
    pub fn ucpd1(&self) -> UCPD1_R {
        UCPD1_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - UCPD2"]
    #[inline(always)]
    pub fn ucpd2(&self) -> UCPD2_R {
        UCPD2_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
|
//! Realize the character input and output of the console
//!
//! # format output
//!
//! [`core::fmt::Write`] trait
//! -- need [`write_str`]
//! -- comes with implementation, but depends on [`write_str`] and [`write_fmt`]
//!
//! we declare one type, implement [`write_str`], then we can use [`write_fmt`] to format output
//!
//! [`write_str`]: core::fmt::Write::write_str
//! [`write_fmt`]: core::fmt::Write::write_fmt
use crate::sbi::*;
use core::fmt::{self, Write};
/// Zero-sized console writer; implementing [`core::fmt::Write`] for it
/// gives us `write_fmt` (and therefore formatted output) for free.
struct Stdout;

impl Write for Stdout {
    /// Print a string through the SBI console.
    ///
    /// The SBI call takes one `usize` per invocation but (per the original
    /// note) only a `u8` worth of it is used for printing, so every char is
    /// first encoded as UTF-8 and emitted byte by byte.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        // Scratch space: 4 bytes is the maximum UTF-8 length of a char.
        let mut utf8_buf = [0u8; 4];
        for ch in s.chars() {
            for byte in ch.encode_utf8(&mut utf8_buf).bytes() {
                console_putchar(byte as usize);
            }
        }
        Ok(())
    }
}
/// Print data after [`core::format_args!`]'s formatting.
///
/// The [`print!`] and [`println!`] macros expand into this function.
///
/// [`core::format_args!`]: https://doc.rust-lang.org/nightly/core/macro.format_args.html
pub fn print(args: fmt::Arguments){
    // `Stdout::write_str` always returns Ok, so this unwrap can only fire
    // if a `Display`/`Debug` impl inside `args` itself reports an error.
    Stdout.write_fmt(args).unwrap();
}
/// Implements a `print!`-style macro like the one in the stdlib.
///
/// Formats via `format_args!` and forwards to [`crate::console::print`].
#[macro_export]
macro_rules! print {
    ($fmt: literal $(, $($arg: tt)+)?) => {
        $crate::console::print(format_args!($fmt $(, $($arg)+)?));
    }
}
/// Implements a `println!`-style macro like the one in the stdlib.
///
/// Same as `print!`, but appends a newline to the format string at compile
/// time via `concat!`.
#[macro_export]
macro_rules! println {
    ($fmt: literal $(, $($arg: tt)+)?) => {
        $crate::console::print(format_args!(concat!($fmt, "\n") $(, $($arg)+)?));
    }
}
use crate::otvar::{
Delta, PackedDeltas, PackedDeltasDeserializer, PackedPoints, TupleIndexFlags,
TupleVariationHeader, TupleVariationHeaderDeserializer,
};
use otspec::types::*;
use otspec::{read_field, stateful_deserializer};
use serde::de::DeserializeSeed;
use serde::de::SeqAccess;
use serde::de::Visitor;
use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use std::collections::VecDeque;
/// A record within a tuple variation store
///
/// This is a low-level representation of variation data, consisting of a
/// TupleVariationHeader (which serves to locate the deltas in the design space)
/// and an optimized set of deltas, some of which may be omitted due to IUP.
// Field 0: the header locating this variation in the design space.
// Field 1: per-point deltas; `None` marks points omitted by IUP.
#[derive(Debug, PartialEq)]
pub struct TupleVariation(pub TupleVariationHeader, pub Vec<Option<Delta>>);
/// Interpolates deltas for the run of points `coords` lying between two
/// reference points: `rc1`/`rd1` (coordinate and delta of one reference)
/// and `rc2`/`rd2` (the other). The interpolated (x, y) deltas are appended
/// to `newdeltas`.
///
/// Panics if either reference delta is `None`; callers only pass reference
/// points that carry explicit deltas.
fn iup_segment(
    newdeltas: &mut Vec<(i16, i16)>,
    coords: &[(i16, i16)],
    rc1: (i16, i16),
    rd1: &Option<Delta>,
    rc2: (i16, i16),
    rd2: &Option<Delta>,
) {
    let rd1 = rd1.as_ref().unwrap().get_2d();
    let rd2 = rd2.as_ref().unwrap().get_2d();
    // One output array per axis: index 0 is x, index 1 is y.
    let mut out_arrays: Vec<Vec<i16>> = vec![vec![], vec![]];
    for j in 0..2 {
        let (mut x1, mut x2, mut d1, mut d2) = if j == 0 {
            (rc1.0, rc2.0, rd1.0, rd2.0)
        } else {
            (rc1.1, rc2.1, rd1.1, rd2.1)
        };
        if x1 == x2 {
            // Degenerate span: the references share this coordinate. Use
            // their common delta when they agree, otherwise 0 everywhere.
            let n = coords.len();
            out_arrays[j].extend(std::iter::repeat(if d1 == d2 { d1 } else { 0 }).take(n));
            continue;
        }
        // Order the references so x1 < x2; their deltas swap along.
        if x1 > x2 {
            std::mem::swap(&mut x2, &mut x1);
            std::mem::swap(&mut d2, &mut d1);
        }
        let scale = (d2 - d1) as f32 / (x2 - x1) as f32;
        for pair in coords {
            let x = if j == 0 { pair.0 } else { pair.1 };
            // Clamp outside the reference span, linear interpolation inside.
            // NOTE(review): the `as i16` cast truncates toward zero rather
            // than rounding — confirm this matches the reference
            // implementation's rounding behavior.
            let d = if x <= x1 {
                d1
            } else if x >= x2 {
                d2
            } else {
                d1 + ((x - x1) as f32 * scale) as i16
            };
            out_arrays[j].push(d);
        }
    }
    // Zip the per-axis arrays back into (x, y) pairs.
    newdeltas.extend(
        out_arrays[0]
            .iter()
            .zip(out_arrays[1].iter())
            .map(|(x, y)| (*x, *y)),
    );
}
/// Expands the deltas of one contour, interpolating `None` entries (IUP)
/// from the nearest explicit deltas on either side; results are appended to
/// `newdeltas`. The contour is treated as cyclic: runs before the first and
/// after the last explicit point wrap around.
fn iup_contour(newdeltas: &mut Vec<(i16, i16)>, deltas: &[Option<Delta>], coords: &[(i16, i16)]) {
    if deltas.iter().all(|x| x.is_some()) {
        // Fast path: every delta is explicit, nothing to interpolate.
        newdeltas.extend::<Vec<(i16, i16)>>(
            deltas
                .iter()
                .map(|x| x.as_ref().unwrap().get_2d())
                .collect(),
        );
        return;
    }
    let n = deltas.len();
    // Indices of the points carrying explicit deltas.
    let indices: Vec<usize> = deltas
        .iter()
        .enumerate()
        .filter(|(_, d)| d.is_some())
        .map(|(i, _)| i)
        .collect();
    if indices.is_empty() {
        // No explicit deltas at all: the whole contour is unmoved.
        newdeltas.extend(std::iter::repeat((0, 0)).take(n));
        return;
    }
    let mut start = indices[0];
    let verystart = start;
    if start != 0 {
        // Leading run before the first explicit point: interpolate between
        // the last and first explicit points (wraparound).
        let (i1, i2, ri1, ri2) = (0, start, start, *indices.last().unwrap());
        iup_segment(
            newdeltas,
            &coords[i1..i2],
            coords[ri1],
            &deltas[ri1],
            coords[ri2],
            &deltas[ri2],
        );
    }
    newdeltas.push(deltas[start].as_ref().unwrap().get_2d());
    for end in indices.iter().skip(1) {
        if *end - start > 1 {
            // Interior gap between two consecutive explicit points.
            let (i1, i2, ri1, ri2) = (start + 1, *end, start, *end);
            iup_segment(
                newdeltas,
                &coords[i1..i2],
                coords[ri1],
                &deltas[ri1],
                coords[ri2],
                &deltas[ri2],
            );
        }
        newdeltas.push(deltas[*end].as_ref().unwrap().get_2d());
        start = *end;
    }
    if start != n - 1 {
        // Trailing run after the last explicit point: wraps back to the
        // first explicit point.
        let (i1, i2, ri1, ri2) = (start + 1, n, start, verystart);
        iup_segment(
            newdeltas,
            &coords[i1..i2],
            coords[ri1],
            &deltas[ri1],
            coords[ri2],
            &deltas[ri2],
        );
    }
}
impl TupleVariation {
    /// Unpacks the delta array using Interpolation of Unreferenced Points
    ///
    /// The tuple variation record is stored in an optimized format with deltas
    /// omitted if they can be computed from other surrounding deltas. This takes
    /// a tuple variation record along with the original points list (from the glyf
    /// table) and the indices of the end points of the contours (as the optimization
    /// is done contour-wise), and returns a full list of (x,y) deltas, with the
    /// implied deltas expanded.
    pub fn iup_delta(&self, coords: &[(i16, i16)], ends: &[usize]) -> Vec<(i16, i16)> {
        // Note: unlike the Python implementation, `ends` lists every
        // contour's end index.
        let deltas = &self.1;
        if deltas.iter().all(Option::is_some) {
            // Every delta is explicit — no interpolation required.
            return deltas
                .iter()
                .map(|d| d.as_ref().unwrap().get_2d())
                .collect();
        }
        let mut expanded = vec![];
        let mut contour_start = 0;
        for &contour_end in ends {
            iup_contour(
                &mut expanded,
                &deltas[contour_start..=contour_end],
                &coords[contour_start..=contour_end],
            );
            contour_start = contour_end + 1;
        }
        expanded
    }
}
/// A Tuple Variation Store
///
/// A tuple variation store is the way that OpenType internally represents
/// variation records in the `gvar` and `cvar` (CVT variation) tables.
// The store is simply the ordered list of tuple variation records.
#[derive(Debug, PartialEq)]
pub struct TupleVariationStore(pub Vec<TupleVariation>);
// Stateful deserializer: the caller must supply the axis count, whether the
// store is a gvar-style (2-D delta) store, and the number of points.
stateful_deserializer!(
    TupleVariationStore,
    TupleVariationStoreDeserializer,
    {
        axis_count: uint16,
        is_gvar: bool,
        point_count: uint16
    },
    fn visit_seq<A>(self, mut seq: A) -> std::result::Result<TupleVariationStore, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // Begin with the "GlyphVariationData header"
        let packed_count = read_field!(seq, uint16, "a packed count");
        // Low 12 bits: number of tuple variation headers; top bit: shared
        // point numbers follow in the serialized data block.
        let count = packed_count & 0x0FFF;
        let points_are_shared = (packed_count & 0x8000) != 0;
        let mut shared_points = vec![];
        // Offset to the serialized data block — unused when reading
        // sequentially, but it must still be consumed.
        let _data_offset = read_field!(seq, uint16, "a data offset");
        // Read the headers
        let mut headers: Vec<TupleVariationHeader> = vec![];
        let mut variations: Vec<TupleVariation> = vec![];
        for _ in 0..count {
            headers.push(
                seq.next_element_seed(TupleVariationHeaderDeserializer {
                    axis_count: self.axis_count,
                })?
                .unwrap(),
            );
        }
        // Now we are into the "serialized data block"
        // ...which begins with Shared "point" numbers (optional per flag in the header)
        if points_are_shared {
            // An absent point list (`None`) means "all points".
            shared_points = match read_field!(seq, PackedPoints, "packed points").points {
                Some(pts) => pts,
                None => (0..self.point_count).collect()
            };
        }
        // And finally per-tuple variation data
        for header in headers {
            let mut points_for_this_header: VecDeque<u16>;
            /* Private points? */
            if header
                .flags
                .contains(TupleIndexFlags::PRIVATE_POINT_NUMBERS)
            {
                let private_points = read_field!(seq, PackedPoints, "packed points");
                if private_points.points.is_some() {
                    points_for_this_header = private_points.points.unwrap().clone().into();
                } else {
                    points_for_this_header = (0..self.point_count).collect();
                }
            } else {
                points_for_this_header = shared_points.clone().into();
            }
            // gvar stores two packed delta streams (x then y); otherwise a
            // single stream of 1-D deltas is read.
            let mut deltas:VecDeque<Delta> = if self.is_gvar {
                let packed_x = seq.next_element_seed(PackedDeltasDeserializer { num_points: points_for_this_header.len() })?.unwrap().0;
                let packed_y = seq.next_element_seed(PackedDeltasDeserializer { num_points: points_for_this_header.len() })?.unwrap().0;
                packed_x.iter().zip(packed_y.iter()).map(|(x,y)| Delta::Delta2D((*x,*y)) ).collect()
            } else {
                let packed = seq.next_element_seed(PackedDeltasDeserializer { num_points: points_for_this_header.len() })?.unwrap().0;
                packed.iter().map(|x| Delta::Delta1D(*x) ).collect()
            };
            // Spread the packed deltas over the full point list; points not
            // named by this header get `None` (filled in later by IUP).
            let mut all_deltas:Vec<Option<Delta>> = vec![];
            for i in 0..self.point_count {
                if !points_for_this_header.is_empty() && i == points_for_this_header[0] {
                    all_deltas.push(Some(deltas.pop_front().unwrap()));
                    points_for_this_header.pop_front();
                } else {
                    all_deltas.push(None); // IUP needed later
                }
            }
            variations.push( TupleVariation(header, all_deltas))
        }
        Ok(TupleVariationStore(variations))
    }
);
impl Serialize for TupleVariationStore {
    /// Serializes the store: packed count, data offset, all headers, then
    /// the serialized data block containing the packed deltas.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut ser = serializer.serialize_seq(None)?;
        // Top bit advertises shared point numbers; low bits carry the count.
        let packed_count: uint16 = self.0.len() as uint16 | 0x8000; // Shared points only!
        ser.serialize_element(&packed_count)?;
        let mut serialized_headers = vec![];
        let mut serialized_data_block: Vec<u8> = vec![];
        // Shared points go here
        // NOTE(review): a single 0x00 byte is emitted where the shared point
        // numbers belong — a placeholder, as the comment below admits;
        // confirm it encodes the intended point-number list.
        #[allow(clippy::vec_init_then_push)]
        let _ = serialized_data_block.push(0); // This is dummy code
        let mut last_delta_len = serialized_data_block.len();
        for var in &self.0 {
            // For each glyph
            let header = &var.0;
            // We need to set .flags here
            let deltas = &var.1;
            // Private point numbers go here
            // println!("Last length was {:?}", last_delta_len);
            // Split the deltas into per-axis streams; only explicit
            // (`Some`) deltas are serialized.
            let mut dx = vec![];
            let mut dy = vec![];
            for d in deltas.iter().flatten() {
                match d {
                    Delta::Delta1D(d) => {
                        dx.push(*d);
                    }
                    Delta::Delta2D((x, y)) => {
                        dx.push(*x);
                        dy.push(*y);
                    }
                }
            }
            // Remove the .clones here when things are fixed, they're only needed for a later println
            serialized_data_block.extend(otspec::ser::to_bytes(&PackedDeltas(dx.clone())).unwrap());
            if !dy.is_empty() {
                serialized_data_block
                    .extend(otspec::ser::to_bytes(&PackedDeltas(dy.clone())).unwrap());
            }
            // println!("Serializing a TVH (will fix size later): {:?}", header);
            let mut serialized_header = otspec::ser::to_bytes(&header).unwrap();
            // println!("Current data block {:?}", serialized_data_block);
            // println!("Current length is {:?}", serialized_data_block.len());
            let data_size = (serialized_data_block.len() - last_delta_len) as u16;
            // println!("Data size is {:?}", data_size);
            let size: Vec<u8> = otspec::ser::to_bytes(&data_size).unwrap();
            // Patch the first header field now that the size of this
            // tuple's delta data is known.
            serialized_header[0] = size[0];
            serialized_header[1] = size[1];
            // println!(" header as bytes {:?}", serialized_header);
            // println!(" X deltas {:?}", dx);
            // println!(" Y deltas {:?}", dy);
            // println!(
            //     " data for this header: {:?}",
            //     serialized_data_block[last_delta_len..serialized_data_block.len()].to_vec()
            // );
            last_delta_len = serialized_data_block.len();
            serialized_headers.extend(serialized_header);
        }
        // 4 = the packed count (2 bytes) plus this offset field (2 bytes).
        let data_offset: uint16 = 4 + (serialized_headers.len() as uint16);
        ser.serialize_element(&data_offset)?;
        ser.serialize_element(&serialized_headers)?;
        ser.serialize_element(&serialized_data_block)?;
        ser.end()
    }
}
#[cfg(test)]
mod tests {
    use crate::otvar::Delta::Delta2D;
    use crate::otvar::TupleVariation;
    use crate::otvar::TupleVariationHeader;
    use crate::otvar::{TupleIndexFlags, TupleVariationStore, TupleVariationStoreDeserializer};
    use serde::de::DeserializeSeed;
    /// Deserializes a captured binary tuple variation store and checks the
    /// decoded header and per-point deltas against hand-verified values.
    #[test]
    fn test_tvs_de() {
        // gvar bytes for one glyph with 15 points on a single-axis font.
        let binary_tvs: Vec<u8> = vec![
            0x80, 0x01, 0x00, 0x0a, 0x00, 0x21, 0x80, 0x00, 0x20, 0x00, 0x00, 0x06, 0xcb, 0xd0,
            0xb7, 0xbb, 0x00, 0xf0, 0x8c, 0x40, 0xff, 0x7c, 0x03, 0xb5, 0xd2, 0xc3, 0x00, 0x40,
            0xfe, 0xe0, 0x81, 0x0a, 0x08, 0xfd, 0xfd, 0x08, 0x08, 0xe4, 0xe4, 0x08, 0xc5, 0xc5,
            0xeb, 0x83,
        ];
        let mut de = otspec::de::Deserializer::from_bytes(&binary_tvs);
        // Axis/point counts are external context the binary itself lacks.
        let cs = TupleVariationStoreDeserializer {
            axis_count: 1,
            point_count: 15,
            is_gvar: true,
        };
        let tvs = cs.deserialize(&mut de).unwrap();
        let expected = TupleVariationStore(vec![TupleVariation(
            TupleVariationHeader {
                size: 33,
                flags: TupleIndexFlags::EMBEDDED_PEAK_TUPLE,
                sharedTupleIndex: 0,
                peakTuple: Some(vec![0.5]),
                startTuple: None,
                endTuple: None,
            },
            vec![
                Some(Delta2D((-53, 8))),
                Some(Delta2D((-48, -3))),
                Some(Delta2D((-73, -3))),
                Some(Delta2D((-69, 8))),
                Some(Delta2D((0, 8))),
                Some(Delta2D((-16, -28))),
                Some(Delta2D((-116, -28))),
                Some(Delta2D((-132, 8))),
                Some(Delta2D((-75, -59))),
                Some(Delta2D((-46, -59))),
                Some(Delta2D((-61, -21))),
                Some(Delta2D((0, 0))),
                Some(Delta2D((-288, 0))),
                Some(Delta2D((0, 0))),
                Some(Delta2D((0, 0))),
            ],
        )]);
        assert_eq!(tvs, expected);
    }
    /// Round trip: serializing the in-memory store must reproduce the
    /// annotated byte layout exactly (inverse of `test_tvs_de`).
    #[test]
    fn test_tvs_ser() {
        let expected: Vec<u8> = vec![
            0x80, 0x01, /* tupleVariationCount. SHARED_POINT_NUMBERS */
            0x00, 0x0a, /* dataOffset */
            /* TVH */
            0x00, 0x21, /* variationDataSize: 33 bytes */
            0x80, 0x00, /* tuple index. EMBEDDED_PEAK_TUPLE */
            0x20, 0x00, /* Peak tuple record */
            0x00, /* Shared point numbers */
            /* per-tuple variation data */
            0x06, 0xcb, 0xd0, 0xb7, 0xbb, 0x00, 0xf0, 0x8c, 0x40, 0xff, 0x7c, 0x03, 0xb5, 0xd2,
            0xc3, 0x00, 0x40, 0xfe, 0xe0, 0x81, 0x0a, 0x08, 0xfd, 0xfd, 0x08, 0x08, 0xe4, 0xe4,
            0x08, 0xc5, 0xc5, 0xeb, 0x83,
        ];
        let tvs = TupleVariationStore(vec![TupleVariation(
            TupleVariationHeader {
                size: 33,
                flags: TupleIndexFlags::EMBEDDED_PEAK_TUPLE,
                sharedTupleIndex: 0,
                peakTuple: Some(vec![0.5]),
                startTuple: None,
                endTuple: None,
            },
            vec![
                Some(Delta2D((-53, 8))),
                Some(Delta2D((-48, -3))),
                Some(Delta2D((-73, -3))),
                Some(Delta2D((-69, 8))),
                Some(Delta2D((0, 8))),
                Some(Delta2D((-16, -28))),
                Some(Delta2D((-116, -28))),
                Some(Delta2D((-132, 8))),
                Some(Delta2D((-75, -59))),
                Some(Delta2D((-46, -59))),
                Some(Delta2D((-61, -21))),
                Some(Delta2D((0, 0))),
                Some(Delta2D((-288, 0))),
                Some(Delta2D((0, 0))),
                Some(Delta2D((0, 0))),
            ],
        )]);
        let binary_tvs = otspec::ser::to_bytes(&tvs).unwrap();
        assert_eq!(binary_tvs, expected);
    }
}
|
// Advent of Code: Day 10
//
// We have a complicated situation with various power adapters on a
// plane, but essentially we have a list of numbers and we want to
// sort the numbers and then count up the number of distinct differences
// between the numbers. For example, how many numbers when sorted have
// a difference of 1 from each other, how many have a difference of 3.
// The answer is the count of diffs of 1 multiplied by the count of diffs
// of 3.
//
// For the small input the answer is 7 * 5 = 35, for the test input the
// answer is 22 * 10 = 220.
//
// Usage: cargo run <input-file>
use std::{collections::HashMap, env, fs::File, io::BufRead, io::BufReader};
/// Reads adapter joltages from the input file, counts the 1-jolt and 3-jolt
/// gaps in the sorted chain (outlet included, device appended implicitly),
/// and prints their product.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        println!("Usage: cargo run <input-file>");
        return;
    }
    let input_file = &args[1];
    let file = File::open(input_file).expect("no such file");
    let buf = BufReader::new(file);
    let mut lines: Vec<usize> = buf
        .lines()
        .map(|l| l.expect("could not parse line"))
        .map(|s| s.parse().expect("must be a positive integer"))
        .collect();
    // The charging outlet has an effective joltage of 0. Including it here
    // counts the outlet→first-adapter gap naturally instead of assuming it
    // is always 1 (the old hard-coded "+ 1" was wrong for inputs whose
    // lowest adapter isn't 1 jolt).
    lines.push(0);
    lines.sort_unstable();
    let mut differences: HashMap<usize, usize> = HashMap::new();
    // windows(2) also makes an empty input file safe: previously
    // `0..(lines.len() - 1)` underflowed and panicked on zero lines.
    for pair in lines.windows(2) {
        *differences.entry(pair[1] - pair[0]).or_insert(0) += 1;
    }
    // `unwrap_or(0)` instead of `unwrap()`: an input with no gaps of a given
    // size should yield a zero count, not a panic.
    let diffs_of_1 = differences.get(&1).copied().unwrap_or(0);
    // The device is always rated 3 jolts above the highest adapter.
    let diffs_of_3 = differences.get(&3).copied().unwrap_or(0) + 1;
    println!("Differences of 1: {}", diffs_of_1);
    println!("Differences of 3: {}", diffs_of_3);
    println!("The product of those is {}", diffs_of_1 * diffs_of_3);
}
|
extern crate url;
extern crate regex;
#[macro_use] extern crate lazy_static;
extern crate idna;
mod types;
mod ip;
mod email;
mod length;
mod range;
mod urls;
mod must_match;
mod contains;
pub use types::{Errors, Validate, Validator};
pub use ip::{validate_ip, validate_ip_v4, validate_ip_v6};
pub use email::{validate_email};
pub use length::{HasLen, validate_length};
pub use range::{validate_range};
pub use urls::{validate_url};
pub use must_match::{validate_must_match};
pub use contains::{Contains, validate_contains};
|
use chrono::Duration;
use iso8601_duration as iso8601;
use crate::{InputValueError, InputValueResult, Scalar, ScalarType, Value};
/// Implement the Duration scalar
///
/// The input/output is a string in ISO8601 format.
#[Scalar(
    internal,
    name = "Duration",
    specified_by_url = "https://en.wikipedia.org/wiki/ISO_8601#Durations"
)]
impl ScalarType for Duration {
    /// Parses a GraphQL string value as an ISO 8601 duration.
    fn parse(value: Value) -> InputValueResult<Self> {
        match &value {
            // Parse the ISO 8601 text into a std Duration, then convert to a
            // chrono Duration; either step can fail and is surfaced via `?`.
            Value::String(s) => Ok(Duration::from_std(iso8601::Duration::parse(s)?.to_std())?),
            _ => Err(InputValueError::expected_type(value)),
        }
    }
    /// Serializes the duration back to a string value.
    // NOTE(review): relies on chrono's `Display` output being ISO 8601 so the
    // value round-trips through `parse` — confirm.
    fn to_value(&self) -> Value {
        Value::String(self.to_string())
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_config::GlobalConfig;
use common_exception::ErrorCode;
use common_exception::Result;
use crate::interpreters::access::AccessChecker;
use crate::sql::plans::Plan;
/// Access checker used while the cluster runs in management mode: only a
/// small allow-list of metadata/DDL plans may be executed (see `check`).
pub struct ManagementModeAccess;
impl ManagementModeAccess {
    /// Creates a boxed checker suitable for use as a generic `AccessChecker`.
    pub fn create() -> Box<dyn AccessChecker> {
        Box::new(ManagementModeAccess)
    }
}
#[async_trait::async_trait]
impl AccessChecker for ManagementModeAccess {
    // Check what we can do if in management mode.
    /// Rejects any plan outside the management-mode allow-list with
    /// `ManagementModePermissionDenied`; when management mode is off, every
    /// plan is allowed through unchanged.
    async fn check(&self, plan: &Plan) -> Result<()> {
        // Allows for management-mode.
        if GlobalConfig::instance().query.management_mode {
            let ok = match plan {
                // Read-only SHOW-style queries are identified by their
                // rewrite kind; a plain query (no rewrite kind) is denied.
                Plan::Query {rewrite_kind, .. } => {
                    use common_sql::plans::RewriteKind;
                    match rewrite_kind {
                        Some(ref v) => matches!(v,
                        RewriteKind::ShowDatabases
                        | RewriteKind::ShowTables
                        | RewriteKind::ShowColumns
                        | RewriteKind::ShowEngines
                        | RewriteKind::ShowSettings
                        | RewriteKind::ShowFunctions
                        | RewriteKind::ShowTableFunctions
                        | RewriteKind::ShowUsers
                        | RewriteKind::ShowStages
                        | RewriteKind::DescribeStage
                        | RewriteKind::ListStage
                        | RewriteKind::ShowRoles),
                        _ => false
                    }
                },
                // Non-query plans allowed in management mode, grouped by area:
                // Show.
                Plan::ShowCreateDatabase(_)
                | Plan::ShowCreateTable(_)
                | Plan::ShowGrants(_)
                // Set
                | Plan::SetVariable(_)
                // Database.
                | Plan::CreateDatabase(_)
                | Plan::DropDatabase(_)
                // Table.
                | Plan::DescribeTable(_)
                | Plan::CreateTable(_)
                | Plan::DropTable(_)
                // User.
                | Plan::AlterUser(_)
                | Plan::CreateUser(_)
                | Plan::DropUser(_)
                // Privilege.
                | Plan::GrantPriv(_)
                | Plan::RevokePriv(_)
                | Plan::GrantRole(_)
                | Plan::RevokeRole(_)
                // Stage.
                | Plan::CreateStage(_)
                | Plan::DropStage(_)
                // UDF
                | Plan::CreateUDF(_)
                | Plan::AlterUDF(_)
                | Plan::DropUDF(_)
                | Plan::UseDatabase(_)
                | Plan::Call(_) => true,
                _ => false
            };
            if !ok {
                return Err(ErrorCode::ManagementModePermissionDenied(format!(
                    "Access denied for operation:{:?} in management-mode",
                    plan.format_indent()
                )));
            }
        };
        Ok(())
    }
}
|
#[test]
fn smoke_test() {
    // Hold the timing guard across the sleep; elapsed time is reported when
    // the guard is dropped at the end of the test.
    let _guard = timeit::timeit("loooong");
    std::thread::sleep(std::time::Duration::from_millis(199));
}
|
use crate::config::ScriptDeviceConfiguration;
use crate::utils::LogCommandExt;
use crate::*;
use anyhow::bail;
use std::{fmt, fs, process};
/// A pseudo-device whose operations are delegated to a user-provided script.
#[derive(Debug, Clone)]
pub struct ScriptDevice {
    // Device identifier; also exported to the script via DINGHY_* env vars.
    pub id: String,
    pub conf: ScriptDeviceConfiguration,
}
impl ScriptDevice {
    /// Builds the `Command` that invokes the configured runner script,
    /// exporting DINGHY_* environment variables for the script to consume.
    fn command(&self, _build: &Build) -> Result<process::Command> {
        // Fail fast with a readable error if the script is missing/unreadable.
        if fs::metadata(&self.conf.path).is_err() {
            bail!("Can not read {:?} for {}.", self.conf.path, self.id);
        }
        let mut cmd = process::Command::new(&self.conf.path);
        // NOTE(review): DINGHY_TEST_DATA is set to the device *id*, while
        // run_app separately sets DINGHY_TEST_DATA_PATH — confirm scripts
        // expect the id here and not a path.
        cmd.env("DINGHY_TEST_DATA", &*self.id);
        cmd.env("DINGHY_DEVICE", &*self.id);
        if let Some(ref pf) = self.conf.platform {
            cmd.env("DINGHY_PLATFORM", &*pf);
        }
        Ok(cmd)
    }
}
impl Device for ScriptDevice {
    /// Nothing to clean: the script is expected to manage its own state.
    fn clean_app(&self, _build_bundle: &BuildBundle) -> Result<()> {
        Ok(())
    }
    /// Debugging through a script device is not supported.
    fn debug_app(
        &self,
        _project: &Project,
        _build: &Build,
        _args: &[&str],
        _envs: &[&str],
    ) -> Result<BuildBundle> {
        unimplemented!()
    }
    fn id(&self) -> &str {
        &self.id
    }
    fn name(&self) -> &str {
        &self.id
    }
    /// Runs the built executable through the runner script, forwarding `args`
    /// and parsing `envs` entries of the form `KEY=VALUE`.
    fn run_app(
        &self,
        project: &Project,
        build: &Build,
        args: &[&str],
        envs: &[&str],
    ) -> Result<BuildBundle> {
        let root_dir = build.target_path.join("dinghy");
        let bundle_path = &build.runnable.source;
        log::trace!("About to start runner script...");
        let test_data_path = project.link_test_data(&build.runnable)?;
        let status = self
            .command(build)?
            .arg(&build.runnable.exe)
            .current_dir(&build.runnable.source)
            .env("DINGHY_TEST_DATA_PATH", test_data_path)
            .args(args)
            .envs(
                envs.iter()
                    .map(|kv| {
                        // Split on the *first* '=' only so values containing
                        // '=' (e.g. FOO=a=b) survive intact; the previous
                        // `split("=").nth(1)` truncated such values to "a".
                        kv.split_once('=')
                            .ok_or_else(|| anyhow!("Wrong env spec"))
                    })
                    .collect::<Result<Vec<_>>>()?,
            )
            .log_invocation(1)
            .status()?;
        if !status.success() {
            bail!("Test failed")
        }
        Ok(BuildBundle {
            id: build.runnable.id.clone(),
            bundle_dir: bundle_path.to_path_buf(),
            bundle_exe: build.runnable.exe.to_path_buf(),
            lib_dir: build.target_path.clone(),
            root_dir: root_dir.clone(),
        })
    }
}
impl DeviceCompatibility for ScriptDevice {
    /// A script device is compatible with a regular platform only when its
    /// configured platform id matches exactly; with no configured platform it
    /// is compatible with nothing.
    fn is_compatible_with_regular_platform(&self, platform: &RegularPlatform) -> bool {
        match self.conf.platform {
            Some(ref configured) => *configured == platform.id,
            None => false,
        }
    }
}
impl Display for ScriptDevice {
    /// A script device displays as its identifier.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(&self.id)
    }
}
|
#[doc(inline)]
pub use config::Config;
#[doc(inline)]
pub use scraper::{LoadedData, Scraper};
pub mod config;
pub mod data;
pub mod scraper;
|
extern crate itertools;
#[cfg(test)]
extern crate quickcheck;
extern crate llvm_sys as llvm;
pub mod ir;
pub mod opt;
pub mod backend;
#[cfg(test)]
mod tests {
    use super::{ir, backend, opt};
    use ir::Atom;
    use quickcheck::{quickcheck, TestResult};
    use std::io::Cursor;
    /// Iteration cap so generated non-terminating programs still finish.
    const LOOP_LIMIT: usize = 255 * 4;
    /// Runs `ir` under the bounded interpreter with `input` as the program's
    /// input stream and returns the bytes it wrote, or a formatted error.
    // NOTE(review): `&Vec<_>` params kept as-is because `use_backend`'s exact
    // signature isn't visible here; `&[Atom]`/`&[u8]` would be more idiomatic.
    fn get_output(ir: &Vec<Atom>, input: &Vec<u8>) -> Result<Vec<u8>, String> {
        let mut output_buf = Cursor::new(Vec::<u8>::new());
        let result = {
            let interpreter = backend::Interpreter::new(
                Cursor::new(input),
                &mut output_buf,
                Some(LOOP_LIMIT)
            );
            backend::use_backend(interpreter, &ir)
        };
        match result {
            Ok(_) => Ok(output_buf.into_inner()),
            Err(err) => Err(format!("{:?}", err)),
        }
    }
    /// Property: optimization must not change observable program output.
    #[test]
    fn quickcheck_opt_no_change() {
        fn opt_no_change(prog: Vec<u8>, input: Vec<u8>) -> TestResult {
            // Discard oversized programs *before* paying for IR construction;
            // the original built (and then discarded) the IR first.
            const MAX_PROG_SIZE: usize = 1_000_000;
            if prog.len() >= MAX_PROG_SIZE {
                return TestResult::discard();
            }
            let ir = if let Ok(ir) = ir::build_ir(&prog) {
                ir
            } else {
                return TestResult::discard();
            };
            let opt_ir = opt::run_opts(ir.clone());
            let normal_output = get_output(&ir, &input);
            let opt_output = get_output(&opt_ir, &input);
            TestResult::from_bool(normal_output == opt_output)
        }
        quickcheck(opt_no_change as fn(Vec<u8>, Vec<u8>) -> TestResult);
    }
    /// Property: the optimizer is idempotent (a second pass changes nothing).
    #[test]
    fn quickcheck_opt_idempotent() {
        fn opt_idempotent(prog: Vec<u8>) -> TestResult {
            let ir = if let Ok(ir) = ir::build_ir(&prog) {
                ir
            } else {
                return TestResult::discard();
            };
            let opt1 = opt::run_opts(ir);
            let opt2 = opt::run_opts(opt1.clone());
            TestResult::from_bool(opt1 == opt2)
        }
        quickcheck(opt_idempotent as fn(Vec<u8>) -> TestResult);
    }
}
|
use crate::allocator::block::Block;
use crate::prelude::*;
use utilities::prelude::*;
use std::marker::PhantomData;
use std::sync::Arc;
/// A typed handle to a range of Vulkan device memory obtained from the
/// device's block allocator; freed back to the allocator on drop.
#[derive(Debug)]
pub struct Memory<T> {
    device: Arc<Device>,
    // Allocator block backing this memory (device memory handle + offset).
    pub(crate) block: Block,
    // Marks the element type without storing any `T` values.
    data_type: PhantomData<T>,
}
impl<T> Memory<T> {
    /// Allocates memory for `buffer` using caller-supplied requirements
    /// (rather than querying them from the buffer) and binds the buffer to it.
    pub(crate) fn forced_requirements(
        device: &Arc<Device>,
        memory_properties: VkMemoryPropertyFlagBits,
        buffer: VkBuffer,
        memory_requirements: VkMemoryRequirements,
    ) -> VerboseResult<Arc<Memory<T>>> {
        let memory = Self::new(device, memory_requirements, memory_properties)?;
        device.bind_buffer_memory(buffer, memory.block.memory(), memory.block.offset)?;
        Ok(memory)
    }
    /// Allocates and binds memory for `buffer`, querying the buffer's own
    /// memory requirements first.
    pub(crate) fn buffer_memory(
        device: &Arc<Device>,
        memory_properties: VkMemoryPropertyFlagBits,
        buffer: VkBuffer,
    ) -> VerboseResult<Arc<Memory<T>>> {
        let memory_requirements = device.buffer_memory_requirements(buffer);
        Self::forced_requirements(device, memory_properties, buffer, memory_requirements)
    }
    /// Allocates and binds memory for `image`, querying the image's
    /// memory requirements first.
    pub(crate) fn image_memory(
        device: &Arc<Device>,
        memory_properties: VkMemoryPropertyFlagBits,
        image: VkImage,
    ) -> VerboseResult<Arc<Memory<T>>> {
        let memory_requirements = device.image_memory_requirements(image);
        let memory = Self::new(device, memory_requirements, memory_properties)?;
        device.bind_image_memory(image, memory.block.memory(), memory.block.offset)?;
        Ok(memory)
    }
    /// Picks a compatible memory type and carves a suitably aligned block
    /// out of the device's allocator.
    fn new(
        device: &Arc<Device>,
        memory_requirements: VkMemoryRequirements,
        memory_properties: VkMemoryPropertyFlagBits,
    ) -> VerboseResult<Arc<Memory<T>>> {
        let memory_type_index = device
            .memory_type_from_properties(memory_requirements.memoryTypeBits, memory_properties)?;
        let block = Device::allocate_memory_from_allocator(
            device,
            memory_requirements.size,
            memory_type_index,
            memory_requirements.alignment,
        )?;
        Ok(Arc::new(Memory {
            device: device.clone(),
            block,
            data_type: PhantomData,
        }))
    }
    /// The raw `VkDeviceMemory` handle backing this allocation's block.
    pub(crate) fn vk_handle(&self) -> VkDeviceMemory {
        self.block.memory()
    }
}
impl<T> VulkanDevice for Memory<T> {
    /// The device this memory was allocated from.
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
impl<T: Clone> Memory<T> {
    /// Maps `length` elements of the underlying block for host access.
    pub fn map(&self, length: VkDeviceSize) -> VerboseResult<VkMappedMemory<'_, T>> {
        self.block.map(length)
    }
}
impl<T> Drop for Memory<T> {
    fn drop(&mut self) {
        // Return the block to the device's allocator.
        // NOTE(review): `unwrap` here panics (and may abort during unwind) if
        // deallocation fails — confirm the allocator cannot fail for blocks
        // it previously handed out.
        self.device.free_memory_from_allocator(&self.block).unwrap();
    }
}
|
use super::*;
/// A parsed method definition: its name, parameter list and body AST node.
#[derive(Debug, PartialEq)]
pub struct Method {
    name: String,
    params: MethodParameters,
    body: Node,
}
/// The grouped parameter kinds a method accepts.
// NOTE(review): the field names suggest Ruby-style parameter lists
// (required / optional-with-default / *array / &proc) — confirm.
#[derive(Debug, PartialEq)]
pub struct MethodParameters {
    // Plain positional parameters (names only).
    pub required: Vec<String>,
    // Parameters that carry a default-value expression.
    pub optional: Vec<Parameter>,
    // Name of the splat/rest parameter, if any.
    pub array: Option<String>,
    // Name of the block/proc parameter, if any.
    pub proc: Option<String>,
}
/// A single named parameter, optionally carrying a default-value expression.
#[derive(Debug, PartialEq)]
pub struct Parameter {
    pub name: String,
    pub default_value: Option<Box<Node>>,
}
#[allow(dead_code)]
impl Parameter {
pub(crate) fn new_required(name: &str) -> String {
name.to_owned()
}
pub(crate) fn new_optional(name: &str, default_value: Node) -> Self {
Self {
name: name.to_owned(),
default_value: Some(Box::new(default_value)),
}
}
}
|
use crate::input_error::InputError;
/// An Advent-of-Code day-1 style expense report: a list of entry values that
/// can be searched for pairs/triples summing to a target.
pub struct ExpenseReport {
    values: Vec<usize>,
}
impl ExpenseReport {
    /// Parses each string into a `usize`; the first unparsable entry aborts
    /// construction with `InputError::Parse`.
    pub fn new(values: &Vec<String>) -> Result<ExpenseReport, InputError> {
        // Fallible collect short-circuits on the first parse error, replacing
        // the manual push loop.
        let parsed_values = values
            .iter()
            .map(|value| value.parse::<usize>().map_err(InputError::Parse))
            .collect::<Result<Vec<usize>, InputError>>()?;
        Ok(ExpenseReport {
            values: parsed_values,
        })
    }
    /// Returns the product of the first two values summing to `target_sum`,
    /// or 0 if no such pair exists. Values above the target are pruned from
    /// `self.values` first (as before — the pruning is a visible side effect).
    pub fn product_from_target_two_sum(&mut self, target_sum: usize) -> usize {
        self.values.retain(|&v| v <= target_sum);
        for value1 in &self.values {
            // `find` keeps the original first-match semantics without the
            // needless intermediate `collect::<Vec<_>>()` allocation.
            if let Some(value2) = self.values.iter().find(|&&v| value1 + v == target_sum) {
                return value1 * value2;
            }
        }
        0
    }
    /// Returns the product of the first three values summing to `target_sum`,
    /// or 0 if no such triple exists. Prunes values above the target first.
    pub fn product_from_target_three_sum(&mut self, target_sum: usize) -> usize {
        self.values.retain(|&v| v <= target_sum);
        for value1 in &self.values {
            for value2 in self.values.iter().filter(|&&v| value1 + v <= target_sum) {
                if let Some(value3) = self
                    .values
                    .iter()
                    .find(|&&v| value1 + value2 + v == target_sum)
                {
                    return value1 * value2 * value3;
                }
            }
        }
        0
    }
}
#[cfg(test)]
mod tests {
    use super::InputError;
    use super::ExpenseReport;
    /// AoC 2020 day 1 part 1 sample: 1721 + 299 = 2020, product 514579.
    #[test]
    fn product_from_target_two_sum() -> Result<(), InputError> {
        let values = vec!(
            String::from("1721"),
            String::from("979"),
            String::from("366"),
            String::from("299"),
            String::from("675"),
            String::from("1456"),
        );
        let mut expense_report = ExpenseReport::new(&values)?;
        let target_sum = 2020;
        let actual = expense_report.product_from_target_two_sum(target_sum);
        assert_eq!(514579, actual);
        return Ok(());
    }
    /// AoC 2020 day 1 part 2 sample: 979 + 366 + 675 = 2020, product 241861950.
    #[test]
    fn product_from_target_three_sum() -> Result<(), InputError> {
        let values = vec!(
            String::from("1721"),
            String::from("979"),
            String::from("366"),
            String::from("299"),
            String::from("675"),
            String::from("1456"),
        );
        let mut expense_report = ExpenseReport::new(&values)?;
        let target_sum = 2020;
        let actual = expense_report.product_from_target_three_sum(target_sum);
        assert_eq!(241861950, actual);
        return Ok(());
    }
}
|
use std::fs;
use std::path::Path;
/// Guard that tracks a test-owned file; the file is deleted (best effort)
/// when the guard is dropped.
pub struct TestResources {
    file: String,
}
impl TestResources {
    /// Registers `file` for cleanup when the returned guard goes out of scope.
    pub fn new(file: &str) -> TestResources {
        TestResources {
            file: file.to_string(),
        }
    }
}
impl Drop for TestResources {
    /// Best-effort removal of the tracked file; a failed removal is
    /// deliberately ignored so teardown never panics.
    fn drop(&mut self) {
        let target = Path::new(&self.file);
        if target.exists() {
            let _ignore = fs::remove_file(target);
        }
    }
}
// Binds a scope-local cleanup guard: the named file is removed when the
// enclosing scope ends (via `TestResources`' `Drop`).
#[macro_export]
macro_rules! set_test_rsc {
    ($file_name:expr) => {
        let _test_rsc = common::TestResources::new($file_name);
    };
}
// Converts an integer test parameter into the corresponding
// `SerializationMethod` variant.
#[macro_export]
macro_rules! ser_method {
    ($ser_method_int:expr) => {
        SerializationMethod::from($ser_method_int)
    };
}
// Standard test preamble: derives a per-test database file name from the
// test function name and serialization method, and registers it for cleanup.
#[macro_export]
macro_rules! test_setup {
    ($function_name:expr, $ser_method_int:expr, $db_name:ident) => {
        let $db_name = format!(
            "{}_{}.db",
            $function_name,
            ser_method!($ser_method_int).to_string()
        );
        set_test_rsc!(&$db_name);
    };
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Fast path, main can see the concrete type returned.
// NOTE: this is a compiler regression test — the *declaration order* of
// `before`/`main`/`after` is the point of the test; do not reorder.
fn before() -> impl FnMut(i32) {
    // The closure captures the Box, so the opaque type is Send iff Box is.
    let mut p = Box::new(0);
    move |x| *p = x
}
fn send<T: Send>(_: T) {}
fn main() {
    send(before());
    send(after());
}
// Deferred path, main has to wait until typeck finishes,
// to check if the return type of after is Send.
fn after() -> impl FnMut(i32) {
    let mut p = Box::new(0);
    move |x| *p = x
}
|
use tree_sitter::Node;
use crate::lint::core::Filter;
/// A no-op filter that accepts every node (filters out nothing).
pub struct NothingFilter;
impl Filter for NothingFilter {
    fn filter(&self, _node: &Node, _source: &str) -> bool {
        true
    }
}
|
use digest::Digest;
use num_bigint::BigUint;
use rand::{thread_rng, Rng};
// This is mostly taken from https://github.com/RustCrypto/RSA/pull/18
// For the love of crypto, please delete as much of this as possible and use the RSA crate
// directly when that PR is merged
/// RSA-OAEP encrypts `message` under the PEM-encoded public `key` (as sent
/// by MySQL during caching_sha2/sha256 auth), using digest `D` for OAEP.
pub fn encrypt<D: Digest>(key: &[u8], message: &[u8]) -> crate::Result<Vec<u8>> {
    // The key arrives as raw bytes but must be PEM text.
    let key = std::str::from_utf8(key).map_err(|_err| {
        // TODO(@abonander): protocol_err doesn't like referring to [err]
        protocol_err!("unexpected error decoding what should be UTF-8")
    })?;
    let key = parse(key)?;
    Ok(oaep_encrypt::<_, D>(&mut thread_rng(), &key, message)?)
}
// https://github.com/RustCrypto/RSA/blob/9f1464c43831d422d9903574aad6ab072db9f2b0/src/internals.rs#L12
/// Raw (textbook) RSA: computes `m^e mod n` with the public exponent.
fn internals_encrypt(key: &PublicKey, m: &BigUint) -> BigUint {
    m.modpow(&key.e, &key.n)
}
// https://github.com/RustCrypto/RSA/blob/9f1464c43831d422d9903574aad6ab072db9f2b0/src/internals.rs#L184
/// Copies `src` into the tail of `dest`, zero-filling the leading bytes.
///
/// # Panics
/// Panics if `src` is longer than `dest` (the length subtraction underflows).
fn internals_copy_with_left_pad(dest: &mut [u8], src: &[u8]) {
    // left pad with zeros
    let padding_bytes = dest.len() - src.len();
    for el in &mut dest[..padding_bytes] {
        *el = 0;
    }
    dest[padding_bytes..].copy_from_slice(src);
}
// https://github.com/RustCrypto/RSA/blob/9f1464c43831d422d9903574aad6ab072db9f2b0/src/oaep.rs#L13
/// Increments the first four bytes of `counter` as a single big-endian u32,
/// wrapping to zero on overflow (MGF1 counter semantics). Replaces the
/// 28-line hand-rolled byte-carry cascade with an equivalent integer add.
///
/// # Panics
/// Panics if `counter` has fewer than four bytes — exactly as the original
/// did when it indexed `counter[3]`.
fn internals_inc_counter(counter: &mut [u8]) {
    let word = u32::from_be_bytes([counter[0], counter[1], counter[2], counter[3]]);
    counter[..4].copy_from_slice(&word.wrapping_add(1).to_be_bytes());
}
// https://github.com/RustCrypto/RSA/blob/9f1464c43831d422d9903574aad6ab072db9f2b0/src/oaep.rs#L46
/// MGF1 mask generation: XORs `out` with successive digests of
/// `seed || counter`, bumping the 4-byte big-endian counter per digest block.
fn oeap_mgf1_xor<D: Digest>(out: &mut [u8], digest: &mut D, seed: &[u8]) {
    let mut counter = vec![0u8; 4];
    // The seed prefix of the digest input never changes, so build the buffer
    // once and rewrite only the counter suffix each iteration (the original
    // reallocated and recopied the whole buffer per digest block).
    let mut digest_input = vec![0u8; seed.len() + 4];
    digest_input[0..seed.len()].copy_from_slice(seed);
    let mut i = 0;
    while i < out.len() {
        digest_input[seed.len()..].copy_from_slice(&counter);
        digest.input(digest_input.as_slice());
        let digest_output = &*digest.result_reset();
        // XOR this digest block into `out`, stopping at either boundary.
        let mut j = 0;
        while j < digest_output.len() && i < out.len() {
            out[i] ^= digest_output[j];
            j += 1;
            i += 1;
        }
        internals_inc_counter(counter.as_mut_slice());
    }
}
// https://github.com/RustCrypto/RSA/blob/9f1464c43831d422d9903574aad6ab072db9f2b0/src/oaep.rs#L75
/// RSA-OAEP encryption of `msg` under `pub_key` with a fresh random seed.
fn oaep_encrypt<R: Rng, D: Digest>(
    rng: &mut R,
    pub_key: &PublicKey,
    msg: &[u8],
) -> crate::Result<Vec<u8>> {
    // size of [n] in bytes
    let k = (pub_key.n.bits() + 7) / 8;
    let mut digest = D::new();
    let h_size = D::output_size();
    // OAEP can carry at most k - 2*hLen - 2 message bytes.
    if msg.len() > k - 2 * h_size - 2 {
        return Err(protocol_err!("mysql: password too long").into());
    }
    // Encoded message EM = 0x00 || seed (hLen bytes) || DB (k - hLen - 1 bytes).
    let mut em = vec![0u8; k];
    let (_, payload) = em.split_at_mut(1);
    let (seed, db) = payload.split_at_mut(h_size);
    rng.fill(seed);
    // Data block DB = pHash || PS || 01 || M
    let db_len = k - h_size - 1;
    // pHash = Hash of the (empty) OAEP label.
    let p_hash = digest.result_reset();
    db[0..h_size].copy_from_slice(&*p_hash);
    db[db_len - msg.len() - 1] = 1;
    db[db_len - msg.len()..].copy_from_slice(msg);
    // Mask DB with MGF1(seed), then mask the seed with MGF1(masked DB).
    oeap_mgf1_xor(db, &mut digest, seed);
    oeap_mgf1_xor(seed, &mut digest, db);
    {
        // Textbook RSA on EM, then left-pad the ciphertext back to k bytes.
        let m = BigUint::from_bytes_be(&em);
        let c = internals_encrypt(pub_key, &m).to_bytes_be();
        internals_copy_with_left_pad(&mut em, &c);
    }
    Ok(em)
}
/// A bare RSA public key: modulus `n` and public exponent `e`.
#[derive(Debug)]
struct PublicKey {
    n: BigUint,
    e: BigUint,
}
/// Extracts `(n, e)` from a PEM-encoded PKCS#8 RSA public key as MySQL
/// sends it, by slicing fixed offsets out of the base64-decoded DER.
fn parse(key: &str) -> crate::Result<PublicKey> {
    // This takes advantage of the knowledge that we know
    // we are receiving a PKCS#8 RSA Public Key at all
    // times from MySQL
    if !key.starts_with("-----BEGIN PUBLIC KEY-----\n") {
        return Err(protocol_err!(
            "unexpected format for RSA Public Key from MySQL (expected PKCS#8); first line: {:?}",
            // `lines().next()` yields the actual first line; the previous
            // `splitn(1, '\n').next()` returned the *entire* key, making the
            // error message useless.
            key.lines().next()
        )
        .into());
    }
    let key_with_trailer = key.trim_start_matches("-----BEGIN PUBLIC KEY-----\n");
    // Everything up to the "-----END ..." trailer is the base64 body.
    let trailer_pos = key_with_trailer.find('-').unwrap_or(0);
    let inner_key = key_with_trailer[..trailer_pos].replace('\n', "");
    let inner = base64::decode(&inner_key).map_err(|_err| {
        // TODO(@abonander): protocol_err doesn't like referring to [err]
        protocol_err!("unexpected error decoding what should be base64-encoded data")
    })?;
    // NOTE(review): the fixed offsets below assume a 2048-bit modulus
    // (257 bytes incl. leading zero) and a 3-byte exponent at the tail of
    // the DER — confirm MySQL never sends other key sizes.
    let len = inner.len();
    let n_bytes = &inner[(len - 257 - 5)..(len - 5)];
    let e_bytes = &inner[(len - 3)..];
    let n = BigUint::from_bytes_be(n_bytes);
    let e = BigUint::from_bytes_be(e_bytes);
    Ok(PublicKey { n, e })
}
#[cfg(test)]
mod tests {
    use super::{BigUint, PublicKey};
    use rand::rngs::adapter::ReadRng;
    use sha1::Sha1;
    // PEM-encoded 2048-bit RSA public key exactly as MySQL would send it.
    const INPUT: &str = "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv9E+l0oFIoGnZmu6bdil\nI3WK79iug/hukj5QrWRrJVVCHL8rRxNsQGYPvQfXgqEnJW0Rqy2BBebNrnSMduny\nCazz1KM1h57hSI1xHGhg/o82Us1j9fUucKo0Pt3vg7xjVVcN0j1bwr96gEbt6B4Q\nt4eKZBhtle1bgoBcqFBhGfU17cnedSzMUCutM+kXTzzOTplKoqXeJpEZDTX8AP9F\nQ9JkoA22yTn8H2GROIAffm1UQS7DXXjI5OnzBJNs72oNSeK8i72xLkoSdfVw3vCu\ni+mpt4LJgAZLvzc2O4nLzu4Bljb+Mrch34HSWyxOfWzt1v9vpJfEVQ2/VZaIng6U\nUQIDAQAB\n-----END PUBLIC KEY-----\n";
    /// `parse` must extract the expected modulus and exponent byte strings.
    #[test]
    fn it_parses() {
        let key = super::parse(INPUT).unwrap();
        let n = &[
            0xbf, 0xd1, 0x3e, 0x97, 0x4a, 0x5, 0x22, 0x81, 0xa7, 0x66, 0x6b, 0xba, 0x6d, 0xd8,
            0xa5, 0x23, 0x75, 0x8a, 0xef, 0xd8, 0xae, 0x83, 0xf8, 0x6e, 0x92, 0x3e, 0x50, 0xad,
            0x64, 0x6b, 0x25, 0x55, 0x42, 0x1c, 0xbf, 0x2b, 0x47, 0x13, 0x6c, 0x40, 0x66, 0xf,
            0xbd, 0x7, 0xd7, 0x82, 0xa1, 0x27, 0x25, 0x6d, 0x11, 0xab, 0x2d, 0x81, 0x5, 0xe6, 0xcd,
            0xae, 0x74, 0x8c, 0x76, 0xe9, 0xf2, 0x9, 0xac, 0xf3, 0xd4, 0xa3, 0x35, 0x87, 0x9e,
            0xe1, 0x48, 0x8d, 0x71, 0x1c, 0x68, 0x60, 0xfe, 0x8f, 0x36, 0x52, 0xcd, 0x63, 0xf5,
            0xf5, 0x2e, 0x70, 0xaa, 0x34, 0x3e, 0xdd, 0xef, 0x83, 0xbc, 0x63, 0x55, 0x57, 0xd,
            0xd2, 0x3d, 0x5b, 0xc2, 0xbf, 0x7a, 0x80, 0x46, 0xed, 0xe8, 0x1e, 0x10, 0xb7, 0x87,
            0x8a, 0x64, 0x18, 0x6d, 0x95, 0xed, 0x5b, 0x82, 0x80, 0x5c, 0xa8, 0x50, 0x61, 0x19,
            0xf5, 0x35, 0xed, 0xc9, 0xde, 0x75, 0x2c, 0xcc, 0x50, 0x2b, 0xad, 0x33, 0xe9, 0x17,
            0x4f, 0x3c, 0xce, 0x4e, 0x99, 0x4a, 0xa2, 0xa5, 0xde, 0x26, 0x91, 0x19, 0xd, 0x35,
            0xfc, 0x0, 0xff, 0x45, 0x43, 0xd2, 0x64, 0xa0, 0xd, 0xb6, 0xc9, 0x39, 0xfc, 0x1f, 0x61,
            0x91, 0x38, 0x80, 0x1f, 0x7e, 0x6d, 0x54, 0x41, 0x2e, 0xc3, 0x5d, 0x78, 0xc8, 0xe4,
            0xe9, 0xf3, 0x4, 0x93, 0x6c, 0xef, 0x6a, 0xd, 0x49, 0xe2, 0xbc, 0x8b, 0xbd, 0xb1, 0x2e,
            0x4a, 0x12, 0x75, 0xf5, 0x70, 0xde, 0xf0, 0xae, 0x8b, 0xe9, 0xa9, 0xb7, 0x82, 0xc9,
            0x80, 0x6, 0x4b, 0xbf, 0x37, 0x36, 0x3b, 0x89, 0xcb, 0xce, 0xee, 0x1, 0x96, 0x36, 0xfe,
            0x32, 0xb7, 0x21, 0xdf, 0x81, 0xd2, 0x5b, 0x2c, 0x4e, 0x7d, 0x6c, 0xed, 0xd6, 0xff,
            0x6f, 0xa4, 0x97, 0xc4, 0x55, 0xd, 0xbf, 0x55, 0x96, 0x88, 0x9e, 0xe, 0x94, 0x51,
        ][..];
        let e = &[0x1, 0x0, 0x1][..];
        assert_eq!(key.n.to_bytes_be(), n);
        assert_eq!(key.e.to_bytes_be(), e);
    }
    /// Deterministic OAEP-SHA1 known-answer test: the RNG is replaced by a
    /// fixed seed reader, so the ciphertext must match the published vector.
    #[test]
    fn it_encrypts_sha1() {
        // https://github.com/pyca/cryptography/blob/master/vectors/cryptography_vectors/asymmetric/RSA/pkcs-1v2-1d2-vec/oaep-int.txt
        let n = BigUint::from_bytes_be(&[
            0xbb, 0xf8, 0x2f, 0x09, 0x06, 0x82, 0xce, 0x9c, 0x23, 0x38, 0xac, 0x2b, 0x9d, 0xa8,
            0x71, 0xf7, 0x36, 0x8d, 0x07, 0xee, 0xd4, 0x10, 0x43, 0xa4, 0x40, 0xd6, 0xb6, 0xf0,
            0x74, 0x54, 0xf5, 0x1f, 0xb8, 0xdf, 0xba, 0xaf, 0x03, 0x5c, 0x02, 0xab, 0x61, 0xea,
            0x48, 0xce, 0xeb, 0x6f, 0xcd, 0x48, 0x76, 0xed, 0x52, 0x0d, 0x60, 0xe1, 0xec, 0x46,
            0x19, 0x71, 0x9d, 0x8a, 0x5b, 0x8b, 0x80, 0x7f, 0xaf, 0xb8, 0xe0, 0xa3, 0xdf, 0xc7,
            0x37, 0x72, 0x3e, 0xe6, 0xb4, 0xb7, 0xd9, 0x3a, 0x25, 0x84, 0xee, 0x6a, 0x64, 0x9d,
            0x06, 0x09, 0x53, 0x74, 0x88, 0x34, 0xb2, 0x45, 0x45, 0x98, 0x39, 0x4e, 0xe0, 0xaa,
            0xb1, 0x2d, 0x7b, 0x61, 0xa5, 0x1f, 0x52, 0x7a, 0x9a, 0x41, 0xf6, 0xc1, 0x68, 0x7f,
            0xe2, 0x53, 0x72, 0x98, 0xca, 0x2a, 0x8f, 0x59, 0x46, 0xf8, 0xe5, 0xfd, 0x09, 0x1d,
            0xbd, 0xcb,
        ]);
        let e = BigUint::from_bytes_be(&[0x11]);
        let pub_key = PublicKey { n, e };
        let message = &[
            0xd4, 0x36, 0xe9, 0x95, 0x69, 0xfd, 0x32, 0xa7, 0xc8, 0xa0, 0x5b, 0xbc, 0x90, 0xd3,
            0x2c, 0x49,
        ];
        let seed = &[
            0xaa, 0xfd, 0x12, 0xf6, 0x59, 0xca, 0xe6, 0x34, 0x89, 0xb4, 0x79, 0xe5, 0x07, 0x6d,
            0xde, 0xc2, 0xf0, 0x6c, 0xb5, 0x8f,
        ][..];
        let mut rng = ReadRng::new(seed);
        let cipher_text = super::oaep_encrypt::<_, Sha1>(&mut rng, &pub_key, message).unwrap();
        let expected_cipher_text = &[
            0x12, 0x53, 0xe0, 0x4d, 0xc0, 0xa5, 0x39, 0x7b, 0xb4, 0x4a, 0x7a, 0xb8, 0x7e, 0x9b,
            0xf2, 0xa0, 0x39, 0xa3, 0x3d, 0x1e, 0x99, 0x6f, 0xc8, 0x2a, 0x94, 0xcc, 0xd3, 0x00,
            0x74, 0xc9, 0x5d, 0xf7, 0x63, 0x72, 0x20, 0x17, 0x06, 0x9e, 0x52, 0x68, 0xda, 0x5d,
            0x1c, 0x0b, 0x4f, 0x87, 0x2c, 0xf6, 0x53, 0xc1, 0x1d, 0xf8, 0x23, 0x14, 0xa6, 0x79,
            0x68, 0xdf, 0xea, 0xe2, 0x8d, 0xef, 0x04, 0xbb, 0x6d, 0x84, 0xb1, 0xc3, 0x1d, 0x65,
            0x4a, 0x19, 0x70, 0xe5, 0x78, 0x3b, 0xd6, 0xeb, 0x96, 0xa0, 0x24, 0xc2, 0xca, 0x2f,
            0x4a, 0x90, 0xfe, 0x9f, 0x2e, 0xf5, 0xc9, 0xc1, 0x40, 0xe5, 0xbb, 0x48, 0xda, 0x95,
            0x36, 0xad, 0x87, 0x00, 0xc8, 0x4f, 0xc9, 0x13, 0x0a, 0xde, 0xa7, 0x4e, 0x55, 0x8d,
            0x51, 0xa7, 0x4d, 0xdf, 0x85, 0xd8, 0xb5, 0x0d, 0xe9, 0x68, 0x38, 0xd6, 0x06, 0x3e,
            0x09, 0x55,
        ][..];
        assert_eq!(&*expected_cipher_text, &*cipher_text);
    }
}
|
extern crate futures;
extern crate hyper;
extern crate hyper_tls;
extern crate tokio_core;
use futures::{Future, Stream};
use std::io::Write;
/// Fetches https://hyper.rs over TLS and streams status, headers and body
/// to stdout using the tokio-core reactor.
fn main() {
    // Single-threaded reactor that drives the whole request to completion.
    let mut core = tokio_core::reactor::Core::new().unwrap();
    let handle = core.handle();
    // HTTPS-capable client; the connector uses four DNS worker threads.
    let client = hyper::Client::configure()
        .connector(hyper_tls::HttpsConnector::new(4, &handle).unwrap())
        .build(&handle);
    let uri = "https://hyper.rs".parse().unwrap();
    let work = client.get(uri).and_then(|res| {
        println!("Status: {}", res.status());
        println!("Headers:\n{}", res.headers());
        // Stream body chunks straight to stdout as they arrive.
        res.body().for_each(|chunk| {
            ::std::io::stdout()
                .write_all(&chunk)
                .map(|_| ())
                .map_err(From::from)
        })
    });
    core.run(work).unwrap();
}
|
pub mod point;
pub mod fpoint;
pub mod ipoint;
pub mod irange;
pub mod pointrng; |
//! World deserialization types.
use serde::{
de::{MapAccess, Visitor},
Deserialize, Deserializer,
};
use super::{
archetypes::de::ArchetypeLayoutDeserializer, entities::de::EntitiesLayoutDeserializer,
EntitySerializer, UnknownType, WorldField,
};
use crate::{
internals::{
storage::{archetype::EntityLayout, component::ComponentTypeId},
world::World,
},
serialize::set_entity_serializer,
storage::UnknownComponentWriter,
};
/// Describes a type which knows how to deserialize the components in a world.
pub trait WorldDeserializer {
    /// The stable type ID used to identify each component type in the serialized data.
    type TypeId: for<'de> Deserialize<'de>;
    /// Converts the serialized type ID into a runtime component type ID.
    /// Returns `UnknownType` when the ID is not registered with this deserializer.
    fn unmap_id(&self, type_id: &Self::TypeId) -> Result<ComponentTypeId, UnknownType>;
    /// Adds the specified component to the given entity layout.
    fn register_component(&self, type_id: Self::TypeId, layout: &mut EntityLayout);
    /// Deserializes a slice of components and inserts them into the given storage.
    fn deserialize_component_slice<'a, 'de, D: Deserializer<'de>>(
        &self,
        type_id: ComponentTypeId,
        storage: UnknownComponentWriter<'a>,
        deserializer: D,
    ) -> Result<(), D::Error>;
    /// Deserializes a single component and returns it as a boxed u8 slice.
    fn deserialize_component<'de, D: Deserializer<'de>>(
        &self,
        type_id: ComponentTypeId,
        deserializer: D,
    ) -> Result<Box<[u8]>, D::Error>;
}
/// Serde visitor that deserializes a serialized world map directly into an
/// existing [`World`], decoding components via `world_deserializer` and
/// mapping entity IDs via `entity_serializer`.
pub struct WorldVisitor<'a, W: WorldDeserializer, E: EntitySerializer> {
    pub world: &'a mut World,
    pub world_deserializer: &'a W,
    pub entity_serializer: &'a E,
}
impl<'a, 'de, W: WorldDeserializer, E: EntitySerializer> Visitor<'de> for WorldVisitor<'a, W, E> {
    type Value = ();
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("map")
    }
    /// Dispatches each top-level world field (`Packed` archetypes or
    /// `Entities`) to its dedicated seed deserializer, with the entity
    /// serializer installed for the duration of the visit.
    fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
    where
        V: MapAccess<'de>,
    {
        // The actual deserialization loop, extracted so it can be invoked
        // from inside the `set_entity_serializer` closure below.
        fn run<'a, 'de, W: WorldDeserializer, V: MapAccess<'de>>(
            world_deserializer: &'a W,
            world: &'a mut World,
            mut map: V,
        ) -> Result<(), V::Error> {
            while let Some(key) = map.next_key()? {
                match key {
                    WorldField::Packed => {
                        map.next_value_seed(ArchetypeLayoutDeserializer {
                            world_deserializer,
                            world,
                        })?;
                    }
                    WorldField::Entities => {
                        map.next_value_seed(EntitiesLayoutDeserializer {
                            world_deserializer,
                            world,
                        })?;
                    }
                }
            }
            Ok(())
        }
        // `hoist` smuggles `run`'s Result out of the closure; `map_hoist`
        // moves the MapAccess (FnOnce-style capture) into it exactly once.
        let mut hoist = core::cell::Cell::new(None);
        let hoist_ref = &mut hoist;
        // since it's a double closure, and one is FnMut and the inner one is FnOnce,
        // we need to do some ugly hoisting
        let mut world = self.world;
        let mut map_hoist = Some(map);
        let world_deserializer = self.world_deserializer;
        let world_inner = &mut world;
        let hoist_ref_inner = &hoist_ref;
        set_entity_serializer(self.entity_serializer, || {
            let map = map_hoist.take().unwrap();
            hoist_ref_inner.set(Some(run(world_deserializer, *world_inner, map)));
        });
        // The closure is guaranteed to have run, so the Cell holds a result.
        hoist.into_inner().unwrap()
    }
}
|
use crate::sim::*;
// searches for the commit at which bisecting yields minimum expected entropy
pub struct MinExpectedEntropy;
impl MinExpectedEntropy {
    /// Stateless strategy: the simulation state is not needed to construct it.
    pub fn new(_: &SimulationState) -> Self { Self }
}
impl BisectStrategy for MinExpectedEntropy {
    /// Short identifier used to label this strategy in output.
    fn name(&self) -> String { String::from("entropy") }
    /// Delegates commit selection to the simulation's entropy-minimizing search.
    fn select_commit(&mut self, s: &SimulationState) -> usize {
        s.min_expected_entropy_binary_search()
    }
    /// Stateless strategy: bisect results carry no information to retain.
    fn notify_result(&mut self, _: BisectAttempt) {}
}
|
use franklin_crypto::{
bellman::{plonk::better_better_cs::cs::ConstraintSystem, Engine, SynthesisError},
plonk::circuit::{allocated_num::Num, linear_combination::LinearCombination},
};
/// In-circuit sponge construction (absorb/squeeze) over field elements,
/// parameterized by the sponge `RATE` and total state `WIDTH`.
pub trait SpongeGadget<E: Engine, const RATE: usize, const WIDTH: usize> {
    /// Fixes the capacity element for domain separation.
    /// `None` presumably selects the default specialization — confirm with impls.
    fn specialize(
        &mut self,
        capacity_value: Option<LinearCombination<E>>,
    ) -> Result<(), SynthesisError>;
    /// Absorbs `input` elements into the sponge state, adding constraints to `cs`.
    fn absorb<CS: ConstraintSystem<E>>(
        &mut self,
        cs: &mut CS,
        input: &[Num<E>],
    ) -> Result<(), SynthesisError>;
    /// Squeezes output elements; `number_of_elems` of `None` presumably
    /// yields an implementation-default count — confirm with impls.
    fn squeeze<CS: ConstraintSystem<E>>(
        &mut self,
        cs: &mut CS,
        number_of_elems: Option<usize>,
    ) -> Result<Vec<Num<E>>, SynthesisError>;
    /// Resets the sponge to its initial state.
    fn reset(&mut self);
}
|
use errors::{Error, ErrorKind, Result};
use futures::prelude::*;
use futures::sync::oneshot::Receiver;
use futures::sync::mpsc::UnboundedReceiver;
use proto::{MqttString, QualityOfService};
/// Future resolving to a single MQTT operation result, backed by a
/// futures-0.1 oneshot receiver delivering `Result<T>` from the event loop.
pub struct MqttFuture<T>(Receiver<Result<T>>);

impl<T> From<Receiver<Result<T>>> for MqttFuture<T> {
    fn from(value: Receiver<Result<T>>) -> Self {
        MqttFuture(value)
    }
}
impl<T> Future for MqttFuture<T> {
    type Item = T;
    type Error = Error;

    /// Polls the underlying oneshot. A cancelled/dropped sender is mapped to
    /// `LoopCommsError`; once the oneshot resolves, the inner `Result<T>` is
    /// surfaced directly.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // `try_ready!` yields the delivered `Result<T>`; `.map(|t| t.into())`
        // lifts the success value into the futures-0.1 `Async` wrapper
        // (presumably via `From<T> for Async<T>` — confirm against futures 0.1).
        try_ready!(self.0.poll().map_err(|_| Error::from(ErrorKind::LoopCommsError)))
            .map(|t| t.into())
    }
}
/// Stream of MQTT items, backed by a futures-0.1 unbounded channel
/// delivering `Result<T>` values from the event loop.
pub struct MqttStream<T>(UnboundedReceiver<Result<T>>);

impl<T> From<UnboundedReceiver<Result<T>>> for MqttStream<T> {
    fn from(value: UnboundedReceiver<Result<T>>) -> Self {
        MqttStream(value)
    }
}
impl<T> Stream for MqttStream<T> {
    type Item = T;
    type Error = Error;

    /// Polls the channel; a closed/errored channel maps to `LoopCommsError`,
    /// channel exhaustion ends the stream, and each delivered `Result<T>`
    /// either yields an item or propagates its error.
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        match try_ready!(self.0.poll().map_err(|_| Error::from(ErrorKind::LoopCommsError))) {
            // `.into()` wraps `Some(t)` into `Async` (presumably the futures-0.1
            // `From<T> for Async<T>` impl — confirm).
            Some(r) => r.map(|t| Some(t).into()),
            None => Ok(Async::Ready(None))
        }
    }
}
/// A single message delivered on a subscription: `(topic, payload)`.
pub struct SubItem(pub(crate) MqttString, pub(crate) Vec<u8>);

impl SubItem {
    /// Topic the message was published on.
    /// NOTE(review): returning `&String` only compiles if `MqttString` is an
    /// alias for `String` — confirm; `&str` would be more flexible for callers.
    pub fn topic(&self) -> &String {
        &self.0
    }

    /// Raw payload bytes of the message.
    pub fn payload(&self) -> &Vec<u8> {
        &self.1
    }
}
/// An active subscription: the broker-granted QoS plus the stream of
/// incoming messages for the subscribed topic.
pub struct Subscription {
    // QoS level granted by the broker for this subscription.
    qos: QualityOfService,
    // Stream of `(topic, payload)` items routed to this subscription.
    recv: MqttStream<SubItem>
}

impl Subscription {
    /// Crate-internal constructor, used once the subscription is acknowledged.
    pub(crate) fn new(qos: QualityOfService, recv: MqttStream<SubItem>) -> Subscription {
        Subscription {
            qos,
            recv
        }
    }

    /// The granted Quality of Service level.
    pub fn qos(&self) -> QualityOfService {
        self.qos.clone()
    }
}
impl Stream for Subscription {
    type Item = SubItem;
    type Error = Error;

    /// Forwards directly to the inner message stream.
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        self.recv.poll()
    }
}
/// Demonstrates passing a closure to a higher-order function:
/// prints `double_with_two(|x| x * 2)`, i.e. `4`.
fn main() {
    println!("{:?}", double_with_two(|x| x * 2));
}
/// Applies `f` to the constant input `2` and returns the result.
fn double_with_two<F>(f: F) -> i32
where
    F: Fn(i32) -> i32,
{
    let input = 2;
    f(input)
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// Compiler test: an if/else producing an array may be indexed directly in the
// initializing expression (`{ ... }[0]`), and `x` still infers to `u32`.
fn t1() -> u32 {
    let x;
    x = if true { [1, 2, 3] } else { [2, 3, 4] }[0];
    x
}
// Compiler test: with `;` inside both branches the if/else is a statement of
// type `()`, so the following `[0]` parses as a new expression — a one-element
// array literal — which is the function's return value.
fn t2() -> [u32; 1] {
    if true { [1, 2, 3]; } else { [2, 3, 4]; }
    [0]
}
// Compiler test: same shape with function pointers — the if/else yields an
// `F` (`fn() -> u32`) which is immediately invoked by the trailing `()`.
fn t3() -> u32 {
    let x;
    x = if true { i1 as F } else { i2 as F }();
    x
}
// Compiler test: with `;` inside the branches the casts are discarded
// statements, and the trailing `()` is just the unit value being returned.
fn t4() -> () {
    if true { i1 as F; } else { i2 as F; }
    ()
}
// Function-pointer type plus two trivial targets used by the cast tests.
type F = fn() -> u32;
fn i1() -> u32 { 1 }
fn i2() -> u32 { 2 }
fn main() {
    // t2/t4 only need to compile; t1/t3 must evaluate the `true` branch.
    assert_eq!(t1(), 1);
    assert_eq!(t3(), 1);
}
|
// Generated-style (svd2rust-like) reader/writer aliases for the ADC analog
// watchdog 3 threshold register.
#[doc = "Reader of register AWD3TR"]
pub type R = crate::R<u32, super::AWD3TR>;
#[doc = "Writer for register AWD3TR"]
pub type W = crate::W<u32, super::AWD3TR>;
#[doc = "Register AWD3TR `reset()`'s with value 0x0fff_0000"]
impl crate::ResetValue for super::AWD3TR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // HT3 (bits 16:27) resets to its maximum 0xfff; LT3 (bits 0:11) to 0.
        0x0fff_0000
    }
}
#[doc = "Reader of field `HT3`"]
pub type HT3_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `HT3`"]
pub struct HT3_W<'a> {
    // Borrow of the register writer; modified in place by `bits()`.
    w: &'a mut W,
}
impl<'a> HT3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 16:27, then insert the low 12 bits of `value` there.
        self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16);
        self.w
    }
}
#[doc = "Reader of field `LT3`"]
pub type LT3_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `LT3`"]
pub struct LT3_W<'a> {
    // Borrow of the register writer; modified in place by `bits()`.
    w: &'a mut W,
}
impl<'a> LT3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Clear bits 0:11, then insert the low 12 bits of `value` there.
        self.w.bits = (self.w.bits & !0x0fff) | ((value as u32) & 0x0fff);
        self.w
    }
}
impl R {
    #[doc = "Bits 16:27 - ADC analog watchdog 3 threshold high"]
    #[inline(always)]
    pub fn ht3(&self) -> HT3_R {
        HT3_R::new(((self.bits >> 16) & 0x0fff) as u16)
    }
    // Doc fixed: LT3 is the *low* threshold (previously mislabeled "high").
    #[doc = "Bits 0:11 - ADC analog watchdog 3 threshold low"]
    #[inline(always)]
    pub fn lt3(&self) -> LT3_R {
        LT3_R::new((self.bits & 0x0fff) as u16)
    }
}
impl W {
    #[doc = "Bits 16:27 - ADC analog watchdog 3 threshold high"]
    #[inline(always)]
    pub fn ht3(&mut self) -> HT3_W {
        HT3_W { w: self }
    }
    // Doc fixed: LT3 is the *low* threshold (previously mislabeled "high").
    #[doc = "Bits 0:11 - ADC analog watchdog 3 threshold low"]
    #[inline(always)]
    pub fn lt3(&mut self) -> LT3_W {
        LT3_W { w: self }
    }
}
|
use ndarray::prelude::*;
use petgraph::visit::{EdgeRef, IntoEdges, IntoNodeIdentifiers};
use std::{collections::HashMap, f32::INFINITY, hash::Hash};
/// All-pairs shortest path lengths via the Floyd–Warshall algorithm.
///
/// Returns an `n x n` matrix (`n` = number of nodes); pairs with no path keep
/// the value `INFINITY`, and every diagonal entry is forced to `0`.
///
/// NOTE(review): each edge weight is stored at `distance[[j, i]]`
/// (target row, source column), so for a *directed* graph this computes
/// distances on the reversed graph — confirm this transposed convention is
/// intended. For undirected graphs it is symmetric and makes no difference.
/// Parallel edges are not reduced with `min`; the last edge visited wins.
pub fn warshall_floyd<G, F>(graph: G, length: F) -> Array2<f32>
where
    G: IntoEdges + IntoNodeIdentifiers,
    G::NodeId: Eq + Hash,
    F: FnMut(G::EdgeRef) -> f32,
{
    // Dense index per node id, in node_identifiers() order.
    let indices = graph
        .node_identifiers()
        .enumerate()
        .map(|(i, u)| (u, i))
        .collect::<HashMap<_, _>>();
    let n = indices.len();
    let mut distance = Array::from_elem((n, n), INFINITY);
    let mut length = length;
    // Seed the matrix with direct edge lengths.
    for u in graph.node_identifiers() {
        for e in graph.edges(u) {
            let i = indices[&e.source()];
            let j = indices[&e.target()];
            let d = length(e);
            distance[[j, i]] = d;
        }
    }
    // Zero diagonal (done after seeding, so self-loop weights are discarded).
    for i in 0..n {
        distance[[i, i]] = 0.;
    }
    // Classic triple loop: relax every pair through intermediate node k.
    for k in 0..n {
        for i in 0..n {
            for j in 0..n {
                let d = distance[[i, k]] + distance[[k, j]];
                if d < distance[[i, j]] {
                    distance[[i, j]] = d;
                }
            }
        }
    }
    distance
}
|
#![deny(warnings)]
extern crate futures;
extern crate tokio_mock_task;
extern crate tokio_sync;
use tokio_mock_task::*;
use tokio_sync::oneshot;
use futures::prelude::*;
/// Asserts that a futures-0.1 poll result is `Ok(Async::Ready(_))` and
/// evaluates to the inner value.
macro_rules! assert_ready {
    ($e:expr) => {{
        match $e {
            Ok(futures::Async::Ready(v)) => v,
            Ok(_) => panic!("not ready"),
            Err(e) => panic!("error = {:?}", e),
        }
    }};
}
/// Asserts that a futures-0.1 poll result is `Ok(Async::NotReady)`;
/// panics (with the value or error) otherwise.
macro_rules! assert_not_ready {
    ($e:expr) => {{
        match $e {
            Ok(futures::Async::NotReady) => {}
            Ok(futures::Async::Ready(v)) => panic!("ready; value = {:?}", v),
            Err(e) => panic!("error = {:?}", e),
        }
    }};
}
// Compile-time assertion that both channel halves are `Send`:
// these impls fail to compile if `Sender`/`Receiver` ever lose the bound.
trait AssertSend: Send {}
impl AssertSend for oneshot::Sender<i32> {}
impl AssertSend for oneshot::Receiver<i32> {}
#[test]
fn send_recv() {
    // A send must notify a parked receiver, whose next poll yields the value.
    let (tx, mut rx) = oneshot::channel();
    let mut task = MockTask::new();
    task.enter(|| {
        assert_not_ready!(rx.poll());
    });
    assert!(tx.send(1).is_ok());
    assert!(task.is_notified());
    let val = assert_ready!(rx.poll());
    assert_eq!(val, 1);
}
#[test]
fn close_tx() {
    // Dropping the sender must notify a parked receiver and make it error.
    let (tx, mut rx) = oneshot::channel::<i32>();
    let mut task = MockTask::new();
    task.enter(|| {
        assert_not_ready!(rx.poll());
    });
    drop(tx);
    assert!(task.is_notified());
    assert!(rx.poll().is_err());
}
#[test]
fn close_rx() {
    // A dropped receiver makes send fail — both when the sender never
    // polls for closure and when it observes closure via poll_close().
    // First, without checking poll_close()
    //
    let (tx, _) = oneshot::channel();
    assert!(tx.send(1).is_err());
    // Second, via poll_close();
    let (mut tx, rx) = oneshot::channel();
    let mut task = MockTask::new();
    task.enter(|| assert_not_ready!(tx.poll_close()));
    drop(rx);
    assert!(task.is_notified());
    assert!(tx.is_closed());
    assert_ready!(tx.poll_close());
    assert!(tx.send(1).is_err());
}
#[test]
fn explicit_close_poll() {
    // rx.close() after a successful send must still deliver the value;
    // close() before any send makes the receiver error and the sender fail.
    // First, with message sent
    let (tx, mut rx) = oneshot::channel();
    assert!(tx.send(1).is_ok());
    rx.close();
    let value = assert_ready!(rx.poll());
    assert_eq!(value, 1);
    println!("~~~~~~~~~ TWO ~~~~~~~~~~");
    // Second, without the message sent
    let (mut tx, mut rx) = oneshot::channel::<i32>();
    let mut task = MockTask::new();
    task.enter(|| assert_not_ready!(tx.poll_close()));
    rx.close();
    assert!(task.is_notified());
    assert!(tx.is_closed());
    assert_ready!(tx.poll_close());
    assert!(tx.send(1).is_err());
    assert!(rx.poll().is_err());
    // Third: same as the second case, but without attempting the send at all
    let (mut tx, mut rx) = oneshot::channel::<i32>();
    let mut task = MockTask::new();
    task.enter(|| assert_not_ready!(tx.poll_close()));
    rx.close();
    assert!(task.is_notified());
    assert!(tx.is_closed());
    assert_ready!(tx.poll_close());
    assert!(rx.poll().is_err());
}
#[test]
fn explicit_close_try_recv() {
    // Same as explicit_close_poll, but observing the receiver via try_recv().
    // First, with message sent
    let (tx, mut rx) = oneshot::channel();
    assert!(tx.send(1).is_ok());
    rx.close();
    assert_eq!(rx.try_recv().unwrap(), 1);
    println!("~~~~~~~~~ TWO ~~~~~~~~~~");
    // Second, without the message sent
    let (mut tx, mut rx) = oneshot::channel::<i32>();
    let mut task = MockTask::new();
    task.enter(|| assert_not_ready!(tx.poll_close()));
    rx.close();
    assert!(task.is_notified());
    assert!(tx.is_closed());
    assert_ready!(tx.poll_close());
    assert!(rx.try_recv().is_err());
}
#[test]
#[should_panic]
fn close_try_recv_poll() {
    // Polling after close() + a failed try_recv() is a usage error and is
    // expected to panic.
    let (_tx, mut rx) = oneshot::channel::<i32>();
    let mut task = MockTask::new();
    rx.close();
    assert!(rx.try_recv().is_err());
    task.enter(|| {
        let _ = rx.poll();
    });
}
#[test]
fn drops_tasks() {
    // Dropping both halves must release their registered task handles:
    // only the MockTask's own reference remains afterwards.
    let (mut tx, mut rx) = oneshot::channel::<i32>();
    let mut tx_task = MockTask::new();
    let mut rx_task = MockTask::new();
    tx_task.enter(|| {
        assert_not_ready!(tx.poll_close());
    });
    rx_task.enter(|| {
        assert_not_ready!(rx.poll());
    });
    drop(tx);
    drop(rx);
    assert_eq!(1, tx_task.notifier_ref_count());
    assert_eq!(1, rx_task.notifier_ref_count());
}
#[test]
fn receiver_changes_task() {
    // Re-polling from a different task must swap the registered waker:
    // the old task's handle is released and only the new task is notified.
    let (tx, mut rx) = oneshot::channel();
    let mut task1 = MockTask::new();
    let mut task2 = MockTask::new();
    task1.enter(|| {
        assert_not_ready!(rx.poll());
    });
    assert_eq!(2, task1.notifier_ref_count());
    assert_eq!(1, task2.notifier_ref_count());
    task2.enter(|| {
        assert_not_ready!(rx.poll());
    });
    assert_eq!(1, task1.notifier_ref_count());
    assert_eq!(2, task2.notifier_ref_count());
    tx.send(1).unwrap();
    assert!(!task1.is_notified());
    assert!(task2.is_notified());
    assert_ready!(rx.poll());
}
#[test]
fn sender_changes_task() {
    // Mirror of receiver_changes_task for the sender's poll_close() waker.
    let (mut tx, rx) = oneshot::channel::<i32>();
    let mut task1 = MockTask::new();
    let mut task2 = MockTask::new();
    task1.enter(|| {
        assert_not_ready!(tx.poll_close());
    });
    assert_eq!(2, task1.notifier_ref_count());
    assert_eq!(1, task2.notifier_ref_count());
    task2.enter(|| {
        assert_not_ready!(tx.poll_close());
    });
    assert_eq!(1, task1.notifier_ref_count());
    assert_eq!(2, task2.notifier_ref_count());
    drop(rx);
    assert!(!task1.is_notified());
    assert!(task2.is_notified());
    assert_ready!(tx.poll_close());
}
|
use criterion::{criterion_group, criterion_main, Criterion, Throughput};
use iox_data_generator::{
agent::Agent,
specification::{
AgentAssignmentSpec, AgentSpec, DataSpec, DatabaseWriterSpec, FieldSpec, FieldValueSpec,
MeasurementSpec,
},
tag_set::GeneratedTagSets,
write::PointsWriterBuilder,
};
use std::{
sync::{atomic::AtomicU64, Arc},
time::Duration,
};
/// Benchmarks data generation with the simplest possible spec: one agent,
/// one measurement, a single bool field, sampled every 1s into a no-op sink.
pub fn single_agent(c: &mut Criterion) {
    let spec = DataSpec {
        name: "benchmark".into(),
        values: vec![],
        tag_sets: vec![],
        agents: vec![AgentSpec {
            name: "foo".to_string(),
            measurements: vec![MeasurementSpec {
                name: "measurement-1".into(),
                count: None,
                fields: vec![FieldSpec {
                    name: "field-1".into(),
                    field_value_spec: FieldValueSpec::Bool(true),
                    count: None,
                }],
                tag_set: None,
                tag_pairs: vec![],
            }],
            has_one: vec![],
            tag_pairs: vec![],
        }],
        database_writers: vec![DatabaseWriterSpec {
            database_ratio: Some(1.0),
            database_regex: None,
            agents: vec![AgentAssignmentSpec {
                name: "foo".to_string(),
                count: None,
                sampling_interval: "1s".to_string(),
            }],
        }],
    };
    // Discard generated points; we only measure generation throughput.
    let mut points_writer = PointsWriterBuilder::new_no_op(true);
    // One hour at 1s sampling -> 3601 points (presumably inclusive of both
    // endpoints — the assert below pins it).
    let start_datetime = Some(0);
    let one_hour_s = 60 * 60;
    let ns_per_second = 1_000_000_000;
    let end_datetime = Some(one_hour_s * ns_per_second);
    let expected_points = 3601;
    let mut group = c.benchmark_group("single_agent");
    group.throughput(Throughput::Elements(expected_points));
    group.bench_function("single agent with basic configuration", |b| {
        b.iter(|| {
            let r = block_on(iox_data_generator::generate(
                &spec,
                vec!["foo_bar".to_string()],
                &mut points_writer,
                start_datetime,
                end_datetime,
                0,
                false,
                1,
                false,
            ));
            let n_points = r.expect("Could not generate data");
            assert_eq!(n_points, expected_points as usize);
        })
    });
}
/// Benchmarks an agent driven by a realistic TOML spec with pre-generated
/// tag sets (orgs, buckets, partitions), bypassing `generate()` and calling
/// `Agent::generate_all` directly.
pub fn agent_pre_generated(c: &mut Criterion) {
    let spec: DataSpec = toml::from_str(
        r#"
name = "storage_cardinality_example"
# Values are automatically generated before the agents are initialized. They generate tag key/value
# pairs with the name of the value as the tag key and the evaluated template as the value. These
# pairs are Arc wrapped so they can be shared across tagsets and used in the agents as
# pre-generated data.
[[values]]
# the name must not have a . in it, which is used to access children later. Otherwise it's open.
name = "role"
# the template can use a number of helpers to get an id, a random string and the name, see below
# for examples
template = "storage"
# this number of tag pairs will be generated. If this is > 1, the id or a random character string
# should be used in the template to ensure that the tag key/value pairs are unique.
cardinality = 1
[[values]]
name = "url"
template = "http://127.0.0.1:6060/metrics/usage"
cardinality = 1
[[values]]
name = "org_id"
# Fill in the value with the cardinality counter and 15 random alphanumeric characters
template = "{{id}}_{{random 15}}"
cardinality = 1000
has_one = ["env"]
[[values]]
name = "env"
template = "whatever-environment-{{id}}"
cardinality = 10
[[values]]
name = "bucket_id"
# a bucket belongs to an org. With this, you would be able to access the org.id or org.value in the
# template
belongs_to = "org_id"
# each bucket will have a unique id, which is used here to guarantee uniqueness even across orgs.
# We also have a random 15 character alphanumeric sequence to pad out the value length.
template = "{{id}}_{{random 15}}"
# For each org, 3 buckets will be generated
cardinality = 3
[[values]]
name = "partition_id"
template = "{{id}}"
cardinality = 10
# makes a tagset so every bucket appears in every partition. The other tags are descriptive and
# don't increase the cardinality beyond count(bucket) * count(partition). Later this example will
# use the agent and measurement generation to take this base tagset and increase cardinality on a
# per-agent basis.
[[tag_sets]]
name = "bucket_set"
for_each = [
"role",
"url",
"org_id",
"org_id.env",
"org_id.bucket_id",
"partition_id",
]
[[agents]]
name = "foo"
[[agents.measurements]]
name = "storage_usage_bucket_cardinality"
# each sampling will have all the tag sets from this collection in addition to the tags and
# tag_pairs specified
tag_set = "bucket_set"
# for each agent, this specific measurement will be decorated with these additional tags.
tag_pairs = [
{key = "node_id", template = "{{agent.id}}"},
{key = "hostname", template = "{{agent.id}}"},
{key = "host", template = "storage-{{agent.id}}"},
]
[[agents.measurements.fields]]
name = "gauge"
i64_range = [1, 8147240]
[[database_writers]]
agents = [{name = "foo", sampling_interval = "1s", count = 3}]
"#,
    )
    .unwrap();
    // Materialize the [[values]]/[[tag_sets]] sections up front.
    let generated_tag_sets = GeneratedTagSets::from_spec(&spec).unwrap();
    let mut points_writer = PointsWriterBuilder::new_no_op(true);
    let start_datetime = Some(0);
    let one_hour_s = 60 * 60;
    let ns_per_second = 1_000_000_000;
    let end_datetime = Some(one_hour_s * ns_per_second);
    let mut agents = Agent::from_spec(
        &spec.agents[0],
        3,
        Duration::from_millis(10),
        start_datetime,
        end_datetime,
        0,
        false,
        &generated_tag_sets,
    )
    .unwrap();
    // Only the first of the 3 configured agents is benchmarked.
    let agent = agents.first_mut().unwrap();
    let expected_points = 30000;
    let counter = Arc::new(AtomicU64::new(0));
    let request_counter = Arc::new(AtomicU64::new(0));
    let mut group = c.benchmark_group("agent_pre_generated");
    group.measurement_time(std::time::Duration::from_secs(50));
    group.throughput(Throughput::Elements(expected_points));
    group.bench_function("single agent with basic configuration", |b| {
        b.iter(|| {
            // Rewind the simulated clock so every iteration generates the
            // same one-hour window.
            agent.reset_current_date_time(0);
            let points_writer =
                Arc::new(points_writer.build_for_agent("foo", "foo", "foo").unwrap());
            let r = block_on(agent.generate_all(
                points_writer,
                1,
                Arc::clone(&counter),
                Arc::clone(&request_counter),
            ));
            let n_points = r.expect("Could not generate data");
            assert_eq!(n_points.row_count, expected_points as usize);
        })
    });
}
/// Bridges the synchronous benchmark closures to async code: `#[tokio::main]`
/// builds a runtime and drives `f` to completion on each call.
#[tokio::main]
async fn block_on<F: std::future::Future>(f: F) -> F::Output {
    f.await
}
// Register both benchmarks with criterion and generate the harness `main`.
criterion_group!(benches, single_agent, agent_pre_generated);
criterion_main!(benches);
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::collections::vec_deque::*;
use alloc::sync::Arc;
use spin::Mutex;
use core::ops::Deref;
use super::super::super::qlib::common::*;
use super::super::super::qlib::linux_def::*;
use super::queue::*;
use super::*;
use super::super::super::task::*;
// Shared state of a buffered channel (capacity >= 1), kept behind a Mutex.
pub struct BufChanInternel<T: Sized> {
    // Buffered items, FIFO order.
    pub buf : VecDeque<T>,
    // Remaining capacity; invariant: buf.len() + space == total capacity.
    pub space: usize,
    // Wait queue used to park/notify blocked readers (EVENT_IN) and
    // writers (EVENT_OUT).
    pub queue: Queue,
    // Once set, all further operations fail with `Error::ChanClose`.
    pub closed: bool,
}
// Cloneable handle to the shared channel state; clones refer to the same channel.
#[derive(Clone)]
pub struct BufChan<T>(Arc<Mutex<BufChanInternel<T>>>);
// Deref to the inner Arc<Mutex<..>> so callers can write `chan.lock()` directly.
impl <T> Deref for BufChan<T> {
    type Target = Arc<Mutex<BufChanInternel<T>>>;
    fn deref(&self) -> &Arc<Mutex<BufChanInternel<T>>> {
        &self.0
    }
}
impl <T> BufChan <T> {
    /// Creates a channel with capacity `size`.
    pub fn New(size: usize) -> Self {
        let internel = BufChanInternel {
            buf: VecDeque::with_capacity(size),
            space: size,
            queue: Queue::default(),
            closed: false,
        };
        return Self(Arc::new(Mutex::new(internel)))
    }

    /// Number of items currently waiting in the buffer.
    pub fn Len(&self) -> usize {
        return self.lock().buf.len();
    }

    /// Blocking send: enqueues `data`, parking the task until space frees up.
    /// Fails with `ChanClose` if the channel is (or becomes) closed.
    pub fn Write(&self, task: &Task, data: T) -> Result<()> {
        loop {
            {
                let mut c = self.lock();
                if c.closed {
                    return Err(Error::ChanClose)
                }
                if c.space > 0 {
                    c.buf.push_back(data);
                    c.space -= 1;
                    // Wake a reader waiting for input.
                    c.queue.Notify(EVENT_IN);
                    return Ok(())
                }
                // Register for "space available" while still holding the lock,
                // so a concurrent Read cannot notify between unlock and block.
                let block = task.blocker.clone();
                c.queue.EventRegister(task, &block.generalEntry, EVENT_OUT);
            }
            task.blocker.BlockGeneral()?;
            {
                let c = self.lock();
                let block = task.blocker.clone();
                c.queue.EventUnregister(task, &block.generalEntry);
            }
        }
    }

    // Non-blocking write: returns Ok(true) on success, Ok(false) if full,
    // and `ChanClose` if the channel is closed.
    pub fn TryWrite(&self, _task: &Task, data: T) -> Result<bool> {
        let mut c = self.lock();
        if c.closed {
            return Err(Error::ChanClose)
        }
        if c.space > 0 {
            c.buf.push_back(data);
            c.space -= 1;
            c.queue.Notify(EVENT_IN);
            return Ok(true)
        }
        return Ok(false);
    }

    /// Blocking receive: pops the oldest item, parking the task until one is
    /// available. Fails with `ChanClose` if the channel is closed.
    /// NOTE(review): a closed channel errors even when items remain buffered —
    /// confirm that drain-after-close is intentionally unsupported.
    pub fn Read(&self, task: &Task) -> Result<T> {
        loop {
            {
                let mut c = self.lock();
                if c.closed {
                    return Err(Error::ChanClose)
                }
                if c.buf.len() > 0 {
                    let ret = c.buf.pop_front().unwrap();
                    c.space += 1;
                    // Wake a writer waiting for space.
                    c.queue.Notify(EVENT_OUT);
                    return Ok(ret);
                }
                // Register for "data available" before releasing the lock.
                let block = task.blocker.clone();
                c.queue.EventRegister(task, &block.generalEntry, EVENT_IN);
            }
            task.blocker.BlockGeneral()?;
            {
                let c = self.lock();
                let block = task.blocker.clone();
                c.queue.EventUnregister(task, &block.generalEntry);
            }
        }
    }

    /// Non-blocking receive: Ok(Some(item)) on success, Ok(None) if empty,
    /// `ChanClose` if the channel is closed.
    pub fn TryRead(&self) -> Result<Option<T>> {
        let mut c = self.lock();
        if c.closed {
            return Err(Error::ChanClose)
        }
        if c.buf.len() > 0 {
            let ret = c.buf.pop_front().unwrap();
            c.space += 1;
            c.queue.Notify(EVENT_OUT);
            return Ok(Some(ret));
        }
        return Ok(None);
    }

    /// Closes the channel: wakes all waiters (all event bits set) and marks
    /// it closed, both under the same lock acquisition.
    pub fn Close(&self) {
        let mut c = self.lock();
        c.queue.Notify(!0);
        c.closed = true;
    }
}
// Crate root: shared helpers plus one module per puzzle day.
// (Days before 07 and day 23 have no modules here.)
#[macro_use] extern crate lazy_static;
pub mod common;
pub mod day07;
pub mod day08;
pub mod day09;
pub mod day10;
pub mod day11;
pub mod day12;
pub mod day13;
pub mod day14;
pub mod day15;
pub mod day16;
pub mod day17;
pub mod day18;
pub mod day19;
pub mod day20;
pub mod day21;
pub mod day22;
pub mod day24;
pub mod day25;
#[cfg(test)]
// https://doc.rust-lang.org/book/ch11-02-running-tests.html#controlling-how-tests-are-run
mod tests;
|
/*!
An application that runs in the system tray.
Requires the following features: `cargo run --example system_tray_d --features "tray-notification message-window menu cursor"`
*/
extern crate native_windows_gui as nwg;
extern crate native_windows_derive as nwd;
use nwd::NwgUi;
use nwg::NativeUi;
/// Tray-only application UI: an invisible message window hosting a tray icon
/// with a three-entry popup menu. Widgets and event wiring are generated by
/// the `NwgUi` derive from the attributes below.
#[derive(Default, NwgUi)]
pub struct SystemTray {
    // Hidden window that owns the tray icon and receives its messages.
    #[nwg_control]
    window: nwg::MessageWindow,
    // Icon resource shown in the tray and in notifications.
    #[nwg_resource(source_file: Some("./test_rc/cog.ico"))]
    icon: nwg::Icon,
    // The tray entry itself; left click or context menu opens the popup.
    #[nwg_control(icon: Some(&data.icon), tip: Some("Hello"))]
    #[nwg_events(MousePressLeftUp: [SystemTray::show_menu], OnContextMenu: [SystemTray::show_menu])]
    tray: nwg::TrayNotification,
    // Popup menu anchored to the hidden window.
    #[nwg_control(parent: window, popup: true)]
    tray_menu: nwg::Menu,
    #[nwg_control(parent: tray_menu, text: "Hello")]
    #[nwg_events(OnMenuItemSelected: [SystemTray::hello1])]
    tray_item1: nwg::MenuItem,
    #[nwg_control(parent: tray_menu, text: "Popup")]
    #[nwg_events(OnMenuItemSelected: [SystemTray::hello2])]
    tray_item2: nwg::MenuItem,
    #[nwg_control(parent: tray_menu, text: "Exit")]
    #[nwg_events(OnMenuItemSelected: [SystemTray::exit])]
    tray_item3: nwg::MenuItem,
}
impl SystemTray {
    /// Opens the tray popup menu at the current cursor location.
    fn show_menu(&self) {
        let position = nwg::GlobalCursor::position();
        self.tray_menu.popup(position.0, position.1);
    }

    /// Shows a simple modal message box.
    fn hello1(&self) {
        nwg::simple_message("Hello", "Hello World!");
    }

    /// Shows a balloon notification using the embedded icon.
    fn hello2(&self) {
        let flags = nwg::TrayNotificationFlags::USER_ICON | nwg::TrayNotificationFlags::LARGE_ICON;
        self.tray
            .show("Hello World", Some("Welcome to my application"), Some(flags), Some(&self.icon));
    }

    /// Stops the event loop, ending the application.
    fn exit(&self) {
        nwg::stop_thread_dispatch();
    }
}
fn main() {
    // Initialize the GUI runtime, build the tray UI, then run the
    // message loop until SystemTray::exit stops it.
    nwg::init().expect("Failed to init Native Windows GUI");
    let _ui = SystemTray::build_ui(Default::default()).expect("Failed to build UI");
    nwg::dispatch_thread_events();
}
|
use std::time::Duration;
use imgui_winit_support::WinitPlatform;
use legion::*;
use winit::window::Window;
use imgui::*;
use imgui_wgpu::{Renderer, RendererConfig};
use crate::{
wgpu_state::WgpuState,
app_state::AppState,
application::DeltaTime,
events::Events,
command::Command,
};
/// All state needed to build and draw the imgui overlay each frame.
pub struct UiState {
    // imgui context (widget state, font atlas, IO).
    pub imgui: Context,
    // winit integration: event translation and frame preparation.
    pub platform: WinitPlatform,
    // wgpu backend that renders imgui draw data.
    renderer: Renderer,
    // Last cursor imgui requested; used to avoid redundant prepare_render calls.
    last_cursor: Option<MouseCursor>,
}
impl UiState {
    /// Builds the imgui context, winit platform glue, DPI-scaled default font,
    /// and the wgpu renderer targeting the swapchain's texture format.
    pub fn new(window: &Window, wgpu_state: &WgpuState) -> Self {
        let mut imgui = imgui::Context::create();
        let mut platform = imgui_winit_support::WinitPlatform::init(&mut imgui);
        platform.attach_window(
            imgui.io_mut(),
            &window,
            imgui_winit_support::HiDpiMode::Default,
        );
        // Don't persist UI layout to an .ini file.
        imgui.set_ini_filename(None);
        // Scale the 13px default font for high-DPI displays, and compensate
        // with the inverse global scale so logical sizes stay consistent.
        let hidpi_factor = window.scale_factor();
        let font_size = (13.0 * hidpi_factor) as f32;
        imgui.io_mut().font_global_scale = (1.0 / hidpi_factor) as f32;
        imgui.fonts().add_font(&[FontSource::DefaultFontData {
            config: Some(imgui::FontConfig {
                oversample_h: 1,
                pixel_snap_h: true,
                size_pixels: font_size,
                ..Default::default()
            }),
        }]);
        let renderer_config = RendererConfig {
            texture_format: wgpu_state.sc_desc.format,
            ..Default::default()
        };
        let renderer = Renderer::new(&mut imgui, &wgpu_state.device, &wgpu_state.queue, renderer_config);
        Self {
            imgui,
            platform,
            renderer,
            last_cursor: None,
        }
    }

    /// Builds and renders the "Outliner" panel (file list + scene-node tree)
    /// into the current swapchain frame. Emits a `SetCurrentScene` command when
    /// the user picks a different collection. No-op when there is no frame.
    pub fn render_ui(&mut self, window: &Window, resources: &Resources) {
        let delta_time = resources.get::<DeltaTime>().unwrap();
        let wgpu_state = resources.get::<WgpuState>().unwrap();
        let app_state = resources.get::<AppState>().unwrap();
        let mut commands = resources.get_mut::<Events<Command>>().unwrap();
        // Nothing to draw into if the swapchain frame wasn't acquired.
        if let None = wgpu_state.current_frame { return; }
        let frame = &wgpu_state.current_frame.as_ref().unwrap().output;
        self.imgui.io_mut().update_delta_time(Duration::from_secs_f32(delta_time.0));
        self.platform
            .prepare_frame(self.imgui.io_mut(), &window)
            .expect("Failed to prepare frame");
        let ui = self.imgui.frame();
        {
            // Local copy; compared against app_state afterwards to detect a
            // selection change made through the list box.
            let mut current_collection = app_state.current_collection;
            let size = window.inner_size().to_logical::<f32>(window.scale_factor());
            let panel = imgui::Window::new(im_str!("Outliner"));
            panel
                .position([0.0, 0.0], Condition::FirstUseEver)
                .size([300.0, size.height], Condition::FirstUseEver)
                .build(&ui, || {
                    if CollapsingHeader::new(im_str!("Files"))
                        .default_open(true)
                        .build(&ui) {
                        // Build NUL-terminated names ordered by collection index,
                        // as required by the ImStr conversion below.
                        let collection_indices = &app_state.data_accessor.collection_indices;
                        let mut collection_names: Vec<String> = vec![String::new(); collection_indices.len()];
                        for (name, index) in collection_indices {
                            let mut name = name.clone();
                            name.push_str("\0");
                            collection_names[*index] = name;
                        }
                        imgui::ListBox::new(im_str!(""))
                            .build_simple(&ui, &mut current_collection, collection_names.as_slice(), &|item: &String| {
                                // SAFETY: every `item` had "\0" appended above, so the
                                // bytes are NUL-terminated as from_utf8_with_nul_unchecked requires.
                                let result = unsafe { ImStr::from_utf8_with_nul_unchecked(item.as_bytes()).into() };
                                return result;
                            });
                        /*ui.separator();
                        let mouse_pos = ui.io().mouse_pos;
                        ui.text(im_str!("Mouse Position: ({:.1},{:.1})", mouse_pos[0], mouse_pos[1]));*/
                    }
                    if CollapsingHeader::new(im_str!("Scene Nodes"))
                        .default_open(true)
                        .build(&ui) {
                        let collection = &app_state.data_accessor.collections[current_collection];
                        // Only root nodes start a tree; children are added recursively.
                        for i in 0..collection.scene_nodes.len() {
                            let is_root = if let None = collection.scene_nodes[i].get_parent() {
                                true
                            } else {
                                false
                            };
                            if is_root {
                                Self::add_tree_nodes_recursive((current_collection, i), &app_state, &ui);
                            }
                        }
                    }
                }
            );
            if app_state.current_collection != current_collection {
                commands.send(Command::SetCurrentScene(current_collection));
            }
        }
        let mut encoder = wgpu_state
            .device
            .create_command_encoder(&wgpu::CommandEncoderDescriptor {
                label: Some("UI Render Encoder"),
            });
        // Only re-prepare the cursor when imgui's requested cursor changed.
        if self.last_cursor != ui.mouse_cursor() {
            self.last_cursor = ui.mouse_cursor();
            self.platform.prepare_render(&ui, window);
        }
        {
            // Load (not clear) so the UI composites over the scene render.
            let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
                color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor {
                    attachment: &frame.view,
                    resolve_target: None,
                    ops: wgpu::Operations {
                        load: wgpu::LoadOp::Load,
                        store: true,
                    },
                }],
                depth_stencil_attachment: None,
            });
            self.renderer
                .render(ui.render(), &wgpu_state.queue, &wgpu_state.device, &mut render_pass)
                .expect("Rendering failed");
        }
        wgpu_state.queue.submit(Some(encoder.finish()));
    }

    /// Recursively adds one scene node and its children to the tree view.
    /// `scene_node` is a `(collection_index, node_index)` pair.
    fn add_tree_nodes_recursive(scene_node: (usize, usize), app_state: &AppState, ui: &Ui) {
        let collection = &app_state.data_accessor.collections[scene_node.0];
        let node = &collection.scene_nodes[scene_node.1];
        let mut name = node.name.clone();
        name.push_str("\0");
        let is_leaf = node.children.is_empty();
        let is_selected = if let Some(selected_node) = app_state.selected_scene {
            selected_node == scene_node
        } else { false };
        // SAFETY: "\0" was appended to `name` above, so the byte slice is
        // NUL-terminated as from_utf8_with_nul_unchecked requires.
        imgui::TreeNode::new(unsafe { ImStr::from_utf8_with_nul_unchecked(name.as_bytes().into()) })
            .default_open(true)
            .open_on_arrow(true)
            .leaf(is_leaf)
            .selected(is_selected)
            .build(&ui, || {
                for child in &node.children {
                    Self::add_tree_nodes_recursive((scene_node.0, *child), app_state, &ui);
                }
                /*if ui.is_item_clicked(MouseButton::Left) {
                    app_state.selected_scene = Some(scene_node);
                }*/
            });
    }
}
|
/// Whitespace-delimited token reader for competitive-programming input.
pub struct ProconReader<R: std::io::Read> {
    reader: R,
}

impl<R: std::io::Read> ProconReader<R> {
    /// Wraps `reader` for token-at-a-time parsing.
    pub fn new(reader: R) -> Self {
        Self { reader }
    }

    /// Reads the next token and parses it as `T`.
    ///
    /// Leading spaces, `\n` and `\r` are skipped; the token ends at (and
    /// consumes) the next such separator or at end of input.
    ///
    /// # Panics
    /// Panics with "Parse Error." if the token cannot be parsed as `T`,
    /// and on any underlying I/O or UTF-8 error.
    pub fn get<T: std::str::FromStr>(&mut self) -> T {
        use std::io::Read;
        let mut token: Vec<u8> = Vec::new();
        for byte in self.reader.by_ref().bytes().map(|b| b.unwrap()) {
            let is_separator = byte == b' ' || byte == b'\n' || byte == b'\r';
            if is_separator {
                if token.is_empty() {
                    // Still skipping leading separators.
                    continue;
                }
                // Token complete; the separator byte is consumed, matching
                // the previous take_while-based behavior.
                break;
            }
            token.push(byte);
        }
        std::str::from_utf8(&token)
            .unwrap()
            .parse()
            .ok()
            .expect("Parse Error.")
    }
}
/// Sets `$a` to `max($a, $b)`.
/// NOTE: `$a` is evaluated twice; pass a plain place expression.
macro_rules! chmax {
    ($a:expr, $b:expr) => {
        $a = std::cmp::max($a, $b)
    };
}
/// Sets `$a` to `min($a, $b)`.
/// NOTE: `$a` is evaluated twice; pass a plain place expression.
macro_rules! chmin {
    ($a:expr, $b:expr) => {
        $a = std::cmp::min($a, $b)
    };
}
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    // n piles/values a[i] and a parameter m; the DP below scores a
    // two-player game (presumably optimal play — semantics depend on the
    // original problem statement, which is not visible here).
    let n: usize = rd.get();
    let m: i64 = rd.get();
    let a: Vec<i64> = (0..n).map(|_| rd.get()).collect();
    // dp[0]/dp[1]: best score differential from the suffix for the player
    // to move / the opponent (roles alternate each index).
    let mut dp = vec![0, 0];
    // Process positions right-to-left.
    for i in (0..n).rev() {
        let x = a[i];
        let mut nxt = vec![0, 0];
        // Option 1: take the whole x (cost/benefit x - m), switching turns.
        nxt[0] = dp[1] + (x - m);
        nxt[1] = dp[0] - (x - m);
        // Option 2 (only if x >= 2): split off x - 1, keeping the turn
        // structure — maximize for player 0, minimize for player 1.
        if x >= 2 {
            chmax!(nxt[0], dp[0] + (x - 1) - (1 - m));
            chmin!(nxt[1], dp[1] - (x - 1) + (1 - m));
        }
        dp = nxt;
    }
    // Positive differential for the first player means "First" wins.
    println!("{}", if dp[0] > 0 { "First" } else { "Second" });
}
|
use crate::vec3::Color;
use crate::ray::Ray;
/// Appends one pixel's PPM color line ("\n<r> <g> <b>", 0-255 ints) to `output`.
///
/// The accumulated `pixel_color` is averaged over `samples_per_pixel`,
/// gamma-corrected with gamma = 2.0 (hence `sqrt`), clamped to [0, 0.999]
/// and quantized to integers.
pub fn write_color(pixel_color: Color, samples_per_pixel: i32, output: &mut String) {
    use std::fmt::Write as _;

    // Divide the color by the number of samples, then gamma-correct.
    let scale = 1.0 / samples_per_pixel as f32;
    let r = (scale * pixel_color.r()).sqrt();
    let g = (scale * pixel_color.g()).sqrt();
    let b = (scale * pixel_color.b()).sqrt();
    // Append in place. The previous `*output = format!("{}\n...", output, ...)`
    // re-copied the whole buffer for every pixel (O(n^2) over an image).
    write!(
        output,
        "\n{} {} {}",
        (256.0 * clamp(r, 0.0, 0.999)) as i32,
        (256.0 * clamp(g, 0.0, 0.999)) as i32,
        (256.0 * clamp(b, 0.0, 0.999)) as i32,
    )
    .expect("writing to a String cannot fail");
}
/// Background color for a ray: maps the unit direction's y from [-1, 1] to
/// t in [0, 1] and linearly blends white (t = 0) into sky blue (t = 1).
pub fn ray_color(r: &Ray) -> Color {
    let dir = r.direction().unit_vector();
    let t = 0.5 * (dir.y() + 1.0);
    let white = Color::new(1.0, 1.0, 1.0);
    let sky = Color::new(0.5, 0.7, 1.0);
    white * (1.0 - t) + sky * t
}
/// Restricts `x` to the closed range [`min`, `max`].
///
/// Values below `min` map to `min`, values above `max` to `max`; anything
/// else — including NaN, which fails both comparisons — passes through.
fn clamp(x: f32, min: f32, max: f32) -> f32 {
    if x < min {
        min
    } else if x > max {
        max
    } else {
        x
    }
}
//! # Erasure Coding and Recovery
//!
//! Blobs are logically grouped into erasure sets or blocks. Each set contains `NUM_DATA` sequential
//! data blobs and `NUM_CODING` sequential coding blobs (both currently 8).
//!
//! Coding blobs in each set starting from `start_idx`:
//! For each erasure set:
//! generate `NUM_CODING` coding_blobs.
//! index the coding blobs from `start_idx` to `start_idx + NUM_CODING - 1`.
//!
//! model of an erasure set, with top row being data blobs and second being coding
//! |<======================= NUM_DATA ==============================>|
//! |<==== NUM_CODING ===>|
//! +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+
//! | D | | D | | D | | D | | D | | D | | D | | D | | D | | D |
//! +---+ +---+ +---+ +---+ +---+ . . . +---+ +---+ +---+ +---+ +---+
//! | C | | C | | C | | C | | | | | | | | | | | | |
//! +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+ +---+
//!
//! blob structure for coding blobs
//!
//! + ------- meta is set and used by transport, meta.size is actual length
//! | of data in the byte array blob.data
//! |
//! | + -- data is stuff shipped over the wire, and has an included
//! | | header
//! V V
//! +----------+------------------------------------------------------------+
//! | meta | data |
//! |+---+-- |+---+---+---+---+------------------------------------------+|
//! || s | . || i | | f | s | ||
//! || i | . || n | i | l | i | ||
//! || z | . || d | d | a | z | blob.data(), or blob.data_mut() ||
//! || e | || e | | g | e | ||
//! |+---+-- || x | | s | | ||
//! | |+---+---+---+---+------------------------------------------+|
//! +----------+------------------------------------------------------------+
//! | |<=== coding blob part for "coding" =======>|
//! | |
//! |<============== data blob part for "coding" ==============>|
//!
//!
use crate::packet::{Blob, SharedBlob, BLOB_HEADER_SIZE};
use std::cmp;
use std::convert::AsMut;
use std::sync::{Arc, RwLock};
use reed_solomon_erasure::ReedSolomon;
//TODO(sakridge) pick these values
/// Number of data blobs
pub const NUM_DATA: usize = 8;
/// Number of coding blobs; also the maximum number that can go missing.
pub const NUM_CODING: usize = 8;
/// Total number of blobs in an erasure set; includes data and coding blobs
pub const ERASURE_SET_SIZE: usize = NUM_DATA + NUM_CODING;
// Module-local Result alias pinned to the Reed-Solomon library's error type.
type Result<T> = std::result::Result<T, reed_solomon_erasure::Error>;
/// Represents an erasure "session" with a particular configuration and number of data and coding
/// blobs
#[derive(Debug, Clone)]
pub struct Session(ReedSolomon);
/// Generates coding blobs on demand given data blobs
#[derive(Debug, Clone)]
pub struct CodingGenerator {
    /// SharedBlobs that couldn't be used in last call to next()
    leftover: Vec<SharedBlob>,
    /// Shared erasure session used to encode each full set of data blobs.
    session: Arc<Session>,
}
impl Session {
    /// Creates a session with `data_count` data shards and `coding_count`
    /// parity shards.
    ///
    /// Fails if the underlying Reed-Solomon codec rejects the configuration
    /// (e.g. zero shards or too many total shards).
    pub fn new(data_count: usize, coding_count: usize) -> Result<Session> {
        let rs = ReedSolomon::new(data_count, coding_count)?;
        Ok(Session(rs))
    }
    /// Create coding blocks by overwriting `parity`
    ///
    /// `data` and `parity` must match this session's dimensions, and all
    /// blocks must have equal length, or the codec reports an error.
    pub fn encode(&self, data: &[&[u8]], parity: &mut [&mut [u8]]) -> Result<()> {
        self.0.encode_sep(data, parity)?;
        Ok(())
    }
    /// Recover data + coding blocks into data blocks
    /// # Arguments
    /// * `blocks` - concatenated data and coding blocks, reconstructed in place
    /// * `present` - for each block, whether its contents are intact
    pub fn decode_blocks(&self, blocks: &mut [&mut [u8]], present: &[bool]) -> Result<()> {
        self.0.reconstruct(blocks, present)?;
        Ok(())
    }
    /// Returns `(number_of_data_blobs, number_of_coding_blobs)`
    pub fn dimensions(&self) -> (usize, usize) {
        (self.0.data_shard_count(), self.0.parity_shard_count())
    }
    /// Reconstruct any missing blobs in this erasure set if possible
    /// Re-indexes any coding blobs that have been reconstructed and fixes up size in metadata
    /// Assumes that the user has sliced into the blobs appropriately already. else recovery will
    /// return an error or garbage data
    pub fn reconstruct_blobs<B>(
        &self,
        blobs: &mut [B],
        present: &[bool],
        size: usize,
        block_start_idx: u64,
        slot: u64,
    ) -> Result<(Vec<Blob>, Vec<Blob>)>
    where
        B: AsMut<[u8]>,
    {
        // Use this session's actual data-shard count rather than the global
        // NUM_DATA constant, so sessions built with non-default dimensions
        // split the data/coding halves of `blobs` correctly.
        let num_data = self.0.data_shard_count();
        let mut blocks: Vec<&mut [u8]> = blobs.iter_mut().map(AsMut::as_mut).collect();
        trace!("[reconstruct_blobs] present: {:?}, size: {}", present, size,);
        // Decode the blocks in place (`present` is already a slice; the
        // previous `&present` added a needless extra borrow).
        self.decode_blocks(blocks.as_mut_slice(), present)?;
        let mut recovered_data = vec![];
        let mut recovered_coding = vec![];
        // Indices of the blocks that were missing and have just been rebuilt.
        let erasures = present
            .iter()
            .enumerate()
            .filter_map(|(i, is_present)| if *is_present { None } else { Some(i) });
        // Create the missing blobs from the reconstructed data
        for n in erasures {
            let data_size;
            let idx;
            let first_byte;
            if n < num_data {
                // Data blob: its header was recovered together with the
                // payload, so read the real payload size out of the blob.
                let mut blob = Blob::new(&blocks[n]);
                data_size = blob.data_size() as usize - BLOB_HEADER_SIZE;
                idx = n as u64 + block_start_idx;
                first_byte = blob.data[0];
                blob.set_size(data_size);
                recovered_data.push(blob);
            } else {
                // Coding blob: rebuild its header from the caller-supplied
                // `slot`/`size` and its position within the erasure set.
                let mut blob = Blob::default();
                blob.data_mut()[..size].copy_from_slice(&blocks[n]);
                data_size = size;
                idx = (n as u64 + block_start_idx) - num_data as u64;
                first_byte = blob.data[0];
                blob.set_slot(slot);
                blob.set_index(idx);
                blob.set_size(data_size);
                recovered_coding.push(blob);
            }
            trace!(
                "[reconstruct_blobs] erasures[{}] ({}) data_size: {} data[0]: {}",
                n,
                idx,
                data_size,
                first_byte
            );
        }
        Ok((recovered_data, recovered_coding))
    }
}
impl CodingGenerator {
    /// Creates a generator that shares `session` and can buffer up to one
    /// erasure set's worth of leftover data blobs between calls.
    pub fn new(session: Arc<Session>) -> Self {
        CodingGenerator {
            leftover: Vec::with_capacity(session.0.data_shard_count()),
            session,
        }
    }
    /// Yields next set of coding blobs, if any.
    /// Must be called with consecutive data blobs within a slot.
    ///
    /// Passing in a slice with the first blob having a new slot will cause internal state to
    /// reset, so the above concern does not apply to slot boundaries, only indexes within a slot
    /// must be consecutive.
    ///
    /// If used improperly, it may return garbage coding blobs, but will not give an
    /// error.
    pub fn next(&mut self, next_data: &[SharedBlob]) -> Vec<SharedBlob> {
        let (num_data, num_coding) = self.session.dimensions();
        // One coding set is produced per complete group of `num_data` blobs.
        let mut next_coding =
            Vec::with_capacity((self.leftover.len() + next_data.len()) / num_data * num_coding);
        // A slot change invalidates any partial set carried over from the
        // previous call, so drop the leftovers.
        if !self.leftover.is_empty()
            && !next_data.is_empty()
            && self.leftover[0].read().unwrap().slot() != next_data[0].read().unwrap().slot()
        {
            self.leftover.clear();
        }
        let next_data: Vec<_> = self.leftover.iter().chain(next_data).cloned().collect();
        for data_blobs in next_data.chunks(num_data) {
            if data_blobs.len() < num_data {
                // Incomplete set: stash the tail for the next call.
                self.leftover = data_blobs.to_vec();
                break;
            }
            self.leftover.clear();
            // find max_data_size for the erasure set
            let max_data_size = data_blobs
                .iter()
                .fold(0, |max, blob| cmp::max(blob.read().unwrap().meta.size, max));
            let data_locks: Vec<_> = data_blobs.iter().map(|b| b.read().unwrap()).collect();
            let data_ptrs: Vec<_> = data_locks
                .iter()
                .map(|l| &l.data[..max_data_size])
                .collect();
            let mut coding_blobs = Vec::with_capacity(num_coding);
            // Each coding blob inherits its metadata from the data blob at
            // the same position in the set.
            // NOTE(review): this slice panics if num_coding > num_data;
            // presumably sessions always satisfy coding <= data — confirm
            // before allowing other configurations here.
            for data_blob in &data_locks[..num_coding] {
                let index = data_blob.index();
                let slot = data_blob.slot();
                let id = data_blob.id();
                let genesis_blockhash = data_blob.genesis_blockhash();
                let mut coding_blob = Blob::default();
                coding_blob.set_genesis_blockhash(&genesis_blockhash);
                coding_blob.set_index(index);
                coding_blob.set_slot(slot);
                coding_blob.set_id(&id);
                coding_blob.set_size(max_data_size);
                coding_blob.set_coding();
                coding_blobs.push(coding_blob);
            }
            // Encode inside a block so the mutable borrows of `coding_blobs`
            // end before it is appended; on encode failure the whole coding
            // set is silently dropped (see doc comment above).
            if {
                let mut coding_ptrs: Vec<_> = coding_blobs
                    .iter_mut()
                    .map(|blob| &mut blob.data_mut()[..max_data_size])
                    .collect();
                self.session.encode(&data_ptrs, coding_ptrs.as_mut_slice())
            }
            .is_ok()
            {
                next_coding.append(&mut coding_blobs);
            }
        }
        // Wrap the finished coding blobs for shared ownership.
        next_coding
            .into_iter()
            .map(|blob| Arc::new(RwLock::new(blob)))
            .collect()
    }
}
impl Default for Session {
    /// A session sized for the canonical `NUM_DATA` + `NUM_CODING` erasure set.
    fn default() -> Session {
        // The compile-time constants are valid codec parameters, so this
        // cannot fail; `expect` documents the invariant.
        Session::new(NUM_DATA, NUM_CODING)
            .expect("NUM_DATA and NUM_CODING are valid Reed-Solomon parameters")
    }
}
impl Default for CodingGenerator {
    /// A generator over a default (`NUM_DATA` x `NUM_CODING`) session.
    ///
    /// Delegates to [`CodingGenerator::new`] instead of duplicating its
    /// field-initialization logic, so construction lives in one place.
    fn default() -> Self {
        Self::new(Arc::new(Session::default()))
    }
}
#[cfg(test)]
pub mod test {
use super::*;
use crate::blockBufferPool::get_tmp_ledger_path;
use crate::blockBufferPool::Blocktree;
use crate::packet::{index_blobs, SharedBlob, BLOB_DATA_SIZE, BLOB_HEADER_SIZE};
use morgan_interface::pubkey::Pubkey;
use morgan_interface::signature::{Keypair, KeypairUtil};
use std::borrow::Borrow;
/// Specifies the contents of a `NUM_DATA`-data-blob and `NUM_CODING`-coding-blob erasure set
/// Exists to be passed to `generate_blocktree_with_coding`
#[derive(Debug, Copy, Clone)]
pub struct ErasureSpec {
    /// Which erasure set within the slot this represents
    pub set_index: u64,
    /// How many of the set's data blobs should actually be present
    pub num_data: usize,
    /// How many of the set's coding blobs should actually be present
    pub num_coding: usize,
}
/// Specifies the contents of a slot
/// Exists to be passed to `generate_blocktree_with_coding`
#[derive(Debug, Clone)]
pub struct SlotSpec {
    pub slot: u64,
    pub set_specs: Vec<ErasureSpec>,
}
/// Model of a slot in `ERASURE_SET_SIZE`-blob chunks with varying amounts of erasure and coding blobs
/// present
#[derive(Debug, Clone)]
pub struct SlotModel {
    pub slot: u64,
    pub chunks: Vec<ErasureSetModel>,
}
/// Model of a single erasure-set chunk of a slot
#[derive(Debug, Clone)]
pub struct ErasureSetModel {
    pub set_index: u64,
    pub start_index: u64,
    pub coding: Vec<SharedBlob>,
    pub data: Vec<SharedBlob>,
}
#[test]
fn test_coding() {
    // Round-trip a small 4-data/2-coding session: encode, erase one data
    // block, reconstruct, and verify the block is restored bit-for-bit.
    const N_DATA: usize = 4;
    const N_CODING: usize = 2;
    let session = Session::new(N_DATA, N_CODING).unwrap();
    // Data block i holds the 16 bytes i..i+16, so every block is distinct.
    let mut vs: Vec<Vec<u8>> = (0..N_DATA as u8).map(|i| (i..(16 + i)).collect()).collect();
    let v_orig: Vec<u8> = vs[0].clone();
    // Coding blocks start zeroed; `encode` overwrites them.
    let mut coding_blocks: Vec<_> = (0..N_CODING).map(|_| vec![0u8; 16]).collect();
    let mut coding_blocks_slices: Vec<_> =
        coding_blocks.iter_mut().map(Vec::as_mut_slice).collect();
    let v_slices: Vec<_> = vs.iter().map(Vec::as_slice).collect();
    session
        .encode(v_slices.as_slice(), coding_blocks_slices.as_mut_slice())
        .expect("encoding must succeed");
    trace!("test_coding: coding blocks:");
    for b in &coding_blocks {
        trace!("test_coding: {:?}", b);
    }
    // Mark data block 1 as missing and zero it out.
    let erasure: usize = 1;
    let present = &mut [true; N_DATA + N_CODING];
    present[erasure] = false;
    let erased = vs[erasure].clone();
    // clear an entry
    vs[erasure as usize].copy_from_slice(&[0; 16]);
    // Reconstruct over the concatenated data + coding blocks.
    let mut blocks: Vec<_> = vs
        .iter_mut()
        .chain(coding_blocks.iter_mut())
        .map(Vec::as_mut_slice)
        .collect();
    session
        .decode_blocks(blocks.as_mut_slice(), present)
        .expect("decoding must succeed");
    trace!("test_coding: vs:");
    for v in &vs {
        trace!("test_coding: {:?}", v);
    }
    // Untouched block is unchanged and the erased block is restored.
    assert_eq!(v_orig, vs[0]);
    assert_eq!(erased, vs[erasure]);
}
/// Erases the first data blob and the first coding blob of the erasure set
/// starting at `block_start_idx`, reconstructs them through `session`, and
/// asserts the recovered blobs match the originals.
fn test_toss_and_recover(
    session: &Session,
    data_blobs: &[SharedBlob],
    coding_blobs: &[SharedBlob],
    block_start_idx: usize,
) {
    // All blobs in the set are sliced to the coding blobs' size.
    let size = coding_blobs[0].read().unwrap().size();
    let mut blobs: Vec<SharedBlob> = Vec::with_capacity(ERASURE_SET_SIZE);
    blobs.push(SharedBlob::default()); // empty data, erasure at zero
    for blob in &data_blobs[block_start_idx + 1..block_start_idx + NUM_DATA] {
        // skip first blob
        blobs.push(blob.clone());
    }
    blobs.push(SharedBlob::default()); // empty coding, erasure at zero
    for blob in &coding_blobs[1..NUM_CODING] {
        blobs.push(blob.clone());
    }
    // toss one data and one coding
    let mut present = vec![true; blobs.len()];
    present[0] = false;
    present[NUM_DATA] = false;
    let (recovered_data, recovered_coding) = session
        .reconstruct_shared_blobs(&mut blobs, &present, size, block_start_idx as u64, 0)
        .expect("reconstruction must succeed");
    // Exactly one of each kind was erased, so exactly one of each recovers.
    assert_eq!(recovered_data.len(), 1);
    assert_eq!(recovered_coding.len(), 1);
    // Blobs that were present must be untouched by reconstruction.
    assert_eq!(
        blobs[1].read().unwrap().meta,
        data_blobs[block_start_idx + 1].read().unwrap().meta
    );
    assert_eq!(
        blobs[1].read().unwrap().data(),
        data_blobs[block_start_idx + 1].read().unwrap().data()
    );
    // The recovered data blob matches the one that was tossed.
    assert_eq!(
        recovered_data[0].meta,
        data_blobs[block_start_idx].read().unwrap().meta
    );
    assert_eq!(
        recovered_data[0].data(),
        data_blobs[block_start_idx].read().unwrap().data()
    );
    // The recovered coding blob matches the one that was tossed.
    assert_eq!(
        recovered_coding[0].data(),
        coding_blobs[0].read().unwrap().data()
    );
}
#[test]
fn test_erasure_generate_coding() {
    morgan_logger::setup();
    // trivial case: feeding no data must never yield coding blobs
    let mut coding_generator = CodingGenerator::default();
    let blobs = Vec::new();
    for _ in 0..NUM_DATA * 2 {
        let coding = coding_generator.next(&blobs);
        assert!(coding.is_empty());
    }
    // test coding by iterating one blob at a time
    let data_blobs = generate_test_blobs(0, NUM_DATA * 2);
    for (i, blob) in data_blobs.iter().cloned().enumerate() {
        let coding_blobs = coding_generator.next(&[blob]);
        if !coding_blobs.is_empty() {
            // Coding appears only when the i-th blob completes a set ...
            assert_eq!(i % NUM_DATA, NUM_DATA - 1);
            assert_eq!(coding_blobs.len(), NUM_CODING);
            // ... and each coding blob carries the matching data index.
            for j in 0..NUM_CODING {
                assert_eq!(
                    coding_blobs[j].read().unwrap().index(),
                    ((i / NUM_DATA) * NUM_DATA + j) as u64
                );
            }
            // Verify the produced set actually supports recovery.
            test_toss_and_recover(
                &coding_generator.session,
                &data_blobs,
                &coding_blobs,
                i - (i % NUM_DATA),
            );
        }
    }
}
#[test]
fn test_erasure_generate_coding_reset_on_new_slot() {
    morgan_logger::setup();
    let mut coding_generator = CodingGenerator::default();
    // test coding by iterating one blob at a time
    let data_blobs = generate_test_blobs(0, NUM_DATA * 2);
    // Second half of the blobs belongs to a different slot.
    for i in NUM_DATA..NUM_DATA * 2 {
        data_blobs[i].write().unwrap().set_slot(1);
    }
    // One blob short of a full set: nothing is produced, partial set buffered.
    let coding_blobs = coding_generator.next(&data_blobs[0..NUM_DATA - 1]);
    assert!(coding_blobs.is_empty());
    // New slot must discard the buffered partial set; only the NUM_DATA
    // blobs of slot 1 form a coding set.
    let coding_blobs = coding_generator.next(&data_blobs[NUM_DATA..]);
    assert_eq!(coding_blobs.len(), NUM_CODING);
    test_toss_and_recover(
        &coding_generator.session,
        &data_blobs,
        &coding_blobs,
        NUM_DATA,
    );
}
#[test]
fn test_erasure_generate_blocktree_with_coding() {
    // (num_data, num_coding, num_slots, num_sets_per_slot) — one full set
    // and one partially-filled set configuration.
    let cases = vec![
        (NUM_DATA, NUM_CODING, 7, 5),
        (NUM_DATA - 6, NUM_CODING - 1, 5, 7),
    ];
    for (num_data, num_coding, num_slots, num_sets_per_slot) in cases {
        let ledger_path = get_tmp_ledger_path!();
        let specs = (0..num_slots)
            .map(|slot| {
                let set_specs = (0..num_sets_per_slot)
                    .map(|set_index| ErasureSpec {
                        set_index,
                        num_data,
                        num_coding,
                    })
                    .collect();
                SlotSpec { slot, set_specs }
            })
            .collect::<Vec<_>>();
        let blocktree = generate_blocktree_with_coding(&ledger_path, &specs);
        // Every data and coding blob the spec promised must be readable
        // back out of the blocktree.
        for spec in specs.iter() {
            let slot = spec.slot;
            for erasure_spec in spec.set_specs.iter() {
                let start_index = erasure_spec.set_index * NUM_DATA as u64;
                let (data_end, coding_end) = (
                    start_index + erasure_spec.num_data as u64,
                    start_index + erasure_spec.num_coding as u64,
                );
                for idx in start_index..data_end {
                    let opt_bytes = blocktree.get_data_blob_bytes(slot, idx).unwrap();
                    assert!(opt_bytes.is_some());
                }
                for idx in start_index..coding_end {
                    let opt_bytes = blocktree.get_coding_blob_bytes(slot, idx).unwrap();
                    assert!(opt_bytes.is_some());
                }
            }
        }
        // Release the handle before destroying the on-disk ledger.
        drop(blocktree);
        Blocktree::destroy(&ledger_path).expect("Expect successful blocktree destruction");
    }
}
#[test]
fn test_recovery_with_model() {
    use std::thread;
    const MAX_ERASURE_SETS: u64 = 16;
    const N_THREADS: usize = 2;
    const N_SLOTS: u64 = 10;
    morgan_logger::setup();
    // Slot s contains s % MAX_ERASURE_SETS fully-populated erasure sets.
    let specs = (0..N_SLOTS).map(|slot| {
        let num_erasure_sets = slot % MAX_ERASURE_SETS;
        let set_specs = (0..num_erasure_sets)
            .map(|set_index| ErasureSpec {
                set_index,
                num_data: NUM_DATA,
                num_coding: NUM_CODING,
            })
            .collect();
        SlotSpec { slot, set_specs }
    });
    // Run the same recovery workload on several threads sharing one
    // session, to exercise concurrent use.
    let mut handles = vec![];
    let session = Arc::new(Session::default());
    for i in 0..N_THREADS {
        let specs = specs.clone();
        let session = Arc::clone(&session);
        let handle = thread::Builder::new()
            .name(i.to_string())
            .spawn(move || {
                for slot_model in generate_ledger_model(specs) {
                    for erasure_set in slot_model.chunks {
                        // Erase the first three data blobs and the first
                        // coding blob, keeping copies for later comparison.
                        let erased_coding = erasure_set.coding[0].clone();
                        let erased_data = erasure_set.data[..3].to_vec();
                        let mut blobs = Vec::with_capacity(ERASURE_SET_SIZE);
                        blobs.push(SharedBlob::default());
                        blobs.push(SharedBlob::default());
                        blobs.push(SharedBlob::default());
                        for blob in erasure_set.data.into_iter().skip(3) {
                            blobs.push(blob);
                        }
                        blobs.push(SharedBlob::default());
                        for blob in erasure_set.coding.into_iter().skip(1) {
                            blobs.push(blob);
                        }
                        let size = erased_coding.read().unwrap().size() as usize;
                        let mut present = vec![true; ERASURE_SET_SIZE];
                        present[0] = false;
                        present[1] = false;
                        present[2] = false;
                        present[NUM_DATA] = false;
                        session
                            .reconstruct_shared_blobs(
                                &mut blobs,
                                &present,
                                size,
                                erasure_set.set_index * NUM_DATA as u64,
                                slot_model.slot,
                            )
                            .expect("reconstruction must succeed");
                        // Recovered data blobs (in place at positions 0..3)
                        // must match the erased originals.
                        for (expected, recovered) in erased_data.iter().zip(blobs.iter()) {
                            let expected = expected.read().unwrap();
                            let mut recovered = recovered.write().unwrap();
                            let data_size = recovered.data_size() as usize - BLOB_HEADER_SIZE;
                            recovered.set_size(data_size);
                            let corrupt = data_size > BLOB_DATA_SIZE;
                            assert!(!corrupt, "CORRUPTION");
                            assert_eq!(&*expected, &*recovered);
                        }
                        // The recovered coding blob sits at position NUM_DATA.
                        assert_eq!(
                            erased_coding.read().unwrap().data(),
                            blobs[NUM_DATA].read().unwrap().data()
                        );
                        debug!("passed set: {}", erasure_set.set_index);
                    }
                    debug!("passed slot: {}", slot_model.slot);
                }
            })
            .expect("thread build error");
        handles.push(handle);
    }
    handles.into_iter().for_each(|h| h.join().unwrap());
}
/// Generates a model of a ledger containing certain data and coding blobs according to a spec
///
/// For each erasure set, a full set of test blobs is generated and encoded,
/// then trimmed down to the spec's `num_data`/`num_coding` counts.
pub fn generate_ledger_model<'a, I, IntoIt, S>(
    specs: I,
) -> impl Iterator<Item = SlotModel> + Clone + 'a
where
    I: IntoIterator<Item = S, IntoIter = IntoIt>,
    IntoIt: Iterator<Item = S> + Clone + 'a,
    S: Borrow<SlotSpec>,
{
    let mut coding_generator = CodingGenerator::default();
    specs.into_iter().map(move |spec| {
        let spec = spec.borrow();
        let slot = spec.slot;
        let chunks = spec
            .set_specs
            .iter()
            .map(|erasure_spec| {
                let set_index = erasure_spec.set_index as usize;
                let start_index = set_index * NUM_DATA;
                // Fresh set of NUM_DATA blobs, re-indexed for this slot/set.
                let mut blobs = generate_test_blobs(0, NUM_DATA);
                index_blobs(
                    &blobs,
                    &Keypair::new().pubkey(),
                    start_index as u64,
                    slot,
                    0,
                );
                // Encode first, then drop the blobs the spec says are absent.
                let mut coding_blobs = coding_generator.next(&blobs);
                blobs.drain(erasure_spec.num_data..);
                coding_blobs.drain(erasure_spec.num_coding..);
                ErasureSetModel {
                    start_index: start_index as u64,
                    set_index: set_index as u64,
                    data: blobs,
                    coding: coding_blobs,
                }
            })
            .collect();
        SlotModel { slot, chunks }
    })
}
/// Generates a ledger according to the given specs.
/// Blocktree should have correct SlotMeta and ErasureMeta and so on but will not have done any
/// possible recovery.
pub fn generate_blocktree_with_coding(ledger_path: &str, specs: &[SlotSpec]) -> Blocktree {
    let blocktree = Blocktree::open(ledger_path).unwrap();
    let model = generate_ledger_model(specs);
    for slot_model in model {
        let slot = slot_model.slot;
        for erasure_set in slot_model.chunks {
            // Data blobs go through the normal shared-blob write path.
            blocktree.write_shared_blobs(erasure_set.data).unwrap();
            // Coding blobs are written raw, including their header bytes.
            for shared_coding_blob in erasure_set.coding.into_iter() {
                let blob = shared_coding_blob.read().unwrap();
                blocktree
                    .put_coding_blob_bytes_raw(
                        slot,
                        blob.index(),
                        &blob.data[..blob.size() + BLOB_HEADER_SIZE],
                    )
                    .unwrap();
            }
        }
    }
    blocktree
}
// fn verify_test_blobs(offset: usize, blobs: &[SharedBlob]) -> bool {
// let data: Vec<_> = (0..BLOB_DATA_SIZE).into_iter().map(|i| i as u8).collect();
//
// blobs.iter().enumerate().all(|(i, blob)| {
// let blob = blob.read().unwrap();
// blob.index() as usize == i + offset && blob.data() == &data[..]
// })
// }
//
/// Builds `num_blobs` identical test blobs filled with an ascending byte
/// ramp, indexed consecutively starting at `offset` in slot 0.
fn generate_test_blobs(offset: usize, num_blobs: usize) -> Vec<SharedBlob> {
    // Payload shared by every blob: BLOB_DATA_SIZE bytes counting up mod 256.
    // (Ranges are already iterators; the previous `.into_iter()` calls were
    // redundant.)
    let data: Vec<_> = (0..BLOB_DATA_SIZE).map(|i| i as u8).collect();
    let blobs: Vec<_> = (0..num_blobs)
        .map(|_| {
            let mut blob = Blob::default();
            blob.data_mut()[..data.len()].copy_from_slice(&data);
            blob.set_size(data.len());
            Arc::new(RwLock::new(blob))
        })
        .collect();
    // Assign slot/index/id metadata: slot 0, indices offset..offset+num_blobs.
    index_blobs(&blobs, &Pubkey::new_rand(), offset as u64, 0, 0);
    blobs
}
impl Session {
    /// Test helper: locks every shared blob for writing, slices each to
    /// `size` bytes, and runs [`Session::reconstruct_blobs`] over the
    /// resulting mutable views.
    fn reconstruct_shared_blobs(
        &self,
        blobs: &mut [SharedBlob],
        present: &[bool],
        size: usize,
        block_start_idx: u64,
        slot: u64,
    ) -> Result<(Vec<Blob>, Vec<Blob>)> {
        let mut locks: Vec<std::sync::RwLockWriteGuard<_>> = blobs
            .iter()
            .map(|shared_blob| shared_blob.write().unwrap())
            .collect();
        let mut slices: Vec<_> = locks
            .iter_mut()
            .enumerate()
            .map(|(i, blob)| {
                // Data blobs are sliced from the raw buffer (header included),
                // coding blobs from past the header via data_mut().
                // NOTE(review): this asymmetry presumably mirrors how coding
                // was generated over `l.data[..max_data_size]` — confirm
                // before changing either side.
                if i < NUM_DATA {
                    &mut blob.data[..size]
                } else {
                    &mut blob.data_mut()[..size]
                }
            })
            .collect();
        self.reconstruct_blobs(&mut slices, present, size, block_start_idx, slot)
    }
}
}
|
use test_data_object::*;
use windows::core::*;
use Windows::Win32::Foundation::*;
use Windows::Win32::System::Com::*;
/// COM `IDataObject` implementation whose only job is to record which of
/// its methods have been invoked; each flag below is set by the matching
/// method in the `impl` block.
#[implement(Windows::Win32::System::Com::IDataObject)]
#[derive(Default)]
#[allow(non_snake_case)]
struct Test {
    GetData: bool,
    GetDataHere: bool,
    QueryGetData: bool,
    GetCanonicalFormatEtc: bool,
    SetData: bool,
    EnumFormatEtc: bool,
    DAdvise: bool,
    DUnadvise: bool,
    EnumDAdvise: bool,
}
#[allow(non_snake_case)]
impl Test {
    // Each stub records that it was called and returns a trivial value; the
    // arguments are intentionally ignored (so callers may pass null).
    fn GetData(&mut self, _: *const FORMATETC) -> Result<STGMEDIUM> {
        self.GetData = true;
        Ok(STGMEDIUM { tymed: 0, Anonymous: STGMEDIUM_0 { pstg: core::ptr::null_mut() }, pUnkForRelease: None })
    }
    fn GetDataHere(&mut self, _: *const FORMATETC, _: *mut STGMEDIUM) -> Result<()> {
        self.GetDataHere = true;
        Ok(())
    }
    fn QueryGetData(&mut self, _: *const FORMATETC) -> Result<()> {
        self.QueryGetData = true;
        Ok(())
    }
    fn GetCanonicalFormatEtc(&mut self, _: *const FORMATETC) -> Result<FORMATETC> {
        self.GetCanonicalFormatEtc = true;
        Ok(FORMATETC::default())
    }
    fn SetData(&mut self, _: *const FORMATETC, _: *const STGMEDIUM, _: BOOL) -> Result<()> {
        self.SetData = true;
        Ok(())
    }
    // NOTE(review): returns an `Err` carrying the S_OK code — presumably
    // just to exercise the error path without a real failure HRESULT;
    // confirm this is intentional.
    fn EnumFormatEtc(&mut self, _: u32) -> Result<IEnumFORMATETC> {
        self.EnumFormatEtc = true;
        Err(Error::OK)
    }
    fn DAdvise(&mut self, _: *const FORMATETC, _: u32, _: &Option<IAdviseSink>) -> Result<u32> {
        self.DAdvise = true;
        Ok(0)
    }
    fn DUnadvise(&mut self, _: u32) -> Result<()> {
        self.DUnadvise = true;
        Ok(())
    }
    fn EnumDAdvise(&mut self) -> Result<IEnumSTATDATA> {
        self.EnumDAdvise = true;
        Err(Error::OK)
    }
}
#[test]
fn test() -> Result<()> {
    unsafe {
        // Box the Rust implementation into a COM interface pointer.
        let d: IDataObject = Test::default().into();
        // Call every method through the COM vtable. Null pointers are fine
        // here because the stubs never dereference their arguments.
        d.GetData(core::ptr::null_mut())?;
        d.GetDataHere(core::ptr::null_mut(), core::ptr::null_mut())?;
        d.QueryGetData(core::ptr::null_mut())?;
        d.GetCanonicalFormatEtc(core::ptr::null_mut())?;
        d.SetData(core::ptr::null_mut(), core::ptr::null_mut(), false)?;
        // These two return Err by design, so their results are discarded.
        let _ = d.EnumFormatEtc(0);
        d.DAdvise(core::ptr::null_mut(), 0, None)?;
        d.DUnadvise(0)?;
        let _ = d.EnumDAdvise();
        // Reach back into the Rust object and check every flag was set.
        let i = Test::to_impl(&d);
        assert!(i.GetData);
        assert!(i.GetDataHere);
        assert!(i.QueryGetData);
        assert!(i.GetCanonicalFormatEtc);
        assert!(i.SetData);
        assert!(i.EnumFormatEtc);
        assert!(i.DAdvise);
        assert!(i.DUnadvise);
        assert!(i.EnumDAdvise);
        Ok(())
    }
}
|
//! This implementation has been deprecated, but still used in the Python
//! binding due to an unknown issue with the v1 implementation, to reproduce:
//!
//! 1. `let g:clap_force_python = 1`.
//! 2. open https://github.com/subspace/subspace/blob/c50bec907ab8ade923a2a0b4888f43bfc47e8a7f/polkadot/node/collation-generation/src/lib.rs
//! 3. Type `sr` and then you'll see Neovim hang forever. We have had neither the time nor
//!    a clear idea of how to fix it properly, so the old implementation is kept instead.
use super::AsciiDots;
// https://stackoverflow.com/questions/51982999/slice-a-string-containing-unicode-chars
#[inline]
fn utf8_str_slice(line: &str, start: usize, end: usize) -> String {
    // Collect the characters whose ordinal position falls in [start, end).
    // Iterating by char rather than byte keeps multi-byte UTF-8 sequences
    // intact; when end <= start the result is empty.
    let mut sliced = String::new();
    for (pos, ch) in line.chars().enumerate() {
        if pos >= end {
            break;
        }
        if pos >= start {
            sliced.push(ch);
        }
    }
    sliced
}
/// Trims `line` so the highlighted positions in `indices` fit inside
/// `container_width` columns, prefixing elided text with `AsciiDots::DOTS`.
///
/// `skipped` is the number of leading chars (e.g. an icon) to keep verbatim.
/// Returns the trimmed line plus the indices shifted to the new text, or
/// `None` when no trimming is needed / `indices` is empty.
///
/// NOTE(review): `indices` is presumably sorted ascending, and the code
/// mixes `line.len()` (bytes) with char-based slicing — works for ASCII but
/// looks suspect for multibyte input; confirm against callers.
pub fn trim_text(
    line: &str,
    indices: &[usize],
    container_width: usize,
    skipped: Option<usize>,
) -> Option<(String, Vec<usize>)> {
    let last_idx = indices.last()?;
    if *last_idx > container_width {
        // Tentatively cut so the last match lands at the right edge.
        let mut start = *last_idx - container_width;
        if start >= indices[0] || (indices.len() > 1 && *last_idx - start > container_width) {
            start = indices[0];
        }
        let line_len = line.len();
        // [--------------------------]
        // [-----------------------------------------------------------------xx--x--]
        // Nudge the cut right (up to 3 dots-widths) while the first match
        // keeps enough leading room and the tail still overflows.
        for _ in 0..3 {
            if indices[0] - start >= AsciiDots::CHAR_LEN && line_len - start >= container_width {
                start += AsciiDots::CHAR_LEN;
            } else {
                break;
            }
        }
        // Give back some leading context if little text follows the match.
        let trailing_dist = line_len - last_idx;
        if trailing_dist < indices[0] - start {
            start += trailing_dist;
        }
        let end = line.len();
        // Prefix with the preserved icon chars (if any) plus the ellipsis.
        let left_truncated = if let Some(n) = skipped {
            let icon: String = line.chars().take(n).collect();
            format!(
                "{}{}{}",
                icon,
                AsciiDots::DOTS,
                utf8_str_slice(line, start, end)
            )
        } else {
            format!("{}{}", AsciiDots::DOTS, utf8_str_slice(line, start, end))
        };
        let offset = line_len.saturating_sub(left_truncated.len());
        let left_truncated_len = left_truncated.len();
        // If the result still overflows, truncate on the right as well and
        // append a trailing ellipsis (or a single '.' when only 1 char over).
        let (truncated, max_index) = if left_truncated_len > container_width {
            if left_truncated_len == container_width + 1 {
                let left_truncated = utf8_str_slice(&left_truncated, 0, container_width - 1);
                (format!("{left_truncated}."), container_width - 1)
            } else {
                let left_truncated = utf8_str_slice(&left_truncated, 0, container_width - 2);
                (
                    format!("{left_truncated}{}", AsciiDots::DOTS),
                    container_width - AsciiDots::CHAR_LEN,
                )
            }
        } else {
            (left_truncated, container_width)
        };
        // Shift indices into the trimmed text and drop those cut off on the
        // right. NOTE(review): `x - offset` can underflow for indices that
        // fell before the cut — presumably callers never hit that; confirm.
        let truncated_indices = indices
            .iter()
            .map(|x| x - offset)
            .take_while(|x| *x < max_index)
            .collect::<Vec<_>>();
        Some((truncated, truncated_indices))
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use super::utf8_str_slice;
    /// Slicing by char index must not split the multi-byte quote characters
    /// (“ ”) in the input.
    #[test]
    fn test_print_multibyte_string_slice() {
        let multibyte_str = "README.md:23:1:Gourinath Banda. “Scalable Real-Time Kernel for Small Embedded Systems”. En- glish. PhD thesis. Denmark: University of Southern Denmark, June 2003. URL: http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=84D11348847CDC13691DFAED09883FCB?doi=10.1.1.118.1909&rep=rep1&type=pdf.";
        let start = 33;
        let end = 300;
        let expected = "Scalable Real-Time Kernel for Small Embedded Systems”. En- glish. PhD thesis. Denmark: University of Southern Denmark, June 2003. URL: http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=84D11348847CDC13691DFAED09883FCB?doi=10.1.1.118.1909&rep=rep1&type=pdf.";
        assert_eq!(expected, utf8_str_slice(multibyte_str, start, end));
    }
}
|
//! C API wrappers for calling Telamon through FFI.
//!
//! The goal of the C API is to provide thin wrappers over existing Rust
//! functionality, and only provides some cosmetic improvements to try and
//! provide a somewhat idiomatic C interface.
#[macro_use]
pub mod error;
pub mod explorer;
pub mod ir;
pub mod search_space;
use libc::{c_char, c_int, c_uint, size_t};
use telamon::device;
use telamon::explorer::config::Config;
use telamon::helper::{MemInit, TilingPattern};
pub use telamon_kernels::{linalg, Kernel};
use telamon_x86 as x86;
// Pointers to `device::Context` and `device::Device` are not C-like pointers.
// Instead, they are fat pointers containing both a regular pointer to the
// object and a pointer to the vtable. Thus, we define wrappers to encapsulate
// the pointers in an opaque type and we return pointers to the wrappers to C
// users.
/// Description of the evaluation context. In particular, it contains the
/// mapping between argument names and argument values.
///
/// Opaque wrapper around a fat trait-object pointer (see the note above the
/// definition) so C callers can hold it as a plain pointer.
pub struct Context(pub(crate) *const dyn device::Context);
/// Description of the targeted device.
///
/// Same opaque fat-pointer wrapper as [`Context`].
pub struct Device(*const dyn device::Device);
/// Initializes the logger.
///
/// Safe to call more than once: the error returned when a logger is already
/// installed is deliberately ignored.
#[no_mangle]
pub extern "C" fn env_logger_try_init() {
    env_logger::try_init().ok();
}
/// Supported device types for running kernels.
#[repr(C)]
pub enum DeviceId {
    /// CPU execution through the x86 backend.
    X86,
    /// GPU execution through CUDA (only available when built with the
    /// `cuda` feature; see `kernel_optimize`).
    Cuda,
}
/// Supported kernels.
#[derive(Clone)]
pub enum KernelParameters {
    /// A matrix-matrix multiplication kernel.
    MatMul(linalg::FusedMMP),
}
impl KernelParameters {
    /// Runs the search for a best candidate.
    ///
    /// Benchmarks the selected kernel on `context` using the explorer
    /// settings in `config`; inputs are random-filled.
    fn optimize_kernel<'a, C: device::ArgMap<'a> + device::Context>(
        &self,
        config: &Config,
        context: &mut C,
    ) {
        match self {
            KernelParameters::MatMul(params) => {
                linalg::FusedMM::<f32>::benchmark(
                    config,
                    params.clone(),
                    0,
                    MemInit::RandomFill,
                    context,
                );
            }
        }
    }
}
/// Helper function to create a TilingPattern from a buffer of u32
/// values without transferring ownership (it performs a copy).
/// Returns None when data is null.
///
/// # Safety
///
/// When non-null, `data` must point to `len` readable, initialized `u32`
/// values that remain valid for the duration of the call.
unsafe fn c_tiling_pattern(data: *const u32, len: usize) -> Option<TilingPattern> {
    if data.is_null() {
        None
    } else {
        Some(std::slice::from_raw_parts(data, len).into())
    }
}
/// Instantiate a new kernel for matrix-matrix multiplication. The
/// caller is responsible for deallocating the returned pointer using
/// kernel_free. The tile_m, tile_n and tile_k parameters are read
/// from during the call, but no pointer to the corresponding data is
/// kept afterwards.
///
/// # Safety
///
/// Each `tile_*` pointer must be null or point to `tile_*_len` readable
/// `u32` values (see `c_tiling_pattern`).
#[no_mangle]
pub unsafe extern "C" fn kernel_matmul_new(
    m: c_int,
    n: c_int,
    k: c_int,
    a_stride: c_uint,
    transpose_a: c_int,
    transpose_b: c_int,
    generic: c_int,
    tile_m: *const u32,
    tile_m_len: size_t,
    tile_n: *const u32,
    tile_n_len: size_t,
    tile_k: *const u32,
    tile_k_len: size_t,
) -> *mut KernelParameters {
    // C ints are mapped onto the Rust struct; the boolean flags follow the
    // C convention that exactly 1 means true.
    Box::into_raw(Box::new(KernelParameters::MatMul(linalg::FusedMMP {
        m: m as i32,
        n: n as i32,
        k: k as i32,
        a_stride: a_stride as u32,
        transpose_a: transpose_a == 1,
        transpose_b: transpose_b == 1,
        generic: generic == 1,
        m_tiling: c_tiling_pattern(tile_m, tile_m_len),
        n_tiling: c_tiling_pattern(tile_n, tile_n_len),
        k_tiling: c_tiling_pattern(tile_k, tile_k_len),
        activation_fun: None,
    })))
}
/// Deallocates kernel parameters created through one of the `kernel_*_new`
/// functions. The `params` pointer becomes invalid and must not be used again
/// after calling `kernel_free`. Passing a null pointer is a safe no-op,
/// matching the convention of C's `free`.
///
/// # Safety
///
/// `params` must be null or a pointer previously returned by a
/// `kernel_*_new` function that has not already been freed.
#[no_mangle]
pub unsafe extern "C" fn kernel_free(params: *mut KernelParameters) {
    // `Box::from_raw` on a null pointer is undefined behavior, so guard
    // against null instead of trusting every C caller.
    if !params.is_null() {
        drop(Box::from_raw(params));
    }
}
/// Optimize a kernel on a given device. `config_data` points to a JSON-encoded
/// string of length `config_len` containing the configuration parameters for
/// the explorer.
///
/// Returns `true` on success and `false` when CUDA was requested but this
/// build lacks the `cuda` feature.
///
/// # Safety
///
/// `params` must be a valid pointer obtained from a `kernel_*_new` function
/// and `config_data` must point to `config_len` readable bytes.
#[no_mangle]
pub unsafe extern "C" fn kernel_optimize(
    params: *mut KernelParameters,
    device: DeviceId,
    config_data: *const c_char,
    config_len: size_t,
) -> bool {
    let config = {
        let config_str = {
            let slice = std::slice::from_raw_parts(config_data as *const u8, config_len);
            std::str::from_utf8(slice).expect("Invalid configuration string")
        };
        // TODO: Should not unwrap here.
        // NOTE(review): a malformed config panics, and unwinding across the
        // FFI boundary is undefined behavior — this should return false.
        Config::from_json(config_str).unwrap()
    };
    match device {
        DeviceId::X86 => (*params).optimize_kernel(&config, &mut x86::Context::default()),
        DeviceId::Cuda => {
            // The CUDA path only exists when compiled with the feature;
            // otherwise report failure to the caller.
            #[cfg(feature = "cuda")]
            {
                let executor = ::telamon_cuda::Executor::init();
                let mut context = ::telamon_cuda::Context::new(&executor);
                (*params).optimize_kernel(&config, &mut context);
            }
            #[cfg(not(feature = "cuda"))]
            return false;
        }
    };
    true
}
|
//! ```elixir
//! # label 2
//! # pushed to stack: (document)
//! # returned form call: {:ok, existing_child}
//! # full stack: ({:ok, existing_child}, document)
//! # returns: {:ok, parent}
//! {:ok, parent} = Lumen.Web.Document.create_element(parent_document, "div")
//! :ok = Lumen.Web.Node.append_child(document, parent)
//! :ok = Lumen.Web.Node.append_child(parent, existing_child)
//! {:ok, new_child} = Lumen.Web.Document.create_element(document, "ul");
//! {:ok, inserted_child} = Lumen.Web.insert_before(parent, new_child, nil)
//! ```
use std::convert::TryInto;
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use super::label_3;
/// Label 2 of the insert-before scenario (see the module doc's Elixir
/// sketch): receives `{:ok, existing_child}` from the previous call with
/// `document` still on the stack, then queues `create_element(document,
/// "div")` followed by label 3.
#[native_implemented::label]
fn result(process: &Process, ok_existing_child: Term, document: Term) -> exception::Result<Term> {
    // Validate the `{:ok, existing_child}` shape produced by the prior frame.
    assert!(
        ok_existing_child.is_boxed_tuple(),
        "ok_existing_child ({:?}) is not a tuple",
        ok_existing_child
    );
    let ok_existing_child_tuple: Boxed<Tuple> = ok_existing_child.try_into().unwrap();
    assert_eq!(ok_existing_child_tuple.len(), 2);
    assert_eq!(ok_existing_child_tuple[0], Atom::str_to_term("ok"));
    let existing_child = ok_existing_child_tuple[1];
    assert!(existing_child.is_boxed_resource_reference());
    assert!(document.is_boxed_resource_reference());
    let parent_tag = process.binary_from_str("div");
    // Queue create_element/2; its result is consumed by label 3, queued next
    // (frames run in reverse order of queueing within the scenario).
    process.queue_frame_with_arguments(
        liblumen_web::document::create_element_2::frame()
            .with_arguments(false, &[document, parent_tag]),
    );
    process.queue_frame_with_arguments(
        label_3::frame().with_arguments(true, &[document, existing_child]),
    );
    Ok(Term::NONE)
}
|
#![recursion_limit = "256"]
mod components;
mod utils;
use crate::components::game::Game;
use wasm_bindgen::prelude::*;
use yew::prelude::*;
/// Wasm entry point, invoked automatically when the module loads: mounts
/// the root `Game` component onto the document body.
#[wasm_bindgen(start)]
pub fn run_app() {
    App::<Game>::new().mount_to_body();
}
|
use ash::vk;
use gpu_allocator::{vulkan::Allocation, MemoryLocation};
use super::{
allocator::Allocator,
queue,
surface::{self, SurfaceWrapper},
GraphicsResult,
};
/// Desired number of swapchain images; clamped to the surface's supported
/// min/max at creation time.
const PREFERRED_IMAGE_COUNT: u32 = 3;
/// Owns the Vulkan swapchain plus the offscreen attachments (depth, G-buffer,
/// resolve) and framebuffers used by the deferred + post-processing passes.
#[allow(dead_code)]
pub struct SwapchainWrapper {
    pub swapchain_loader: ash::extensions::khr::Swapchain,
    pub swapchain: vk::SwapchainKHR,
    // Images owned by the swapchain itself, one view per image.
    pub images: Vec<vk::Image>,
    pub imageviews: Vec<vk::ImageView>,
    pub depth_image: vk::Image, // used in gpass and resolve pass
    pub depth_image_alloc: Allocation,
    // Depth+stencil view, and a depth-only view for sampling/input use.
    pub depth_imageview: vk::ImageView,
    pub depth_imageview_depth_only: vk::ImageView,
    pub surface_format: vk::SurfaceFormatKHR,
    pub extent: vk::Extent2D,
    pub amount_of_images: u32,
    pub resolve_image: vk::Image, // will contain the finished deferred scene rendering
    pub resolve_imageview: vk::ImageView,
    pub resolve_image_alloc: Allocation,
    // G-buffer attachments written by the geometry pass.
    pub g0_image: vk::Image,
    pub g0_imageview: vk::ImageView,
    pub g0_image_alloc: Allocation,
    pub g1_image: vk::Image,
    pub g1_imageview: vk::ImageView,
    pub g1_image_alloc: Allocation,
    pub framebuffer_deferred: vk::Framebuffer, // used for gpass and resolve pass, renders to resolve_image
    pub framebuffer_pp_a: vk::Framebuffer, // used for pp, renders to g0_image
    pub framebuffer_pp_b: vk::Framebuffer, // used for pp, renders to resolve_image
}
impl SwapchainWrapper {
/// Builds the swapchain plus every offscreen render target the deferred
/// pipeline draws into (depth, resolve, g0, g1) and image views for all of
/// them.
///
/// The three framebuffers are initialised to `vk::Framebuffer::null()`;
/// `create_framebuffers` must be called once the render passes exist,
/// before any rendering.
pub fn init(
    instance: &ash::Instance,
    physical_device: vk::PhysicalDevice,
    logical_device: &ash::Device,
    surface: &surface::SurfaceWrapper,
    #[allow(unused_variables)] queue_families: &queue::QueueFamilies,
    allocator: &Allocator,
) -> GraphicsResult<SwapchainWrapper> {
    let surface_capabilities = surface.get_capabilities(physical_device)?;
    let extent = surface_capabilities.current_extent; // TODO: handle 0xFFFF x 0xFFFF extent
    let surface_format = surface.choose_format(physical_device)?;
    let present_mode = surface.choose_present_mode(physical_device)?;
    // Clamp the preferred count into the surface's supported range;
    // max_image_count == 0 means "no upper bound", so only clamp from below.
    let image_count = if surface_capabilities.max_image_count > 0 {
        PREFERRED_IMAGE_COUNT
            .max(surface_capabilities.min_image_count)
            .min(surface_capabilities.max_image_count)
    } else {
        PREFERRED_IMAGE_COUNT.max(surface_capabilities.min_image_count)
    };
    // TRANSFER_DST usage: the offscreen result is presumably copied/blitted
    // into the swapchain image (resolve/g0 carry TRANSFER_SRC below) —
    // confirm against the render loop.
    let swapchain_create_info = vk::SwapchainCreateInfoKHR::builder()
        .surface(surface.surface)
        .min_image_count(image_count)
        .image_format(surface_format.format)
        .image_color_space(surface_format.color_space)
        .image_extent(extent)
        .image_array_layers(1)
        .image_usage(vk::ImageUsageFlags::TRANSFER_DST | vk::ImageUsageFlags::COLOR_ATTACHMENT)
        .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
        .pre_transform(surface_capabilities.current_transform)
        .composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE)
        .present_mode(present_mode);
    let swapchain_loader = ash::extensions::khr::Swapchain::new(instance, logical_device);
    let swapchain = unsafe { swapchain_loader.create_swapchain(&swapchain_create_info, None)? };
    let swapchain_images = unsafe { swapchain_loader.get_swapchain_images(swapchain)? };
    // The driver may return more images than `image_count` requested, so the
    // actual count is taken from the returned list.
    let amount_of_images = swapchain_images.len() as u32;
    // One color view per swapchain image.
    let mut swapchain_imageviews = Vec::with_capacity(swapchain_images.len());
    for image in &swapchain_images {
        let subresource_range = vk::ImageSubresourceRange::builder()
            .aspect_mask(vk::ImageAspectFlags::COLOR)
            .base_mip_level(0)
            .level_count(1)
            .base_array_layer(0)
            .layer_count(1);
        let imageview_create_info = vk::ImageViewCreateInfo::builder()
            .image(*image)
            .view_type(vk::ImageViewType::TYPE_2D)
            .format(surface_format.format)
            .subresource_range(*subresource_range);
        let imageview =
            unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
        swapchain_imageviews.push(imageview);
    }
    let extend_3d = vk::Extent3D {
        width: extent.width,
        height: extent.height,
        depth: 1,
    };
    // Depth/stencil buffer. INPUT_ATTACHMENT lets later subpasses read it.
    // NOTE(review): D24_UNORM_S8_UINT support is not universal across
    // devices — confirm a fallback format exists or support is queried.
    let (depth_image, depth_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::D24_UNORM_S8_UINT,
        vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT | vk::ImageUsageFlags::INPUT_ATTACHMENT,
        MemoryLocation::GpuOnly,
    )?;
    // View over both depth and stencil aspects (used as the attachment view).
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::DEPTH | vk::ImageAspectFlags::STENCIL)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(depth_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::D24_UNORM_S8_UINT)
        .subresource_range(*subresource_range);
    let depth_imageview =
        unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
    // Second view over the depth aspect only — presumably for reading depth
    // as an input attachment, where a single aspect must be selected.
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::DEPTH)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(depth_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::D24_UNORM_S8_UINT)
        .subresource_range(*subresource_range);
    let depth_imageview_depth_only =
        unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
    // HDR resolve target: written by the deferred/resolve pass and by pp
    // pass B (see the framebuffer field comments on the struct).
    let (resolve_image, resolve_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT
            | vk::ImageUsageFlags::TRANSFER_SRC
            | vk::ImageUsageFlags::SAMPLED,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(resolve_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let resolve_imageview =
        unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
    // G-buffer attachment 0 — also the render target of pp pass A.
    let (g0_image, g0_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT
            | vk::ImageUsageFlags::INPUT_ATTACHMENT
            | vk::ImageUsageFlags::TRANSFER_SRC
            | vk::ImageUsageFlags::SAMPLED,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(g0_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let g0_imageview =
        unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
    // G-buffer attachment 1 — only ever read within the render pass.
    let (g1_image, g1_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT | vk::ImageUsageFlags::INPUT_ATTACHMENT,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(g1_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let g1_imageview =
        unsafe { logical_device.create_image_view(&imageview_create_info, None) }?;
    Ok(SwapchainWrapper {
        swapchain_loader,
        swapchain,
        images: swapchain_images,
        imageviews: swapchain_imageviews,
        depth_image,
        depth_image_alloc,
        depth_imageview,
        depth_imageview_depth_only,
        surface_format,
        extent,
        amount_of_images,
        g0_image,
        g0_image_alloc,
        g0_imageview,
        g1_image,
        g1_image_alloc,
        g1_imageview,
        resolve_image,
        resolve_imageview,
        resolve_image_alloc,
        // Created later by `create_framebuffers` once render passes exist.
        framebuffer_deferred: vk::Framebuffer::null(),
        framebuffer_pp_a: vk::Framebuffer::null(),
        framebuffer_pp_b: vk::Framebuffer::null(),
    })
}
// TODO: handle error
/// Acquires the next presentable swapchain image, signalling
/// `signal_semaphore` when it becomes available, and returns its index.
/// Panics if acquisition fails (see TODO above).
pub fn aquire_next_image(&self, signal_semaphore: vk::Semaphore) -> u32 {
    unsafe {
        self.swapchain_loader
            .acquire_next_image(
                self.swapchain,
                std::u64::MAX,
                signal_semaphore,
                vk::Fence::null(),
            )
            .expect("image acquisition trouble")
            // Drop the suboptimal flag; only the image index is reported.
            .0
    }
}
/// (Re)creates the three framebuffers used by the renderer:
///
/// * `framebuffer_deferred` — g-pass + resolve pass, targets `resolve_image`
/// * `framebuffer_pp_a`     — post-processing, targets `g0_image`
/// * `framebuffer_pp_b`     — post-processing, targets `resolve_image`
///
/// # Errors
/// Propagates any `vk::Result` from `vkCreateFramebuffer`.
pub fn create_framebuffers(
    &mut self,
    logical_device: &ash::Device,
    renderpass: vk::RenderPass,
    pp_renderpass: vk::RenderPass,
) -> Result<(), vk::Result> {
    // All three framebuffers share the swapchain extent and a single layer;
    // only the render pass and attachment list differ, so construct them
    // through one helper instead of triplicating the builder code.
    let extent = self.extent;
    let make_framebuffer = |render_pass: vk::RenderPass,
                            attachments: &[vk::ImageView]|
     -> Result<vk::Framebuffer, vk::Result> {
        let fb_info = vk::FramebufferCreateInfo::builder()
            .render_pass(render_pass)
            .attachments(attachments)
            .width(extent.width)
            .height(extent.height)
            .layers(1)
            .build();
        unsafe { logical_device.create_framebuffer(&fb_info, None) }
    };
    // Deferred framebuffer: attachment order must match the render pass.
    self.framebuffer_deferred = make_framebuffer(
        renderpass,
        &[
            self.resolve_imageview,
            self.depth_imageview,
            self.g0_imageview,
            self.g1_imageview,
        ],
    )?;
    // Post-processing ping-pong targets.
    self.framebuffer_pp_a = make_framebuffer(pp_renderpass, &[self.g0_imageview])?;
    self.framebuffer_pp_b = make_framebuffer(pp_renderpass, &[self.resolve_imageview])?;
    Ok(())
}
/// Destroys every Vulkan object owned by this wrapper: framebuffers,
/// image views, allocator-backed images, swapchain image views, and the
/// swapchain itself.
///
/// # Safety
/// The GPU must no longer be using any of these resources; callers
/// presumably wait for the device to go idle first — confirm at call sites.
pub unsafe fn cleanup(&mut self, logical_device: &ash::Device, allocator: &Allocator) {
    // Framebuffers first — they reference the image views destroyed below.
    logical_device.destroy_framebuffer(self.framebuffer_deferred, None);
    logical_device.destroy_framebuffer(self.framebuffer_pp_a, None);
    logical_device.destroy_framebuffer(self.framebuffer_pp_b, None);
    // For each image: views before the image they view.
    logical_device.destroy_image_view(self.depth_imageview, None);
    logical_device.destroy_image_view(self.depth_imageview_depth_only, None);
    allocator.destroy_image(self.depth_image, self.depth_image_alloc.clone());
    logical_device.destroy_image_view(self.g0_imageview, None);
    allocator.destroy_image(self.g0_image, self.g0_image_alloc.clone());
    logical_device.destroy_image_view(self.g1_imageview, None);
    allocator.destroy_image(self.g1_image, self.g1_image_alloc.clone());
    logical_device.destroy_image_view(self.resolve_imageview, None);
    allocator.destroy_image(self.resolve_image, self.resolve_image_alloc.clone());
    // Swapchain images are owned by the swapchain itself; only their views
    // are destroyed explicitly here.
    for iv in &self.imageviews {
        logical_device.destroy_image_view(*iv, None);
    }
    self.swapchain_loader
        .destroy_swapchain(self.swapchain, None)
}
/// Tears down all swapchain-dependent resources and rebuilds them at the
/// surface's current extent (e.g. after a window resize), then recreates
/// the framebuffers. Mirrors the construction sequence in `init`.
///
/// NOTE(review): callers presumably wait for the device to be idle before
/// calling this — confirm at call sites, since resources are destroyed
/// immediately below.
///
/// NOTE(review): the retired swapchain handed to `.old_swapchain(..)` is
/// overwritten by the new handle and never destroyed, which leaks it on
/// every recreate — consider destroying the old handle after creation.
pub(crate) fn recreate(
    &mut self,
    device: &ash::Device,
    physical_device: vk::PhysicalDevice,
    allocator: &Allocator,
    surface: &SurfaceWrapper,
    renderpass: vk::RenderPass,
    pp_renderpass: vk::RenderPass,
) -> GraphicsResult<()> {
    // Destroy everything that depends on the old extent/swapchain, in the
    // same order as `cleanup` (framebuffers, then views, then images).
    unsafe {
        device.destroy_framebuffer(self.framebuffer_deferred, None);
        device.destroy_framebuffer(self.framebuffer_pp_a, None);
        device.destroy_framebuffer(self.framebuffer_pp_b, None);
        device.destroy_image_view(self.depth_imageview, None);
        device.destroy_image_view(self.depth_imageview_depth_only, None);
        allocator.destroy_image(self.depth_image, self.depth_image_alloc.clone());
        device.destroy_image_view(self.g0_imageview, None);
        allocator.destroy_image(self.g0_image, self.g0_image_alloc.clone());
        device.destroy_image_view(self.g1_imageview, None);
        allocator.destroy_image(self.g1_image, self.g1_image_alloc.clone());
        device.destroy_image_view(self.resolve_imageview, None);
        allocator.destroy_image(self.resolve_image, self.resolve_image_alloc.clone());
        for iv in &self.imageviews {
            device.destroy_image_view(*iv, None);
        }
        self.imageviews.clear();
    }
    // Re-query the surface: the extent and format may have changed.
    let surface_capabilities = surface.get_capabilities(physical_device)?;
    self.extent = surface_capabilities.current_extent; // TODO: handle 0xFFFF x 0xFFFF extent
    self.surface_format = surface.choose_format(physical_device)?;
    let present_mode = surface.choose_present_mode(physical_device)?;
    // Same clamping rule as in `init`; max_image_count == 0 means unbounded.
    let image_count = if surface_capabilities.max_image_count > 0 {
        PREFERRED_IMAGE_COUNT
            .max(surface_capabilities.min_image_count)
            .min(surface_capabilities.max_image_count)
    } else {
        PREFERRED_IMAGE_COUNT.max(surface_capabilities.min_image_count)
    };
    // `.old_swapchain` lets the driver reuse resources from the retired
    // swapchain (see the leak NOTE above).
    let swapchain_create_info = vk::SwapchainCreateInfoKHR::builder()
        .surface(surface.surface)
        .min_image_count(image_count)
        .image_format(self.surface_format.format)
        .image_color_space(self.surface_format.color_space)
        .image_extent(self.extent)
        .image_array_layers(1)
        .image_usage(vk::ImageUsageFlags::TRANSFER_DST | vk::ImageUsageFlags::COLOR_ATTACHMENT)
        .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
        .pre_transform(surface_capabilities.current_transform)
        .composite_alpha(vk::CompositeAlphaFlagsKHR::OPAQUE)
        .present_mode(present_mode)
        .old_swapchain(self.swapchain)
        .build();
    self.swapchain = unsafe {
        self.swapchain_loader
            .create_swapchain(&swapchain_create_info, None)?
    };
    self.images = unsafe { self.swapchain_loader.get_swapchain_images(self.swapchain)? };
    // New color view per swapchain image (the old views were cleared above).
    for image in &self.images {
        let subresource_range = vk::ImageSubresourceRange::builder()
            .aspect_mask(vk::ImageAspectFlags::COLOR)
            .base_mip_level(0)
            .level_count(1)
            .base_array_layer(0)
            .layer_count(1);
        let imageview_create_info = vk::ImageViewCreateInfo::builder()
            .image(*image)
            .view_type(vk::ImageViewType::TYPE_2D)
            .format(self.surface_format.format)
            .subresource_range(*subresource_range);
        let imageview = unsafe { device.create_image_view(&imageview_create_info, None) }?;
        self.imageviews.push(imageview);
    }
    let extend_3d = vk::Extent3D {
        width: self.extent.width,
        height: self.extent.height,
        depth: 1,
    };
    // Depth/stencil buffer at the new extent (same formats/usages as init).
    let (depth_image, depth_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::D24_UNORM_S8_UINT,
        vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT | vk::ImageUsageFlags::INPUT_ATTACHMENT,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::DEPTH | vk::ImageAspectFlags::STENCIL)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(depth_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::D24_UNORM_S8_UINT)
        .subresource_range(*subresource_range);
    let depth_imageview = unsafe { device.create_image_view(&imageview_create_info, None) }?;
    self.depth_image = depth_image;
    self.depth_image_alloc = depth_image_alloc;
    self.depth_imageview = depth_imageview;
    // Depth-only companion view, as in `init`.
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::DEPTH)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(depth_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::D24_UNORM_S8_UINT)
        .subresource_range(*subresource_range);
    let depth_imageview_depth_only =
        unsafe { device.create_image_view(&imageview_create_info, None) }?;
    self.depth_imageview_depth_only = depth_imageview_depth_only;
    // Resolve target.
    let (resolve_image, resolve_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT
            | vk::ImageUsageFlags::TRANSFER_SRC
            | vk::ImageUsageFlags::SAMPLED,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(resolve_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let resolve_imageview = unsafe { device.create_image_view(&imageview_create_info, None) }?;
    self.resolve_image = resolve_image;
    self.resolve_image_alloc = resolve_image_alloc;
    self.resolve_imageview = resolve_imageview;
    // G-buffer attachment 0.
    let (g0_image, g0_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT
            | vk::ImageUsageFlags::INPUT_ATTACHMENT
            | vk::ImageUsageFlags::TRANSFER_SRC
            | vk::ImageUsageFlags::SAMPLED,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(g0_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let g0_imageview = unsafe { device.create_image_view(&imageview_create_info, None) }?;
    self.g0_image = g0_image;
    self.g0_image_alloc = g0_image_alloc;
    self.g0_imageview = g0_imageview;
    // G-buffer attachment 1.
    let (g1_image, g1_image_alloc) = allocator.create_image(
        extend_3d.width,
        extend_3d.height,
        vk::Format::R16G16B16A16_SFLOAT,
        vk::ImageUsageFlags::COLOR_ATTACHMENT | vk::ImageUsageFlags::INPUT_ATTACHMENT,
        MemoryLocation::GpuOnly,
    )?;
    let subresource_range = vk::ImageSubresourceRange::builder()
        .aspect_mask(vk::ImageAspectFlags::COLOR)
        .base_mip_level(0)
        .level_count(1)
        .base_array_layer(0)
        .layer_count(1);
    let imageview_create_info = vk::ImageViewCreateInfo::builder()
        .image(g1_image)
        .view_type(vk::ImageViewType::TYPE_2D)
        .format(vk::Format::R16G16B16A16_SFLOAT)
        .subresource_range(*subresource_range);
    let g1_imageview = unsafe { device.create_image_view(&imageview_create_info, None) }?;
    self.g1_image = g1_image;
    self.g1_image_alloc = g1_image_alloc;
    self.g1_imageview = g1_imageview;
    // Finally rebuild the framebuffers against the new attachments.
    self.create_framebuffers(device, renderpass, pp_renderpass)?;
    Ok(())
}
}
|
//! Utility wrappers to simplify writing OpenGL code.
//!
//! This crate aspires to provide an abstraction over OpenGL's raw API in order to simplify the
//! task of writing higher-level rendering code for OpenGL. `gl-util` is much in the vein of
//! [glutin](https://github.com/tomaka/glium) and [gfx-rs](https://github.com/gfx-rs/gfx),
//! the main difference being that it is much more poorly constructed and is being developed by
//! someone with much less OpenGL experience.
#![feature(associated_consts)]
#![feature(pub_restricted)]
extern crate bootstrap_rs as bootstrap;
extern crate bootstrap_gl as gl;
use context::{Context, ContextInner};
use gl::*;
use shader::Program;
use std::mem;
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use texture::Texture2d;
pub use gl::{
AttributeLocation,
Comparison,
DestFactor,
DrawMode,
Face,
PolygonMode,
ShaderType,
SourceFactor,
WindingOrder,
};
pub mod context;
pub mod shader;
pub mod texture;
#[cfg(target_os="windows")]
#[path="windows\\mod.rs"]
pub mod platform;
/// Describes the layout of vertex data in a `VertexBuffer`.
///
/// All quantities are measured in primitive elements (e.g. `f32`s), not bytes.
///
/// See [`VertexArray::map_attrib_location()`][VertexArray::map_attrib_location] for more information.
///
/// [VertexArray::map_attrib_location]: TODO: Figure out link.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AttribLayout {
    /// The number of primitive elements per vertex.
    ///
    /// For example, a 3D vector with x, y, and z coordinates has 3 elements. This may not be
    /// larger than 4.
    pub elements: usize,
    /// The distance, in elements, between instances of the vertex attribute.
    ///
    /// Generally this will either be the total number of elements in a vertex, indicating that
    /// vertex attribs are interleaved in the buffer, or 0, indicating that attribs are tightly
    /// packed within the buffer.
    pub stride: usize,
    /// The offset, in elements, from the start of the buffer where the attrib first appears.
    pub offset: usize,
}
/// A vertex array object (VAO) owning its vertex buffer and, optionally,
/// an index buffer, tied to the GL context that created it.
#[derive(Debug)]
pub struct VertexArray {
    vertex_array_name: VertexArrayName,
    vertex_buffer_name: BufferName,
    index_buffer: Option<IndexBuffer>,
    /// The number of primitive elements in the buffer.
    ///
    /// Does not reflect the number of vertices in the buffer.
    vertex_primitive_len: usize,
    /// The total number of primitive elements per vertex.
    ///
    /// Used to determine how many vertices are in the buffer; it is 0 until
    /// `set_attrib()` has declared at least one attribute.
    elements_per_vertex: usize,
    context: Rc<RefCell<ContextInner>>,
}
impl VertexArray {
/// Creates a new VAO and vertex buffer, filling the buffer with the provided data.
// TODO: Is this operation fallible? If so it should return a `Result<T>`.
pub fn new(context: &Context, vertex_data: &[f32]) -> VertexArray {
    let context_inner = context.inner();
    let (vertex_buffer_name, vertex_array_name) = unsafe {
        let mut context = context_inner.borrow_mut();
        // Guard presumably makes this GL context current for the scope —
        // see `context::ContextGuard`.
        let _guard = ::context::ContextGuard::new(context.raw());
        // Create the VAO and VBO.
        let vertex_array = gl::gen_vertex_array().expect("Failed to create vertex array object");
        let buffer_name = gl::gen_buffer().expect("Failed to create buffer object");
        // Bind the VAO to the context, then bind the buffer to the VAO.
        context.bind_vertex_array(vertex_array);
        gl::bind_buffer(BufferTarget::Array, buffer_name);
        // Fill the VBO with data.
        gl::buffer_data(
            BufferTarget::Array,
            vertex_data,
            BufferUsage::StaticDraw,
        );
        (buffer_name, vertex_array)
    };
    VertexArray {
        vertex_array_name: vertex_array_name,
        vertex_buffer_name: vertex_buffer_name,
        index_buffer: None,
        vertex_primitive_len: vertex_data.len(),
        // Unknown until `set_attrib()` declares the vertex layout.
        elements_per_vertex: 0,
        context: context_inner,
    }
}
/// Creates a new VAO with the provided vertex and index data.
///
/// Equivalent to `new()` followed by creating and attaching an element
/// array buffer filled with `index_data`.
pub fn with_index_buffer(context: &Context, vertex_data: &[f32], index_data: &[u32]) -> VertexArray {
    let mut vertex_array = VertexArray::new(context, vertex_data);
    let index_buffer_name = unsafe {
        let context = vertex_array.context.borrow_mut();
        let _guard = ::context::ContextGuard::new(context.raw());
        let buffer_name = gl::gen_buffer().expect("Failed to generate buffer object");
        // Bind and fill the element array buffer with the index data.
        gl::bind_buffer(BufferTarget::ElementArray, buffer_name);
        gl::buffer_data(
            BufferTarget::ElementArray,
            index_data,
            BufferUsage::StaticDraw,
        );
        buffer_name
    };
    vertex_array.index_buffer = Some(IndexBuffer {
        name: index_buffer_name,
        primitive_len: index_data.len(),
    });
    vertex_array
}
/// Declares a vertex attribute within the vertex buffer.
///
/// Also accumulates `layout.elements` into `elements_per_vertex`, which
/// `DrawBuilder::draw()` uses to compute the vertex count.
///
/// # Panics
/// If `layout.elements` is greater than 4.
pub fn set_attrib(
    &mut self,
    attrib_location: AttributeLocation,
    layout: AttribLayout,
) {
    assert!(
        layout.elements <= 4,
        "Layout elements must not be more than 4 (was actually {})",
        layout.elements,
    );
    // TODO: Verify validity of layout?
    // TODO: Verify that `attrib_location` is valid? How would we even do that?
    // Update the total number of elements per vertex.
    self.elements_per_vertex += layout.elements;
    unsafe {
        let mut context = self.context.borrow_mut();
        let _guard = ::context::ContextGuard::new(context.raw());
        context.bind_vertex_array(self.vertex_array_name);
        gl::enable_vertex_attrib_array(attrib_location);
        // Stride/offset are converted from element counts to bytes;
        // `False` is presumably the "normalized" flag — confirm against
        // the `gl::vertex_attrib_pointer` binding.
        gl::vertex_attrib_pointer(
            attrib_location,
            layout.elements as i32,
            GlType::Float,
            False,
            (layout.stride * mem::size_of::<f32>()) as i32, // TODO: Correctly handle non-f32
            layout.offset * mem::size_of::<f32>(), // attrib data types.
        );
    }
}
}
impl Drop for VertexArray {
    /// Deletes the VAO and its buffers, then clears the context's cached
    /// VAO binding.
    fn drop(&mut self) {
        let mut context = self.context.borrow_mut();
        let _guard = ::context::ContextGuard::new(context.raw());
        // `IndexBuffer` is `Copy`, so no clone is needed. When there is no
        // index buffer, a null name stands in; per the GL spec,
        // glDeleteBuffers silently ignores zero names.
        let index_buffer_name = self
            .index_buffer
            .map_or(BufferName::null(), |buf| buf.name);
        let buffers = [self.vertex_buffer_name, index_buffer_name];
        unsafe {
            gl::delete_vertex_arrays(1, &mut self.vertex_array_name);
            gl::delete_buffers(2, buffers.as_ptr());
        }
        context.unbind_vertex_array(self.vertex_array_name);
    }
}
/// Represents a buffer of index data used to index into a `VertexBuffer` when drawing.
#[derive(Debug, Clone, Copy)]
struct IndexBuffer {
    name: BufferName,
    /// The number of indices in the index buffer.
    ///
    /// This does not reflect the number of primitive shapes described by the index buffer,
    /// e.g. an index length of 3 may only describe a single triangle.
    primitive_len: usize,
}
/// A configuration object for specifying all of the various configurable options for a draw call.
// TODO: Change `DrawBuilder` to cull backfaces by default.
pub struct DrawBuilder<'a> {
    vertex_array: &'a VertexArray,
    draw_mode: DrawMode,
    // `None` for the Option-typed state below means "leave disabled /
    // use the default" — see `draw()`.
    polygon_mode: Option<PolygonMode>,
    program: Option<&'a Program>,
    cull: Option<Face>,
    depth_test: Option<Comparison>,
    winding_order: WindingOrder,
    blend: (SourceFactor, DestFactor),
    uniforms: HashMap<UniformLocation, UniformValue<'a>>,
    context: Rc<RefCell<ContextInner>>,
}
impl<'a> DrawBuilder<'a> {
/// Creates a draw builder for `vertex_array` with everything else at its
/// default: no program, no culling, no depth test, default winding and
/// blend factors, and no uniforms.
pub fn new(context: &Context, vertex_array: &'a VertexArray, draw_mode: DrawMode) -> DrawBuilder<'a> {
    // TODO: Make sure `vertex_array` comes from the right context.
    DrawBuilder {
        vertex_array: vertex_array,
        draw_mode: draw_mode,
        polygon_mode: None,
        program: None,
        cull: None,
        depth_test: None,
        winding_order: WindingOrder::default(),
        blend: Default::default(),
        uniforms: HashMap::new(),
        context: context.inner(),
    }
}
/// Sets the polygon rasterization mode for this draw call.
pub fn polygon_mode(&mut self, polygon_mode: PolygonMode) -> &mut DrawBuilder<'a> {
    self.polygon_mode = Some(polygon_mode);
    self
}

/// Sets the shader program to draw with.
///
/// # Panics
/// If `program` was created by a different GL context.
pub fn program(&mut self, program: &'a Program) -> &mut DrawBuilder<'a> {
    assert!(
        self.context.borrow().raw() == program.context,
        "Specified program's context does not match draw builder's context"
    );
    self.program = Some(program);
    self
}

/// Enables face culling for the given face.
pub fn cull(&mut self, face: Face) -> &mut DrawBuilder<'a> {
    self.cull = Some(face);
    self
}

/// Enables depth testing with the given comparison function.
pub fn depth_test(&mut self, comparison: Comparison) -> &mut DrawBuilder<'a> {
    self.depth_test = Some(comparison);
    self
}

/// Sets the winding order used to determine front faces (only takes
/// effect when culling is enabled — see `draw()`).
pub fn winding(&mut self, winding_order: WindingOrder) -> &mut DrawBuilder<'a> {
    self.winding_order = winding_order;
    self
}

/// Sets the source and destination blend factors.
pub fn blend(
    &mut self,
    source_factor: SourceFactor,
    dest_factor: DestFactor
) -> &mut DrawBuilder<'a> {
    self.blend = (source_factor, dest_factor);
    self
}
/// Sets the value of a uniform variable in the shader program.
///
/// Uniform names that do not exist in the shader program are silently
/// ignored, so it is always safe to speculatively set uniform values even
/// if the shader program may not use that uniform.
///
/// # Panics
///
/// - If the program has not been set using `program()`.
pub fn uniform<T>(
    &mut self,
    name: &str,
    value: T
) -> &mut DrawBuilder<'a>
    where T: Into<UniformValue<'a>>
{
    let value = value.into();
    let program = self
        .program
        .expect("Cannot set a uniform without a shader program");
    // Unknown uniform names are dropped rather than treated as errors.
    // TODO: This checking is bad? Or maybe not? I don't remember.
    if let Some(location) = program.get_uniform_location(name) {
        self.uniforms.insert(location, value);
    }
    self
}
/// Issues the draw call, first applying all configured state (polygon
/// mode, program, culling/winding, depth test, blend, uniforms) to the
/// context.
///
/// Uses `draw_elements` when the vertex array has an index buffer,
/// `draw_arrays` otherwise.
///
/// NOTE(review): the arrays path divides by `elements_per_vertex`, which
/// stays 0 until `VertexArray::set_attrib()` has been called — drawing an
/// un-attributed, un-indexed VAO would panic with a division by zero.
pub fn draw(&mut self) {
    let mut context = self.context.borrow_mut();
    let _guard = ::context::ContextGuard::new(context.raw());
    context.polygon_mode(self.polygon_mode.unwrap_or_default());
    context.use_program(self.program.map(Program::inner));
    // Winding order is only applied when culling is enabled.
    if let Some(face) = self.cull {
        context.enable_server_cull(true);
        context.cull_mode(face);
        context.winding_order(self.winding_order);
    } else {
        context.enable_server_cull(false);
    }
    if let Some(depth_test) = self.depth_test {
        context.enable_server_depth_test(true);
        context.depth_test(depth_test);
    } else {
        context.enable_server_depth_test(false);
    }
    let (source_factor, dest_factor) = self.blend;
    context.blend(source_factor, dest_factor);
    // Texture-unit counter threaded through `apply()` so each texture
    // uniform gets its own unit.
    let mut active_texture = 0;
    // Apply uniforms.
    for (&location, uniform) in &self.uniforms {
        self.apply(uniform, location, &mut active_texture);
    }
    unsafe {
        // TODO: Do a better job tracking VAO and VBO state? I don't know how that would be
        // accomplished, but I don't honestly undertand VAOs so maybe I should figure that out
        // first.
        context.bind_vertex_array(self.vertex_array.vertex_array_name);
        if let Some(indices) = self.vertex_array.index_buffer.as_ref() {
            gl::draw_elements(
                self.draw_mode,
                indices.primitive_len as i32,
                IndexType::UnsignedInt,
                0,
            );
        } else {
            // Vertex count = total elements / elements per vertex.
            let vertex_len = self.vertex_array.vertex_primitive_len / self.vertex_array.elements_per_vertex;
            gl::draw_arrays(
                self.draw_mode,
                0,
                vertex_len as i32,
            );
        }
    }
}
/// Uploads a single uniform value to `location`, dispatching on the
/// variant to the matching `gl::uniform_*` call.
///
/// Texture uniforms are bound to consecutive texture units:
/// `active_texture` holds the next free unit index and is advanced once
/// per texture uploaded.
fn apply(&self, uniform: &UniformValue, location: UniformLocation, active_texture: &mut i32) {
    match *uniform {
        UniformValue::F32(value) => unsafe {
            gl::uniform_f32x1(location, value);
        },
        UniformValue::F32x2((x, y)) => unsafe {
            gl::uniform_f32x2(location, x, y);
        },
        UniformValue::F32x3((x, y, z)) => unsafe {
            gl::uniform_f32x3(location, x, y, z);
        },
        UniformValue::F32x4((x, y, z, w)) => unsafe {
            gl::uniform_f32x4(location, x, y, z, w);
        },
        UniformValue::F32x1v(value) => unsafe {
            gl::uniform_f32x1v(location, value.len() as i32, value.as_ptr());
        },
        UniformValue::F32x3v(value) => unsafe {
            gl::uniform_f32x3v(location, value.len() as i32, value.as_ptr() as *const _);
        },
        UniformValue::F32x4v(value) => unsafe {
            gl::uniform_f32x4v(location, value.len() as i32, value.as_ptr() as *const _);
        },
        UniformValue::I32(value) => unsafe {
            gl::uniform_i32x1(location, value);
        },
        UniformValue::I32x1v(value) => unsafe {
            gl::uniform_i32x1v(location, value.len() as i32, value.as_ptr());
        },
        UniformValue::U32(value) => unsafe {
            gl::uniform_u32x1(location, value);
        },
        // Only 4x4 (16 elements) and 3x3 (9 elements) matrices are
        // supported; anything else is a caller bug.
        UniformValue::Matrix(ref matrix) => match matrix.data.len() {
            16 => unsafe {
                gl::uniform_matrix_f32x4v(
                    location,
                    1,
                    matrix.transpose.into(),
                    matrix.data.as_ptr())
            },
            9 => unsafe {
                gl::uniform_matrix_f32x3v(
                    location,
                    1,
                    matrix.transpose.into(),
                    matrix.data.as_ptr())
            },
            _ => panic!("Unsupported matrix data length: {}", matrix.data.len()),
        },
        // Bind the texture to the next free unit and point the sampler
        // uniform at that unit.
        UniformValue::Texture(texture) => {
            unsafe {
                texture::set_active_texture(*active_texture as u32);
                gl::bind_texture(TextureBindTarget::Texture2d, texture.inner());
                gl::uniform_i32x1(location, *active_texture);
            }
            *active_texture += 1;
        }
    }
}
/// Represents a value for a uniform variable in a shader program.
///
/// The `*v` variants carry slices for array uniforms; `Matrix` accepts a
/// flat slice of 9 (3x3) or 16 (4x4) elements — see `DrawBuilder::apply`.
#[derive(Debug)]
pub enum UniformValue<'a> {
    F32(f32),
    F32x2((f32, f32)),
    F32x3((f32, f32, f32)),
    F32x4((f32, f32, f32, f32)),
    F32x1v(&'a [f32]),
    F32x3v(&'a [[f32; 3]]),
    F32x4v(&'a [[f32; 4]]),
    I32(i32),
    I32x1v(&'a [i32]),
    U32(u32),
    Matrix(GlMatrix<'a>),
    Texture(&'a Texture2d),
}
// Conversions into `UniformValue` so callers can pass plain values, tuples,
// arrays, and slices to `DrawBuilder::uniform()` without naming the enum
// variant explicitly.
impl<'a> From<f32> for UniformValue<'a> {
    fn from(value: f32) -> UniformValue<'a> {
        UniformValue::F32(value)
    }
}
impl<'a> From<(f32, f32)> for UniformValue<'a> {
    fn from(value: (f32, f32)) -> UniformValue<'a> {
        UniformValue::F32x2(value)
    }
}
impl<'a> From<(f32, f32, f32)> for UniformValue<'a> {
    fn from(value: (f32, f32, f32)) -> UniformValue<'a> {
        UniformValue::F32x3(value)
    }
}
impl<'a> From<(f32, f32, f32, f32)> for UniformValue<'a> {
    fn from(value: (f32, f32, f32, f32)) -> UniformValue<'a> {
        UniformValue::F32x4(value)
    }
}
// Fixed-size arrays convert to the matching scalar/tuple variant.
impl<'a> From<[f32; 1]> for UniformValue<'a> {
    fn from(value: [f32; 1]) -> UniformValue<'a> {
        UniformValue::F32(value[0])
    }
}
impl<'a> From<[f32; 2]> for UniformValue<'a> {
    fn from(value: [f32; 2]) -> UniformValue<'a> {
        UniformValue::F32x2((value[0], value[1]))
    }
}
impl<'a> From<[f32; 3]> for UniformValue<'a> {
    fn from(value: [f32; 3]) -> UniformValue<'a> {
        UniformValue::F32x3((value[0], value[1], value[2]))
    }
}
impl<'a> From<[f32; 4]> for UniformValue<'a> {
    fn from(value: [f32; 4]) -> UniformValue<'a> {
        UniformValue::F32x4((value[0], value[1], value[2], value[3]))
    }
}
// Slices borrow into the array (`*v`) variants.
impl<'a> From<&'a [f32]> for UniformValue<'a> {
    fn from(value: &'a [f32]) -> UniformValue<'a> {
        UniformValue::F32x1v(value)
    }
}
impl<'a> From<&'a [[f32; 3]]> for UniformValue<'a> {
    fn from(value: &'a [[f32; 3]]) -> UniformValue<'a> {
        UniformValue::F32x3v(value)
    }
}
impl<'a> From<&'a [[f32; 4]]> for UniformValue<'a> {
    fn from(value: &'a [[f32; 4]]) -> UniformValue<'a> {
        UniformValue::F32x4v(value)
    }
}
impl<'a> From<i32> for UniformValue<'a> {
    fn from(from: i32) -> UniformValue<'a> {
        UniformValue::I32(from)
    }
}
impl<'a> From<&'a [i32]> for UniformValue<'a> {
    fn from(from: &'a [i32]) -> UniformValue<'a> {
        UniformValue::I32x1v(from)
    }
}
impl<'a> From<u32> for UniformValue<'a> {
    fn from(from: u32) -> UniformValue<'a> {
        UniformValue::U32(from)
    }
}
impl<'a> From<GlMatrix<'a>> for UniformValue<'a> {
    fn from(matrix: GlMatrix<'a>) -> UniformValue<'a> {
        UniformValue::Matrix(matrix)
    }
}
impl<'a> From<&'a Texture2d> for UniformValue<'a> {
    fn from(from: &'a Texture2d) -> UniformValue<'a> {
        UniformValue::Texture(from)
    }
}
/// A matrix to be passed as a uniform, stored as a flat slice of `f32`s.
#[derive(Debug, Clone)]
pub struct GlMatrix<'a> {
    /// Matrix elements; the length must be 16 (4x4) or 9 (3x3) — other
    /// lengths panic in `DrawBuilder::apply`.
    pub data: &'a [f32],
    /// Whether OpenGL should transpose the matrix when uploading it.
    pub transpose: bool,
}
|
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::errors::Result;
use tremor_script::Value;
pub(crate) mod binary;
pub(crate) mod binflux;
pub(crate) mod influx;
pub(crate) mod json;
pub(crate) mod msgpack;
pub(crate) mod null;
pub(crate) mod statsd;
pub(crate) mod string;
pub(crate) mod syslog;
pub(crate) mod yaml;
/// All mime types that `by_mime_type` can resolve; `builtin_codec_map`
/// therefore contains exactly these keys.
const MIME_TYPES: [&str; 8] = [
    "application/json",
    "application/yaml",
    "text/plain",
    "text/html",
    "application/msgpack",
    "application/x-msgpack",
    "application/vnd.msgpack",
    "application/octet-stream",
];
/// Common imports shared by the codec implementations in this module.
mod prelude {
    pub use super::Codec;
    pub use crate::errors::*;
    pub use tremor_script::prelude::*;
    pub use tremor_script::{Object, Value};
}
/// The codec trait, to encode and decode data
pub trait Codec: Send + Sync {
    /// The canonical name for this codec
    fn name(&self) -> &str;
    /// supported mime types
    /// as <base>/<subtype>
    ///
    /// e.g. application/json
    ///
    /// The returned mime types should be unique to this codec
    #[cfg(not(tarpaulin_include))]
    fn mime_types(&self) -> Vec<&str> {
        vec![]
    }
    /// Decodes a binary into a `Value`.
    /// If `None` is returned, no data could be decoded, but no error
    /// condition was triggered either.
    ///
    /// # Errors
    /// * if we can't decode the data
    fn decode<'input>(
        &mut self,
        data: &'input mut [u8],
        ingest_ns: u64,
    ) -> Result<Option<Value<'input>>>;
    /// Encodes a Value into a binary
    ///
    /// # Errors
    /// * If the encoding fails
    fn encode(&self, data: &Value) -> Result<Vec<u8>>;
    /// Encodes into an existing buffer
    ///
    /// The default implementation replaces `dst`'s contents with a freshly
    /// encoded vector via `encode`.
    ///
    /// # Errors
    /// * when we can't encode into the given vector
    #[cfg(not(tarpaulin_include))]
    fn encode_into(&self, data: &Value, dst: &mut Vec<u8>) -> Result<()> {
        let mut res = self.encode(data)?;
        std::mem::swap(&mut res, dst);
        Ok(())
    }
    /// special clone method for getting clone functionality
    /// into a this trait referenced as trait object
    /// otherwise we cannot use this type inside structs that need to be `Clone`.
    /// See: `crate::codec::rest::BoxedCodec`
    fn boxed_clone(&self) -> Box<dyn Codec>;
}
/// Resolves a codec implementation by its canonical name.
///
/// # Errors
/// * if no codec with that name exists
pub fn lookup(name: &str) -> Result<Box<dyn Codec>> {
    let codec: Box<dyn Codec> = match name {
        "json" => Box::new(json::Json::<json::Unsorted>::default()),
        "json-sorted" => Box::new(json::Json::<json::Sorted>::default()),
        "msgpack" => Box::new(msgpack::MsgPack {}),
        "influx" => Box::new(influx::Influx {}),
        "binflux" => Box::new(binflux::BInflux {}),
        "null" => Box::new(null::Null {}),
        "string" => Box::new(string::String {}),
        "statsd" => Box::new(statsd::StatsD {}),
        "yaml" => Box::new(yaml::Yaml {}),
        "binary" => Box::new(binary::Binary {}),
        "syslog" => Box::new(syslog::Syslog::utcnow()),
        other => return Err(format!("Codec '{}' not found.", other).into()),
    };
    Ok(codec)
}
/// Builds the mapping from mime type to codec for every builtin codec that
/// has a safe, unambiguous mime association.
///
/// If a more specific codec should serve a generic mime type (e.g. statsd
/// for `text/plain`), that pairing must be declared in a source specific
/// `codec_map` instead.
#[must_use]
pub fn builtin_codec_map() -> halfbrown::HashMap<String, Box<dyn Codec>> {
    let mut map = halfbrown::HashMap::new();
    for mime in &MIME_TYPES {
        // Only mime types with a known codec end up in the map.
        if let Ok(codec) = by_mime_type(mime) {
            map.insert((*mime).to_string(), codec);
        }
    }
    map
}
/// Looks a codec up by mime type.
///
/// # Errors
/// * if no codec could be found for the given mime type
pub fn by_mime_type(mime: &str) -> Result<Box<dyn Codec>> {
    // Same shape as `lookup`: coerce each arm into `Box<dyn Codec>` via the
    // annotated binding and early-return on unknown mime types.
    let codec: Box<dyn Codec> = match mime {
        "application/json" => Box::new(json::Json::<json::Unsorted>::default()),
        "application/yaml" => Box::new(yaml::Yaml {}),
        "text/plain" | "text/html" => Box::new(string::String {}),
        "application/msgpack" | "application/x-msgpack" | "application/vnd.msgpack" => {
            Box::new(msgpack::MsgPack {})
        }
        "application/octet-stream" => Box::new(binary::Binary {}),
        unknown => return Err(format!("No codec found for mime type '{}'", unknown).into()),
    };
    Ok(codec)
}
#[cfg(test)]
mod test {
    /// Every name handled by `super::lookup` must resolve; unknown names must
    /// yield the canonical error message. The original test missed "binary",
    /// which `lookup` supports — it is covered now.
    #[test]
    fn lookup() {
        for name in &[
            "json", "json-sorted", "msgpack", "influx", "binflux", "null", "string", "statsd",
            "yaml", "binary", "syslog",
        ] {
            assert!(super::lookup(name).is_ok(), "codec {} should resolve", name);
        }
        assert_eq!(
            super::lookup("snot").err().unwrap().to_string(),
            "Codec 'snot' not found."
        )
    }
    /// Every mime type with a safe builtin mapping must appear in the map.
    #[test]
    fn builtin_codec_map() {
        let map = super::builtin_codec_map();
        for t in super::MIME_TYPES.iter() {
            assert!(map.contains_key(*t));
        }
    }
    /// All known mime types resolve; unknown ones yield the canonical error.
    #[test]
    fn by_mime_type() {
        for t in super::MIME_TYPES.iter() {
            assert!(super::by_mime_type(t).is_ok());
        }
        assert_eq!(
            super::by_mime_type("application/badger")
                .err()
                .unwrap()
                .to_string(),
            "No codec found for mime type 'application/badger'"
        );
    }
}
|
use vec3::*;
use ray::*;
use util::*;
/// Rejection-samples a random point inside the unit disk (z = 0).
fn random_in_unit_disk(rng: &mut Rng) -> Vec3<f64> {
    loop {
        // Candidate in the square [-1, 1) x [-1, 1); accept it only when it
        // lies strictly inside the unit circle.
        let candidate =
            2.0 * Vec3::new(rng.rand64(), rng.rand64(), 0.0) - Vec3::new(1.0, 1.0, 0.0);
        if dot(&candidate, &candidate) < 1.0 {
            break candidate;
        }
    }
}
#[derive(Debug)]
pub struct Camera {
    /// Position of the camera.
    origin: Vec3<f64>,
    /// Lower-left corner of the focus plane in world coordinates.
    lower_left_corner: Vec3<f64>,
    /// Vector going horizontally (left-to-right) across the focus plane.
    horizontal: Vec3<f64>,
    /// Vector going vertically (bottom-to-top) across the focus plane.
    vertical: Vec3<f64>,
    /// ONB for the camera: `u` right, `v` up, `w` from the look-at point
    /// back toward the camera (see the cross products in `Camera::new`).
    u: Vec3<f64>,
    v: Vec3<f64>,
    w: Vec3<f64>,
    /// Half the lens aperture; scales the depth-of-field jitter in `get_ray`.
    lens_radius: f64,
    /// Shutter open time; rays are emitted at a random time in [time0, time1].
    time0: f64,
    /// Shutter close time.
    time1: f64,
}
impl Camera {
    /// Creates a new camera.
    ///
    /// * `lookfrom` / `lookat` — camera position and the point it faces.
    /// * `vup` — approximate "up" direction used to build the camera basis.
    /// * `vfov` — vertical field of view, in degrees.
    /// * `aspect` — width-to-height ratio of the image plane.
    /// * `aperture` — lens diameter (larger means more defocus blur).
    /// * `focus_dist` — distance to the plane that is in sharp focus.
    /// * `t0` / `t1` — shutter open/close times for motion blur.
    pub fn new(lookfrom: Vec3<f64>,
               lookat: Vec3<f64>,
               vup: Vec3<f64>,
               vfov: f64,
               aspect: f64,
               aperture: f64,
               focus_dist: f64,
               t0: f64,
               t1: f64)
               -> Camera {
        // Half-extents of the focus plane, derived from the vertical FOV.
        let theta = vfov * PI / 180.0;
        let half_height = (theta / 2.0).tan();
        let half_width = aspect * half_height;
        // Orthonormal basis: w points from the target back to the camera,
        // u to the right, v up.
        let w = (lookfrom - lookat).unit_vector();
        let u = cross(&vup, &w).unit_vector();
        let v = cross(&w, &u);
        let lower_left_corner = lookfrom - half_width * focus_dist * u -
                                half_height * focus_dist * v -
                                focus_dist * w;
        let horizontal = 2.0 * half_width * focus_dist * u;
        let vertical = 2.0 * half_height * focus_dist * v;
        // Field-init shorthand replaces the redundant `field: field` forms.
        Camera {
            origin: lookfrom,
            lower_left_corner,
            horizontal,
            vertical,
            u,
            v,
            w,
            lens_radius: aperture / 2.0,
            time0: t0,
            time1: t1,
        }
    }
    /// Returns the ray through normalized image coordinates `(s, t)`, jittered
    /// over the lens disk (depth of field) and over the shutter interval
    /// (motion blur).
    pub fn get_ray(&self, rng: &mut Rng, s: f64, t: f64) -> Ray<f64> {
        // Random point on the lens, expressed in the camera's u/v basis.
        let rd = self.lens_radius * random_in_unit_disk(rng);
        let offset = self.u * rd.x + self.v * rd.y;
        // Emit the ray at some random time while the shutter is open.
        let time = self.time0 + rng.rand64() * (self.time1 - self.time0);
        Ray::new_time(self.origin + offset,
                      self.lower_left_corner + s * self.horizontal + t * self.vertical -
                      self.origin - offset,
                      time)
    }
}
|
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::Term;
/// Implements the `erlang:erase/1` BIF: removes `key` from the calling
/// process's dictionary and returns whatever was stored there.
#[native_implemented::function(erlang:erase/1)]
pub fn result(process: &Process, key: Term) -> Term {
    // Delegates to the process dictionary; presumably yields `undefined` when
    // the key is absent — confirm against `erase_value_from_key`.
    process.erase_value_from_key(key)
}
|
//! Tests are based on
//! https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38a.pdf Appendix F
extern crate aes;
extern crate data_encoding;
use data_encoding::HEXLOWER;
use aes::*;
/// Decodes a hex string into its byte representation.
///
/// These are hand-written NIST test vectors, so a malformed string is a bug in
/// the test itself: instead of an opaque `unwrap()` on a decoder error, this
/// panics with a message that identifies the offending input.
fn as_vec(input: &str) -> Vec<u8> {
    assert!(
        input.len() % 2 == 0,
        "odd-length hex string: {:?}",
        input
    );
    (0..input.len())
        .step_by(2)
        .map(|i| {
            u8::from_str_radix(&input[i..i + 2], 16)
                .unwrap_or_else(|e| panic!("invalid hex byte {:?}: {}", &input[i..i + 2], e))
        })
        .collect()
}
// Sanity check for the hex helper used by every vector below.
#[test]
fn vec_conversion() {
    let expected: Vec<u8> = vec![0xff, 0xff, 0xff, 0x01, 0x00];
    assert_eq!(as_vec("ffffff0100"), expected);
}
// F.1.1 ECB-AES128.Encrypt
// Round-trips the NIST SP 800-38A ECB-AES128 vectors: each plaintext block is
// encrypted in place and compared with the expected ciphertext, then decrypted
// and compared with the original plaintext.
#[test]
fn ecb_aes_128_encrypt() {
    let key = "2b7e151628aed2a6abf7158809cf4f3c";
    // Four hex strings per row, mirroring the columns of the NIST tables.
    // Only index 0 (plaintext) and index 3 (expected ciphertext) are read
    // below; the middle columns are kept for reference only.
    let blocks = [
        [
            // Block #1
            "6bc1bee22e409f96e93d7e117393172a",
            "6bc1bee22e409f96e93d7e117393172a",
            "3ad77bb40d7a3660a89ecaf32466ef97",
            "3ad77bb40d7a3660a89ecaf32466ef97",
        ],
        [
            // Block #2
            "ae2d8a571e03ac9c9eb76fac45af8e51",
            "ae2d8a571e03ac9c9eb76fac45af8e51",
            "f5d3d58503b9699de785895a96fdbaaf",
            "f5d3d58503b9699de785895a96fdbaaf",
        ],
        [
            // Block #3
            "30c81c46a35ce411e5fbc1191a0a52ef",
            "30c81c46a35ce411e5fbc1191a0a52ef",
            "43b1cd7f598ece23881b00e3ed030688",
            "43b1cd7f598ece23881b00e3ed030688",
        ],
        [
            // Block #4
            "f69f2445df4f9b17ad2b417be66c3710",
            "f69f2445df4f9b17ad2b417be66c3710",
            "7b0c785e27e8ad3f8223207104725dd4",
            "7b0c785e27e8ad3f8223207104725dd4",
        ],
    ];
    // An all-zero placeholder is passed where CBC/CTR take an IV — presumably
    // ignored for ECB; confirm against `AES::new`.
    let aes = AES::new(Size::AES128, Mode::ECB, as_vec(key).as_slice(), &[0u8; 16]);
    for block in blocks.iter() {
        let mut input = as_vec(block[0]);
        let output = as_vec(block[3]);
        let mut out = input.as_mut_slice();
        // Encrypt in place and check against the expected ciphertext ...
        aes_ecb_encrypt(&aes, out);
        assert_eq!(out, output.as_slice());
        // ... then decrypt in place and check the plaintext is recovered.
        aes_ecb_decrypt(&aes, out);
        assert_eq!(out, as_vec(block[0]).as_slice());
    }
}
// F.2.1 CBC-AES128.Encrypt
// Checks the NIST SP 800-38A CBC-AES128 encryption vectors: each plaintext
// block is encrypted in place and compared with the expected ciphertext.
#[test]
fn cbc_aes_128_encrypt() {
    let key = "2b7e151628aed2a6abf7158809cf4f3c";
    let iv = "000102030405060708090a0b0c0d0e0f";
    // Only index 0 (plaintext) and index 3 (expected ciphertext) of each row
    // are used; the middle columns mirror intermediate values from the NIST
    // tables and are unused here.
    let blocks = [
        [
            // Block #1
            "6bc1bee22e409f96e93d7e117393172a",
            "6bc1bee22e409f96e93d7e117393172a",
            "7649abac8119b246cee98e9b12e9197d",
            "7649abac8119b246cee98e9b12e9197d",
        ],
        [
            // Block #2
            "ae2d8a571e03ac9c9eb76fac45af8e51",
            "d86421fb9f1a1eda505ee1375746972c",
            "5086cb9b507219ee95db113a917678b2",
            "5086cb9b507219ee95db113a917678b2",
        ],
        [
            // Block #3
            "30c81c46a35ce411e5fbc1191a0a52ef",
            "604ed7ddf32efdff7020d0238b7c2a5d",
            "73bed6b8e3c1743b7116e69e22229516",
            "73bed6b8e3c1743b7116e69e22229516",
        ],
        [
            // Block #4
            "f69f2445df4f9b17ad2b417be66c3710",
            "8521f2fd3c8eef2cdc3da7e5c44ea206",
            "3ff1caa1681fac09120eca307586e1a7",
            "3ff1caa1681fac09120eca307586e1a7",
        ],
    ];
    // CBC chains state across blocks, so the context is mutable and the
    // blocks must be processed in table order.
    let mut aes = AES::new(
        Size::AES128,
        Mode::CBC,
        as_vec(key).as_slice(),
        as_vec(iv).as_slice(),
    );
    for block in blocks.iter() {
        let mut input = as_vec(block[0]);
        let output = as_vec(block[3]);
        let mut out = input.as_mut_slice();
        aes_cbc_encrypt_buffer(&mut aes, out);
        assert_eq!(out, output.as_slice());
    }
}
// F.2.2 CBC-AES128.Decrypt
// Checks the NIST SP 800-38A CBC-AES128 decryption vectors: each ciphertext
// block is decrypted in place and compared with the expected plaintext.
#[test]
fn cbc_aes_128_decrypt() {
    let key = "2b7e151628aed2a6abf7158809cf4f3c";
    let iv = "000102030405060708090a0b0c0d0e0f";
    // Index 0 is the ciphertext fed in, index 3 the expected plaintext; the
    // middle columns mirror intermediate NIST-table values and are unused.
    let blocks = [
        [
            // Block #1
            "7649abac8119b246cee98e9b12e9197d",
            "7649abac8119b246cee98e9b12e9197d",
            "6bc1bee22e409f96e93d7e117393172a",
            "6bc1bee22e409f96e93d7e117393172a",
        ],
        [
            // Block #2
            "5086cb9b507219ee95db113a917678b2",
            "5086cb9b507219ee95db113a917678b2",
            "d86421fb9f1a1eda505ee1375746972c",
            "ae2d8a571e03ac9c9eb76fac45af8e51",
        ],
        [
            // Block #3
            "73bed6b8e3c1743b7116e69e22229516",
            "73bed6b8e3c1743b7116e69e22229516",
            "604ed7ddf32efdff7020d0238b7c2a5d",
            "30c81c46a35ce411e5fbc1191a0a52ef",
        ],
        [
            // Block #4
            "3ff1caa1681fac09120eca307586e1a7",
            "3ff1caa1681fac09120eca307586e1a7",
            "8521f2fd3c8eef2cdc3da7e5c44ea206",
            "f69f2445df4f9b17ad2b417be66c3710",
        ],
    ];
    // Mutable context: CBC decryption also tracks the previous ciphertext
    // block, so table order matters.
    let mut aes = AES::new(
        Size::AES128,
        Mode::CBC,
        as_vec(key).as_slice(),
        as_vec(iv).as_slice(),
    );
    for block in blocks.iter() {
        let mut input = as_vec(block[0]);
        let output = as_vec(block[3]);
        let mut out = input.as_mut_slice();
        aes_cbc_decrypt_buffer(&mut aes, out);
        assert_eq!(out, output.as_slice());
    }
}
// F.5.1 CTR-AES128.Encrypt
// Checks the NIST SP 800-38A CTR-AES128 encryption vectors: each plaintext
// block is XCrypted in place and compared with the expected ciphertext.
#[test]
fn ctr_aes_128_encrypt() {
    let key = "2b7e151628aed2a6abf7158809cf4f3c";
    let iv = "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";
    // Only index 0 (plaintext) and index 3 (expected ciphertext) are used.
    // Index 1 appears to be the counter block (note the increment per row) and
    // index 2 the keystream block — kept for reference only.
    let blocks = [
        [
            // Block #1
            "6bc1bee22e409f96e93d7e117393172a",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
            "ec8cdf7398607cb0f2d21675ea9ea1e4",
            "874d6191b620e3261bef6864990db6ce",
        ],
        [
            // Block #2
            "ae2d8a571e03ac9c9eb76fac45af8e51",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff00",
            "362b7c3c6773516318a077d7fc5073ae",
            "9806f66b7970fdff8617187bb9fffdff",
        ],
        [
            // Block #3
            "30c81c46a35ce411e5fbc1191a0a52ef",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff01",
            "6a2cc3787889374fbeb4c81b17ba6c44",
            "5ae4df3edbd5d35e5b4f09020db03eab",
        ],
        [
            // Block #4
            "f69f2445df4f9b17ad2b417be66c3710",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff02",
            "e89c399ff0f198c6d40a31db156cabfe",
            "1e031dda2fbe03d1792170a0f3009cee",
        ],
    ];
    // Mutable context: the counter advances with every processed block.
    let mut aes = AES::new(
        Size::AES128,
        Mode::CTR,
        as_vec(key).as_slice(),
        as_vec(iv).as_slice(),
    );
    for block in blocks.iter() {
        let mut input = as_vec(block[0]);
        let output = as_vec(block[3]);
        let mut out = input.as_mut_slice();
        aes_ctr_xcrypt_buffer(&mut aes, out);
        assert_eq!(out, output.as_slice());
    }
}
// F.5.2 CTR-AES128.Decrypt
// Checks the NIST SP 800-38A CTR-AES128 decryption vectors. The same
// `aes_ctr_xcrypt_buffer` is used as for encryption — presumably because CTR
// encryption and decryption are the same keystream-XOR operation.
#[test]
fn ctr_aes_128_decrypt() {
    let key = "2b7e151628aed2a6abf7158809cf4f3c";
    let iv = "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff";
    // Index 0 is the ciphertext fed in, index 3 the expected plaintext; the
    // middle columns (counter/keystream per the NIST tables) are unused.
    let blocks = [
        [
            // Block #1
            "874d6191b620e3261bef6864990db6ce",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
            "ec8cdf7398607cb0f2d21675ea9ea1e4",
            "6bc1bee22e409f96e93d7e117393172a",
        ],
        [
            // Block #2
            "9806f66b7970fdff8617187bb9fffdff",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff00",
            "362b7c3c6773516318a077d7fc5073ae",
            "ae2d8a571e03ac9c9eb76fac45af8e51",
        ],
        [
            // Block #3
            "5ae4df3edbd5d35e5b4f09020db03eab",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff01",
            "6a2cc3787889374fbeb4c81b17ba6c44",
            "30c81c46a35ce411e5fbc1191a0a52ef",
        ],
        [
            // Block #4
            "1e031dda2fbe03d1792170a0f3009cee",
            "f0f1f2f3f4f5f6f7f8f9fafbfcfdff02",
            "e89c399ff0f198c6d40a31db156cabfe",
            "f69f2445df4f9b17ad2b417be66c3710",
        ],
    ];
    // Mutable context: the counter advances with every processed block.
    let mut aes = AES::new(
        Size::AES128,
        Mode::CTR,
        as_vec(key).as_slice(),
        as_vec(iv).as_slice(),
    );
    for block in blocks.iter() {
        let mut input = as_vec(block[0]);
        let output = as_vec(block[3]);
        let mut out = input.as_mut_slice();
        aes_ctr_xcrypt_buffer(&mut aes, out);
        assert_eq!(out, output.as_slice());
    }
}
|
use glam::Vec2;
use legion::prelude::*;
use super::input::InputQueue;
use super::Game;
use crate::engine::components::all::*;
use crate::engine::events::timed_event::*;
use crate::engine::resources::map_data::*;
impl Game {
    /// Spawns one core entity per `(team_id, CoreData)` pair and registers
    /// each entity in `replicated_entities` under its replication id.
    pub fn insert_cores(&mut self, cores: Vec<(usize, CoreData)>) {
        // Replication ids are collected while the component tuples are built
        // so they can be paired with the entities returned by `insert`.
        let mut replication_ids = Vec::new();
        let component_set = cores
            .into_iter()
            .map(|(team_id, core)| {
                let replicated = Replicated::new_for_game(self, ReplicatedEntityType::Core);
                replication_ids.push(replicated.id.clone());
                (
                    Position(core.pos),
                    Team::new(TeamId(team_id as u32)),
                    replicated,
                    //TODO
                    // Collider,
                    // Provides Vision
                    // Visible
                )
            })
            .collect::<Vec<_>>();
        // Assumes `World::insert` yields the created entities in the same
        // order as the component tuples — the zip below relies on that.
        let inserted_entities = self.world.insert((), component_set);
        for (id, entity) in replication_ids.into_iter().zip(inserted_entities) {
            self.replicated_entities.insert(id, *entity);
        }
    }
    /// Spawns one tower entity per `(team_id, TowerData)` pair and registers
    /// each entity in `replicated_entities` under its replication id.
    pub fn insert_towers(&mut self, towers: Vec<(usize, TowerData)>) {
        let mut replication_ids = Vec::new();
        let component_set = towers
            .into_iter()
            .map(|(team_id, tower)| {
                let replicated = Replicated::new_for_game(self, ReplicatedEntityType::Tower);
                replication_ids.push(replicated.id.clone());
                (
                    Position(tower.pos),
                    Rotation(0.),
                    // TODO
                    // Attacks
                    // Search Hostile
                    // Health
                    // Collider
                    // Provides Vision
                    // Visible
                    Team::new(TeamId(team_id as u32)),
                    replicated,
                )
            })
            .collect::<Vec<_>>();
        // Same order-dependent id/entity pairing as in `insert_cores`.
        let inserted_entities = self.world.insert((), component_set);
        for (id, entity) in replication_ids.into_iter().zip(inserted_entities) {
            self.replicated_entities.insert(id, *entity);
        }
    }
    /// Spawns one spawner entity per `(team_id, SpawnerData)` pair. Spawners
    /// currently get no `Replicated` component, so they are not tracked in
    /// `replicated_entities`.
    pub fn insert_spawners(&mut self, spawners: Vec<(usize, SpawnerData)>) {
        let component_set = spawners.into_iter().map(|(team_id, spawner_data)| {
            (
                Position(spawner_data.pos),
                Rotation(0.),
                Team::new(TeamId(team_id as u32)),
                Waypoints::new(spawner_data.waypoints),
            )
        });
        // TODO:
        // Attach TimedEvents to these spawners
        // FOR TESTING ONLY!
        // Hard-coded repeating event that first fires at frame 300 and then
        // every 300 frames; remove once real spawner events exist.
        self.timed_events.push(TimedEvent {
            event_type: TimedEventType::Repeating(300),
            execute_frame: 300,
            name: "Test Event".to_string(),
            execute: |_unused: &mut Self| {
                println!("test event");
            },
            event_data: None,
        });
        self.world.insert((), component_set);
    }
    /// Spawns the controllable character entity for `player_id`, registers it
    /// for replication, creates the player's input queue, and returns the
    /// new entity.
    pub fn insert_player(&mut self, player_id: PlayerId) -> Entity {
        let replicated = Replicated::new_for_game(self, ReplicatedEntityType::Character);
        let entity = self
            .world
            .insert(
                (),
                std::iter::once((
                    // Fixed spawn point and facing for now.
                    Position(Vec2::new(1., 1.)),
                    Rotation(0.),
                    replicated,
                    PlayerControlled { id: player_id },
                    Moving {
                        base_speed: 125.,
                        target: MoveTarget::None,
                    },
                    // NOTE(review): parameter meaning (range/damage/rate?) is
                    // not visible here — confirm against `Attacking::new`.
                    Attacking::new(150., 1., 0.35, AttackingType::Projectile),
                )),
            )
            .first()
            .unwrap();
        self.replicated_entities
            .insert(replicated.id.clone(), *entity);
        self.player_inputs.insert(player_id, InputQueue::new());
        *entity
    }
}
|
use std::{collections::HashSet, io};
/// The four axis-aligned directions a head move can take.
#[derive(Debug, Clone, Copy)]
enum Direction {
    Up,
    Down,
    Left,
    Right,
}
/// One parsed input line: a direction and the number of steps to take.
#[derive(Debug, Clone, Copy)]
struct Movement {
    dir: Direction,
    /// Step count, parsed from the digits after the direction letter.
    len: i32,
}
/// A grid cell; `y` grows upward (see `Position::move_dir`). Hash/Eq allow
/// storing visited cells in a `HashSet`.
#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq)]
struct Position {
    x: i32,
    y: i32,
}
impl Position {
    /// Returns the position one step away in direction `dir`.
    fn move_dir(self, dir: Direction) -> Self {
        let (dx, dy) = match dir {
            Direction::Up => (0, 1),
            Direction::Down => (0, -1),
            Direction::Left => (-1, 0),
            Direction::Right => (1, 0),
        };
        Self {
            x: self.x + dx,
            y: self.y + dy,
        }
    }
    /// Moves this knot one step toward `other` per the rope rules: stay put
    /// while touching (both axis gaps at most 1), otherwise step one cell
    /// along every axis that differs (straight or diagonal move).
    fn follow(self, other: Position) -> Self {
        let dx = other.x - self.x;
        let dy = other.y - self.y;
        if dx.abs() <= 1 && dy.abs() <= 1 {
            return self;
        }
        if dx.abs() > 2 || dy.abs() > 2 {
            // A trailing knot can never fall more than two cells behind.
            unreachable!()
        }
        Self {
            x: self.x + dx.signum(),
            y: self.y + dy.signum(),
        }
    }
}
// Sketch of the grid around the start: `S` is the starting cell, `X` marks
// the surrounding cells.
// X X X X X
// X X X X X
// X X S X X
// X X X X X
// X X X X X
/// Parses one input line of the form `"<U|D|L|R> <steps>"` into a `Movement`
/// using nom combinators; the unconsumed remainder is returned alongside.
fn parse_line(line: &str) -> nom::IResult<&str, Movement> {
    use nom::{branch, bytes, character, combinator, sequence};
    combinator::map(
        sequence::separated_pair(
            // Direction letter: exactly one of U/D/L/R.
            branch::alt((
                combinator::map(bytes::complete::tag("U"), |_| Direction::Up),
                combinator::map(bytes::complete::tag("D"), |_| Direction::Down),
                combinator::map(bytes::complete::tag("L"), |_| Direction::Left),
                combinator::map(bytes::complete::tag("R"), |_| Direction::Right),
            )),
            character::complete::space1,
            // Step count: one or more digits run through `str::parse`.
            combinator::map_res(character::complete::digit1, str::parse),
        ),
        |(dir, len)| Movement { dir, len },
    )(line)
}
/// Debug helper: renders a 41x41 window around the origin, printing the index
/// of the first knot occupying each cell, or `.` for empty cells.
fn print_map(knots: &[Position]) {
    for y in -20..=20 {
        let row: String = (-20..=20)
            .map(|x| match knots.iter().position(|pos| pos.y == y && pos.x == x) {
                Some(knot) => format!("{knot} "),
                None => ". ".to_string(),
            })
            .collect();
        println!("{row}");
    }
}
/// Simulates a 10-knot rope over the moves read from stdin and prints how many
/// distinct cells the tail (last knot) visited (Advent of Code 2022 day 9
/// style puzzle).
fn main() -> io::Result<()> {
    let positions = itertools::process_results(io::stdin().lines(), |it| {
        // knots[0] is the head; each later knot follows the one before it.
        let mut knots = [Position::default(); 10];
        let mut positions: HashSet<Position> = HashSet::default();
        // The tail's starting cell counts as visited.
        positions.insert(knots[knots.len() - 1]);
        for line in it {
            let (remaining, head_move) = parse_line(&line)
                .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid move"))?;
            // The parser must consume the whole line.
            assert_eq!("", remaining);
            // Apply the move one step at a time so every intermediate tail
            // position is recorded.
            for _ in 0..head_move.len {
                knots[0] = knots[0].move_dir(head_move.dir);
                for i in 1..knots.len() {
                    knots[i] = knots[i].follow(knots[i - 1]);
                }
                positions.insert(knots[knots.len() - 1]);
            }
            // print_map(&knots);
        }
        Ok::<_, io::Error>(positions)
    })??;
    let answer = positions.len();
    println!("{answer}");
    Ok(())
}
|
use std::time::SystemTime;
/// MT19937-64 parameters: state size, middle offset, and twist matrix word.
const NN: usize = 312;
const MM: usize = 156;
const MATRIX_A: u64 = 0xB5026F5AA96619E9;
/// Masks splitting a state word into its upper 33 and lower 31 bits.
const UM: u64 = 0xFFFFFFFF80000000;
const LM: u64 = 0x7FFFFFFF;
/// Multiplier of the seed-expansion recurrence.
const F: u64 = 6364136223846793005;
/// `MAG01[x & 1]` conditionally applies the twist matrix.
const MAG01: [u64; 2] = [0, MATRIX_A];
/// A 64-bit Mersenne Twister (MT19937-64) pseudo-random number generator.
pub struct Random {
    /// Internal state words.
    mt: [u64; NN],
    /// Next state word to temper; `NN` means a twist is due.
    index: usize,
}
impl Random {
    /// Builds a generator from `seed` using the MT19937-64 init recurrence.
    pub fn new(seed: u64) -> Self {
        let mut state = [0u64; NN];
        state[0] = seed;
        for i in 1..NN {
            let prev = state[i - 1];
            state[i] = F.wrapping_mul(prev ^ (prev >> 62)).wrapping_add(i as u64);
        }
        // `index == NN` forces a twist before the first output.
        Self { mt: state, index: NN }
    }
    /// Regenerates all `NN` state words in place (the "twist" step). The
    /// wrap-around indexing folds the reference implementation's three loops
    /// into one; updated entries are only read where the original read them.
    fn twist(&mut self) {
        for i in 0..NN {
            let x = (self.mt[i] & UM) | (self.mt[(i + 1) % NN] & LM);
            self.mt[i] = self.mt[(i + MM) % NN] ^ (x >> 1) ^ MAG01[(x & 1) as usize];
        }
        self.index = 0;
    }
    /// Returns the next raw 64-bit output.
    pub fn gen(&mut self) -> u64 {
        if self.index == NN {
            self.twist();
        }
        let mut y = self.mt[self.index];
        self.index += 1;
        // Tempering: improves equidistribution of the raw state words.
        y ^= (y >> 29) & 0x5555555555555555;
        y ^= (y << 17) & 0x71D67FFFEDA60000;
        y ^= (y << 37) & 0xFFF7EEE000000000;
        y ^= y >> 43;
        y
    }
    /// Returns a value in `[0, n)` (retains the small modulo bias of `%`).
    pub fn next(&mut self, n: u64) -> u64 {
        self.gen() % n
    }
    /// Returns a value in the inclusive range `[f, t]`.
    pub fn next_bounds(&mut self, f: u64, t: u64) -> u64 {
        f + self.next(t - f + 1)
    }
}
/// Process-wide generator state, lazily seeded from the wall clock.
///
/// NOTE(review): a `static mut` is not thread-safe; `random()` is only sound
/// when called from a single thread — confirm all callers.
static mut RAND: Option<Random> = None;
/// Returns the global generator, creating and seeding it on first use.
pub fn random() -> &'static mut Random {
    // SAFETY: assumes single-threaded access (see the note on `RAND`).
    unsafe {
        if RAND.is_none() {
            // Seed from nanoseconds since the Unix epoch. `as u64` already
            // truncates the u128 to its low 64 bits, so the explicit
            // `& 0xFFFFFFFFFFFFFFFF` mask was redundant and is dropped.
            RAND = Some(Random::new(
                SystemTime::UNIX_EPOCH.elapsed().unwrap().as_nanos() as u64,
            ));
        }
        RAND.as_mut().unwrap()
    }
}
/// In-place random permutation of a collection.
pub trait Shuffle {
    /// Shuffles the receiver in place.
    fn shuffle(&mut self);
}
impl<T> Shuffle for &mut [T] {
    /// Forward Fisher–Yates: element `i` swaps with a uniformly chosen index
    /// in `0..=i`, drawn from the global generator (small modulo bias from
    /// `%` is retained).
    fn shuffle(&mut self) {
        for i in 0..self.len() {
            let j = (random().gen() % (i as u64 + 1)) as usize;
            self.swap(i, j);
        }
    }
}
|
/// Verifies that the injected file exists and carries the expected contents.
fn main() {
    let path = "/injected_dir/injected_file";
    let contents = std::fs::read_to_string(path).expect("read injected file");
    assert_eq!(contents, "injected file contents");
}
|
#[macro_use]
extern crate slog;
extern crate slog_term;
extern crate bytes;
//extern crate byteorder;
extern crate futures;
extern crate tokio_io;
extern crate tokio_service;
pub mod error;
pub mod ipc;
pub mod util; |
//! Rust encoder and decoder in order to work with the Confluent schema registry.
//!
//! This crate contains ways to handle encoding and decoding of messages making use of the
//! [confluent schema-registry]. This happens in a way that is compatible to the
//! [confluent java serde]. As a result it becomes easy to work with the same data in both the jvm
//! and rust.
//!
//! [confluent schema-registry]: https://docs.confluent.io/current/schema-registry/docs/index.html
//! [confluent java serde]: https://github.com/confluentinc/schema-registry/tree/master/avro-serde/src/main/java/io/confluent/kafka/streams/serdes/avro
//!
//! Both the Decoder and the Encoder have a cache to allow re-use of the Schema objects used for
//! the avro transitions.
//!
//! For Encoding data it's possible to supply a schema else the latest available schema will be used.
//! For decoding it works the same as the Java version: using the id encoded in the bytes, the
//! correct schema will be fetched and used to decode the message to an avro_rs::types::Value.
//!
//! Resulting errors are SRCError; besides the error itself they also contain a .cached property which
//! tells whether the error is cached or not. Another property added to the error is retriable: in some
//! cases, like when the network fails, it might be worth retrying the same function. The library itself
//! doesn't retry automatically.
//!
//! [avro-rs]: https://crates.io/crates/avro-rs
#[macro_use] extern crate failure;
pub mod schema_registry;
mod value_fixed_fix;
use crate::value_fixed_fix::FixedFixer;
use avro_rs::schema::Name;
use avro_rs::to_value;
use avro_rs::types::{Record, ToAvro, Value};
use avro_rs::{from_avro_datum, to_avro_datum, Schema};
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
use schema_registry::SRCError;
use schema_registry::{get_schema_by_id, get_schema_by_subject, get_subject, SubjectNameStrategy};
use serde::ser::Serialize;
use std::collections::hash_map::RandomState;
use std::collections::HashMap;
use std::io::Cursor;
/// A decoder used to transform bytes to a Value object
///
/// The main purpose of having this struct is to be able to cache the schema's. Because the need to
/// be retrieved over http from the schema registry, and we can already see from the bytes which
/// schema we should use, this can save a lot of unnecessary calls.
/// Errors are also stored to the cache, because they may not be recoverable. A function is
/// available to remove the errors from the cache. To get the value avro_rs is used.
///
/// For both the key and the payload/key it's possible to use the schema registry, this struct supports
/// both. But only using the SubjectNameStrategy::TopicNameStrategy it has to be made explicit
/// whether it's actual used as key or value.
///
/// ```
/// # use mockito::{mock, server_address};
/// # use schema_registry_converter::Decoder;
/// # use avro_rs::types::Value;
///
/// let _m = mock("GET", "/schemas/ids/1")
/// .with_status(200)
/// .with_header("content-type", "application/vnd.schemaregistry.v1+json")
/// .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
/// .create();
///
/// let mut decoder = Decoder::new(format!("http://{}", server_address()));
/// let heartbeat = decoder.decode(Some(&[0,0,0,0,1,6]));
///
/// assert_eq!(heartbeat, Ok(Value::Record(vec!(("beat".to_string(), Value::Long(3))))))
/// ```
#[derive(Debug)]
pub struct Decoder {
    /// Base url of the schema registry the schemas are fetched from.
    schema_registry_url: String,
    /// Cache from schema-registry id to the fetch/parse result; errors are
    /// cached too. Backed by a `Box::leak`ed map (see `Decoder::new`), so it
    /// lives for the remainder of the process.
    cache: &'static mut HashMap<u32, Result<Schema, SRCError>, RandomState>,
}
impl Decoder {
    /// Creates a new decoder which will use the supplied url to fetch the schema's since the schema
    /// needed is encoded in the binary, independent of the SubjectNameStrategy we don't need any
    /// additional data. It's possible for recoverable errors to stay in the cache, when a result
    /// comes back as an error you can use remove_errors_from_cache to clean the cache, keeping the
    /// correctly fetched schema's
    pub fn new(schema_registry_url: String) -> Decoder {
        // Leak the box so the cache gets the 'static lifetime required by the
        // `cache` field; it is intentionally never freed.
        let new_cache = Box::new(HashMap::new());
        Decoder {
            schema_registry_url,
            cache: Box::leak(new_cache),
        }
    }
    /// Remove all the errors from the cache, you might need to/want to run this when a recoverable
    /// error is met. Errors are also cached to prevent trying to get schema's that either don't
    /// exist or can't be parsed.
    ///
    /// ```
    /// # use mockito::{mock, server_address};
    /// # use schema_registry_converter::Decoder;
    /// # use schema_registry_converter::schema_registry::SRCError;
    /// # use avro_rs::types::Value;
    ///
    /// let mut decoder = Decoder::new(format!("http://{}", server_address()));
    /// let bytes = [0,0,0,0,2,6];
    ///
    /// let _m = mock("GET", "/schemas/ids/2")
    ///     .with_status(404)
    ///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
    ///     .with_body(r#"{"error_code":40403,"message":"Schema not found"}"#)
    ///     .create();
    /// let heartbeat = decoder.decode(Some(&bytes));
    /// assert_eq!(heartbeat, Err(SRCError::new("Did not get a 200 response code but 404 instead", None, false).into_cache()));
    /// let _m = mock("GET", "/schemas/ids/2")
    ///     .with_status(200)
    ///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
    ///     .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
    ///     .create();
    ///
    /// let heartbeat = decoder.decode(Some(&bytes));
    /// assert_eq!(heartbeat, Err(SRCError::new("Did not get a 200 response code but 404 instead", None, false).into_cache()));
    ///
    /// decoder.remove_errors_from_cache();
    ///
    /// let heartbeat = decoder.decode(Some(&bytes));
    /// assert_eq!(heartbeat, Ok(Value::Record(vec!(("beat".to_string(), Value::Long(3))))))
    /// ```
    pub fn remove_errors_from_cache(&mut self) {
        // Keep only successfully fetched schemas; dropped errors will be
        // retried against the registry on the next decode.
        self.cache.retain(|_, v| v.is_ok());
    }
    /// Decodes bytes into a value.
    /// The choice to use Option<&[u8]> as type is made so it plays nice with the BorrowedMessage
    /// struct from rdkafka, for example if we have m: &'a BorrowedMessage and decoder: &'a mut
    /// Decoder we can use decoder.decode(m.payload()) to decode the payload or
    /// decoder.decode(m.key()) to get the decoded key.
    ///
    /// ```no_run
    /// # use rdkafka::message::{Message, BorrowedMessage};
    /// # use schema_registry_converter::Decoder;
    /// # use avro_rs::types::Value;
    /// fn get_value<'a>(
    ///     msg: &'a BorrowedMessage,
    ///     decoder: &'a mut Decoder,
    /// ) -> Value{
    ///     match decoder.decode(msg.payload()){
    ///         Ok(v) => v,
    ///         Err(e) => panic!("Error getting value: {}", e),
    ///     }
    /// }
    /// ```
    pub fn decode(&mut self, bytes: Option<&[u8]>) -> Result<Value, SRCError> {
        match bytes {
            // No payload decodes to Null.
            None => Ok(Value::Null),
            // Confluent wire format: magic byte 0, a 4-byte schema id, then
            // the avro datum.
            Some(p) if p.len() > 4 && p[0] == 0 => self.deserialize(p),
            // Anything else is passed through as raw bytes.
            Some(p) => Ok(Value::Bytes(p.to_vec())),
        }
    }
    /// The actual deserialization trying to get the id from the bytes to retrieve the schema, and
    /// using a reader transforms the bytes to a value.
    fn deserialize<'a>(&'a mut self, bytes: &'a [u8]) -> Result<Value, SRCError> {
        let schema = self.get_schema(bytes);
        // Skip the 5-byte header (magic byte + schema id).
        let mut reader = Cursor::new(&bytes[5..]);
        match schema {
            Ok(v) => match from_avro_datum(&v, &mut reader, None) {
                Ok(v) => Ok(v),
                Err(e) => Err(SRCError::non_retryable_from_err(
                    e,
                    "Could not transform bytes using schema",
                )),
            },
            Err(e) => Err(e.clone()),
        }
    }
    /// Decodes bytes into a value.
    /// Also gives back the name of the schema used as a way to match with a struct to deserialize.
    pub fn decode_with_name(&mut self, bytes: Option<&[u8]>) -> Result<(Name, Value), SRCError> {
        match bytes {
            None => Ok((Name::new("null"), Value::Null)),
            Some(p) if p.len() > 4 && p[0] == 0 => self.deserialize_with_name(p),
            Some(p) => Ok((Name::new("bytes"), Value::Bytes(p.to_vec()))),
        }
    }
    /// The actual deserialization trying to get the id from the bytes to retrieve the schema, and
    /// using a reader transforms the bytes to a value.
    fn deserialize_with_name<'a>(&'a mut self, bytes: &'a [u8]) -> Result<(Name, Value), SRCError> {
        let schema = self.get_schema(bytes);
        let mut reader = Cursor::new(&bytes[5..]);
        match schema {
            Ok(v) => match from_avro_datum(&v, &mut reader, None) {
                Ok(val) => Ok((get_name(v), val)),
                // NOTE(review): `deserialize` builds the equivalent error via
                // `SRCError::non_retryable_from_err`; consider using the same
                // constructor here for consistency.
                Err(e) => Err(SRCError::new(
                    "Could not transform bytes using schema",
                    Some(&e.to_string()),
                    false,
                )),
            },
            Err(e) => Err(e.clone()),
        }
    }
    /// Looks up (fetching and caching on a miss) the schema for the id encoded
    /// in the payload header.
    fn get_schema(&mut self, bytes: &[u8]) -> &mut Result<Schema, SRCError> {
        // Byte 0 is the magic byte (checked by the caller); bytes 1..5 hold
        // the schema id as a big-endian u32.
        let mut buf = &bytes[1..5];
        let id = buf.read_u32::<BigEndian>().unwrap();
        let sr = &self.schema_registry_url;
        // Errors are cached too (marked via `into_cache`) so a bad id does
        // not trigger a registry round-trip on every message.
        self.cache
            .entry(id)
            .or_insert_with(|| match get_schema_by_id(id, sr) {
                Ok(v) => Ok(v),
                Err(e) => Err(e.into_cache()),
            })
    }
}
/// An encoder used to transform a Value object to bytes
///
/// The main purpose of having this struct is to be able to cache the schema's. Because the need to
/// be retrieved over http from the schema registry, and we can already see from the bytes which
/// schema we should use, this can save a lot of unnecessary calls.
/// Errors are also stored to the cache, because they may not be recoverable. A function is
/// available to remove the errors from the cache. To get the value avro_rs is used.
///
/// For both the key and the payload/key it's possible to use the schema registry, this struct supports
/// both. But only using the SubjectNameStrategy::TopicNameStrategy it has to be made explicit
/// whether it's actual used as key or value.
///
/// ```
/// # use mockito::{mock, server_address};
/// # use schema_registry_converter::Encoder;
/// # use schema_registry_converter::schema_registry::SubjectNameStrategy;
/// # use avro_rs::types::Value;
///
/// let _m = mock("GET", "/subjects/heartbeat-value/versions/latest")
/// .with_status(200)
/// .with_header("content-type", "application/vnd.schemaregistry.v1+json")
/// .with_body(r#"{"subject":"heartbeat-value","version":1,"id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
/// .create();
///
/// let _m = mock("GET", "/subjects/heartbeat-key/versions/latest")
/// .with_status(200)
/// .with_header("content-type", "application/vnd.schemaregistry.v1+json")
/// .with_body(r#"{"subject":"heartbeat-value","version":1,"id":4,"schema":"{\"type\":\"record\",\"name\":\"Name\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"name\",\"type\":\"string\",\"avro.java.string\":\"String\"}]}"}"#)
/// .create();
///
/// let mut encoder = Encoder::new(server_address().to_string());
///
/// let key_strategy = SubjectNameStrategy::TopicNameStrategy("heartbeat".into(), true);
/// let bytes = encoder.encode(vec!(("name", Value::String("Some name".to_owned()))), &key_strategy);
///
/// assert_eq!(bytes, Ok(vec!(0, 0, 0, 0, 4, 18, 83, 111, 109, 101, 32, 110, 97, 109, 101)));
///
/// let value_strategy = SubjectNameStrategy::TopicNameStrategy("heartbeat".into(), false);
/// let bytes = encoder.encode(vec!(("beat", Value::Long(3))), &value_strategy);
///
/// assert_eq!(bytes, Ok(vec!(0,0,0,0,3,6)))
/// ```
#[derive(Debug)]
pub struct Encoder {
    /// Base url of the schema registry the schemas are fetched from.
    schema_registry_url: String,
    /// Cache from subject name to the fetched `(schema, id)` pair; errors are
    /// cached too. Backed by a `Box::leak`ed map (see `Encoder::new`), so it
    /// lives for the remainder of the process.
    cache: &'static mut HashMap<String, Result<(Schema, u32), SRCError>, RandomState>,
}
impl Encoder {
/// Creates a new encoder which will use the supplied url to fetch the schema's. The schema's
/// need to be retrieved together with the id, in order for a consumer to decode the bytes.
/// For the encoding several strategies are available in the java client, all three of them are
/// supported. The schema's do have to be present in the schema registry already. This is
/// unlike the Java client with which it's possible to update/upload the schema when it's not
/// present yet. While it may be added to this library, it's also not hard to do it separately.
/// New schema's can be set by doing a post at /subjects/{subject}/versions.
pub fn new(schema_registry_url: String) -> Encoder {
    // Leak the box so the cache gets the 'static lifetime required by the
    // `cache` field; it is intentionally never freed.
    let new_cache = Box::new(HashMap::new());
    Encoder {
        schema_registry_url,
        cache: Box::leak(new_cache),
    }
}
/// Remove all the errors from the cache, you might need to/want to run this when a recoverable
/// error is met. Errors are also cached to prevent trying to get schema's that either don't
/// exist or can't be parsed.
///
/// ```
/// # use mockito::{mock, server_address};
/// # use schema_registry_converter::Encoder;
/// # use schema_registry_converter::schema_registry::SubjectNameStrategy;
/// # use schema_registry_converter::schema_registry::SRCError;
/// # use avro_rs::types::Value;
///
/// let mut encoder = Encoder::new(server_address().to_string());
/// let strategy = SubjectNameStrategy::RecordNameStrategy("nl.openweb.data.Heartbeat".into());
///
/// let _m = mock("GET", "/subjects/nl.openweb.data.Heartbeat/versions/latest")
///     .with_status(404)
///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
///     .with_body(r#"{"error_code":40403,"message":"Schema not found"}"#)
///     .create();
///
/// let bytes = encoder.encode(vec!(("beat", Value::Long(3))), &strategy);
/// assert_eq!(bytes, Err(SRCError::new("Did not get a 200 response code but 404 instead", None, false).into_cache()));
///
/// let _m = mock("GET", "/subjects/nl.openweb.data.Heartbeat/versions/latest")
///     .with_status(200)
///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
///     .with_body(r#"{"subject":"heartbeat-value","version":1,"id":4,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
///     .create();
///
/// let bytes = encoder.encode(vec!(("beat", Value::Long(3))), &strategy);
/// assert_eq!(bytes, Err(SRCError::new("Did not get a 200 response code but 404 instead", None, false).into_cache()));
///
/// encoder.remove_errors_from_cache();
///
/// let bytes = encoder.encode(vec!(("beat", Value::Long(3))), &strategy);
/// assert_eq!(bytes, Ok(vec!(0,0,0,0,4,6)))
/// ```
pub fn remove_errors_from_cache(&mut self) {
    // Keep only successfully fetched (schema, id) pairs; dropped errors will
    // be retried against the registry on the next encode.
    self.cache.retain(|_, v| v.is_ok());
}
/// Encodes a vector of values to bytes. The correct values of the 'keys' depend on the schema
/// being fetched at runtime. For example you might agree on a schema with a consuming party and
/// /or upload a schema to the schema registry before starting the program. In the future an
/// 'encode with schema' might be added which makes it easier to make sure the schema will
/// become available in the correct way.
///
/// ```
/// # use mockito::{mock, server_address};
/// # use schema_registry_converter::Encoder;
/// # use schema_registry_converter::schema_registry::SubjectNameStrategy;
/// # use avro_rs::types::Value;
///
/// let _m = mock("GET", "/subjects/heartbeat-nl.openweb.data.Heartbeat/versions/latest")
///     .with_status(200)
///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
///     .with_body(r#"{"subject":"heartbeat-value","version":1,"id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
///     .create();
///
/// let mut encoder = Encoder::new(server_address().to_string());
/// let strategy = SubjectNameStrategy::TopicRecordNameStrategy("heartbeat".into(), "nl.openweb.data.Heartbeat".into());
/// let bytes = encoder.encode(vec!(("beat", Value::Long(3))), &strategy);
///
/// assert_eq!(bytes, Ok(vec!(0,0,0,0,3,6)))
/// ```
pub fn encode(
    &mut self,
    values: Vec<(&'static str, Value)>,
    subject_name_strategy: &SubjectNameStrategy,
) -> Result<Vec<u8>, SRCError> {
    // Resolve (schema, id) for the subject derived from the strategy; the
    // result — including errors — is cached.
    let schema_and_id = self.get_schema_and_id(subject_name_strategy);
    match schema_and_id {
        Ok((schema, id)) => to_bytes(&schema, *id, values),
        Err(e) => Err(e.clone()),
    }
}
/// Encodes a struct implementing `Serialize` to avro bytes. Whether the struct
/// is accepted depends on the schema being fetched at runtime. For example you
/// might agree on a schema with a consuming party and/or upload a schema to the
/// schema registry before starting the program. In the future an 'encode with
/// schema' might be added which makes it easier to make sure the schema will
/// become available in the correct way.
///
/// ```
/// # use mockito::{mock, server_address};
/// # use schema_registry_converter::Encoder;
/// # use schema_registry_converter::schema_registry::SubjectNameStrategy;
/// # use serde::Serialize;
/// # use avro_rs::types::Value;
///
/// let _m = mock("GET", "/subjects/heartbeat-nl.openweb.data.Heartbeat/versions/latest")
///     .with_status(200)
///     .with_header("content-type", "application/vnd.schemaregistry.v1+json")
///     .with_body(r#"{"subject":"heartbeat-value","version":1,"id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
///     .create();
///
/// #[derive(Serialize)]
/// struct Heartbeat {
///     beat: i64,
/// }
///
/// let mut encoder = Encoder::new(server_address().to_string());
/// let strategy = SubjectNameStrategy::TopicRecordNameStrategy("heartbeat".into(), "nl.openweb.data.Heartbeat".into());
/// let bytes = encoder.encode_struct(Heartbeat{beat: 3}, &strategy);
///
/// assert_eq!(bytes, Ok(vec!(0,0,0,0,3,6)))
/// ```
pub fn encode_struct(
    &mut self,
    item: impl Serialize,
    subject_name_strategy: &SubjectNameStrategy,
) -> Result<Vec<u8>, SRCError> {
    // Same flow as `encode`, but the payload is produced from a serde struct.
    match self.get_schema_and_id(subject_name_strategy) {
        Ok((schema, id)) => item_to_bytes(schema, *id, item),
        Err(e) => Err(e.clone()),
    }
}
/// Looks up the schema and id for the given strategy, caching the outcome
/// (success or error) per subject so the registry is queried at most once
/// until the cache entry is explicitly removed.
fn get_schema_and_id(
    &mut self,
    subject_name_strategy: &SubjectNameStrategy,
) -> &mut Result<(Schema, u32), SRCError> {
    // Borrow the url up front so the closure below does not capture `self`.
    let url = &self.schema_registry_url;
    self.cache
        .entry(get_subject(subject_name_strategy))
        .or_insert_with(|| {
            get_schema_by_subject(url, &subject_name_strategy).map_err(|e| e.into_cache())
        })
}
}
/// Builds the wire-format payload for an already-constructed avro value:
/// a zero "magic" byte, the 4-byte big-endian schema id, then the avro body.
///
/// Returns a non-retryable error when `to_avro_datum` rejects the record
/// (e.g. the value does not match the schema).
fn to_payload<T: ToAvro>(schema: &Schema, id: u32, record: T) -> Result<Vec<u8>, SRCError> {
    // The header is always exactly 5 bytes; reserve it up front instead of
    // growing from an initial 1-byte vec. `u32::to_be_bytes` replaces the
    // byteorder-crate call — std provides the big-endian encoding directly.
    let mut payload = Vec::with_capacity(5);
    payload.push(0u8);
    payload.extend_from_slice(&id.to_be_bytes());
    match to_avro_datum(schema, record) {
        Ok(bytes) => {
            payload.extend_from_slice(&bytes);
            Ok(payload)
        }
        Err(e) => Err(SRCError::non_retryable_from_err(
            e,
            "Could not get avro bytes",
        )),
    }
}
/// Using the schema, a vector of (field name, value) pairs is serialized to
/// bytes according to the avro specification.
fn to_bytes(
    schema: &Schema,
    id: u32,
    values: Vec<(&'static str, Value)>,
) -> Result<Vec<u8>, SRCError> {
    // Only record schemas can be filled field-by-field; anything else fails here.
    let mut record = Record::new(schema).ok_or_else(|| {
        SRCError::new(
            "Could not create record from schema",
            None,
            false,
        )
    })?;
    for (field, value) in values {
        record.put(field, value)
    }
    to_payload(schema, id, record)
}
/// Using the schema, an item implementing `Serialize` is serialized to bytes
/// according to the avro specification.
fn item_to_bytes(schema: &Schema, id: u32, item: impl Serialize) -> Result<Vec<u8>, SRCError> {
    // serde struct -> generic avro value; serde-level failures are non-retryable.
    let value = to_value(item)
        .map_err(|e| SRCError::non_retryable_from_err(e, "Could not transform to avro_rs value"))?;
    // fix_fixed already yields an SRCError, so its failure propagates unchanged.
    let fixed = value.fix_fixed(schema)?;
    // Resolving against the schema validates field names/types.
    let record = fixed
        .resolve(schema)
        .map_err(|e| SRCError::non_retryable_from_err(e, "Failed to resolve"))?;
    to_payload(schema, id, record)
}
/// Extracts the record name from a schema; non-record schemas get the
/// placeholder name "no record".
fn get_name(schema: &Schema) -> Name {
    if let Schema::Record { name, .. } = schema {
        name.clone()
    } else {
        Name::new("no record")
    }
}
#[test]
fn to_bytes_no_record() {
    // A bare Boolean schema is not a record, so encoding must fail fast
    // before any avro serialization is attempted.
    let result = to_bytes(&Schema::Boolean, 5, vec![("beat", Value::Long(3))]);
    let expected = SRCError::new(
        "Could not create record from schema",
        None,
        false,
    );
    assert_eq!(result, Err(expected))
}
#[test]
fn to_bytes_no_transfer_wrong() {
    // The supplied field "beat" is absent from the Name schema, so the
    // avro serialization step itself must reject the record.
    let schema = Schema::parse_str(r#"{"type":"record","name":"Name","namespace":"nl.openweb.data","fields":[{"name":"name","type":"string","avro.java.string":"String"}]}"#).unwrap();
    let expected = SRCError::new(
        "Could not get avro bytes",
        Some("Validation error: value does not match schema"),
        false,
    );
    assert_eq!(to_bytes(&schema, 5, vec![("beat", Value::Long(3))]), Err(expected))
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::schema_registry::SuppliedSchema;
    use avro_rs::from_value;
    use mockito::{mock, server_address};
    use serde::{Deserialize, Serialize};

    // Simple record matching the "Heartbeat" avro schema used throughout these tests.
    #[derive(Debug, Deserialize, Serialize)]
    struct Heartbeat {
        beat: i64,
    }

    // Serializable by serde but not representable in avro: avro map keys must be strings.
    #[derive(Serialize)]
    struct NoWayAvro {
        map: HashMap<u32, u32>,
    }

    // Mirrors the avro enum with symbols ["AUTO","MANUAL"] from the
    // ConfirmAccountCreation schema.
    #[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Clone, Deserialize, Serialize)]
    pub enum Atype {
        #[serde(rename = "AUTO")]
        Auto,
        #[serde(rename = "MANUAL")]
        Manual,
    }

    impl Default for Atype {
        fn default() -> Self {
            Atype::Auto
        }
    }

    // Mirrors the avro fixed type of size 16 in the ConfirmAccountCreation schema.
    pub type Uuid = [u8; 16];

    // Record with a fixed and an enum field, used for the fixed/enum round-trip tests.
    #[serde(default)]
    #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
    pub struct ConfirmAccountCreation {
        pub id: Uuid,
        pub a_type: Atype,
    }

    impl Default for ConfirmAccountCreation {
        fn default() -> ConfirmAccountCreation {
            ConfirmAccountCreation {
                id: Uuid::default(),
                a_type: Atype::Auto,
            }
        }
    }

    // Debug output of a fresh Decoder shows the registry url and an empty cache.
    #[test]
    fn display_decoder() {
        let decoder = Decoder::new(server_address().to_string());
        assert_eq!(
            "Decoder { schema_registry_url: \"127.0.0.1:1234\", cache: {} }".to_owned(),
            format!("{:?}", decoder)
        )
    }

    // Happy path: bytes with magic byte + id 1 decode to a Heartbeat record.
    #[test]
    fn test_decoder_default() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Ok(Value::Record(vec![("beat".to_string(), Value::Long(3))]))
        );
        let item = match from_value::<Heartbeat>(&heartbeat.unwrap()) {
            Ok(h) => h,
            Err(_) => unreachable!(),
        };
        assert_eq!(item.beat, 3i64);
    }

    // decode_with_name also surfaces the record's name and namespace.
    #[test]
    fn test_decoder_with_name() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode_with_name(Some(&[0, 0, 0, 0, 1, 6]));
        let item = match heartbeat {
            Ok((name, value)) => match name.name.as_str() {
                "Heartbeat" => match name.namespace {
                    Some(namespace) => match namespace.as_str() {
                        "nl.openweb.data" => from_value::<Heartbeat>(&value).unwrap(),
                        ns => panic!("Unexpected namespace {}", ns),
                    },
                    None => panic!("No namespace, was expected"),
                },
                name => panic!("Unexpected name {}", name),
            },
            Err(_) => unreachable!(),
        };
        assert_eq!(item.beat, 3i64);
    }

    // Absent payload decodes to Null without contacting the registry.
    #[test]
    fn test_decoder_no_bytes() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(None);
        assert_eq!(heartbeat, Ok(Value::Null))
    }

    // Same as above, but via decode_with_name: the synthetic name is "null".
    #[test]
    fn test_decoder_with_name_no_bytes() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode_with_name(None);
        assert_eq!(heartbeat, Ok((Name::new("null"), Value::Null)))
    }

    // Without the leading 0x00 magic byte the input is passed through as raw bytes.
    #[test]
    fn test_decoder_magic_byte_not_present() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[1, 0, 0, 0, 1, 6]));
        assert_eq!(heartbeat, Ok(Value::Bytes(vec![1, 0, 0, 0, 1, 6])))
    }

    // Pass-through case for decode_with_name: synthetic name is "bytes".
    #[test]
    fn test_decoder_with_name_magic_byte_not_present() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode_with_name(Some(&[1, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Ok((Name::new("bytes"), Value::Bytes(vec![1, 0, 0, 0, 1, 6])))
        )
    }

    // Fewer than 5 bytes cannot carry the id header; also passed through raw.
    #[test]
    fn test_decoder_not_enough_bytes() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0]));
        assert_eq!(heartbeat, Ok(Value::Bytes(vec![0, 0, 0, 0])))
    }

    // Valid header but truncated avro body: the avro read fails, non-retryable.
    #[test]
    fn test_decoder_wrong_data() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0, 1]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Could not transform bytes using schema",
                Some("failed to fill whole buffer"),
                false,
            ))
        )
    }

    // Same truncated-body failure through decode_with_name.
    #[test]
    fn test_decoder_with_name_wrong_data() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode_with_name(Some(&[0, 0, 0, 0, 1]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Could not transform bytes using schema",
                Some("failed to fill whole buffer"),
                false,
            ))
        )
    }

    // Registry replies with malformed JSON: parse error is cached (into_cache).
    #[test]
    fn test_decoder_no_json_response() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Invalid json string",
                Some("expected `:` at line 1 column 130"),
                false
            )
            .into_cache())
        )
    }

    // Same malformed-JSON failure through decode_with_name.
    #[test]
    fn test_decoder_with_name_no_json_response() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode_with_name(Some(&[0, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Invalid json string",
                Some("expected `:` at line 1 column 130"),
                false
            )
            .into_cache())
        )
    }

    // Unresolvable host: error is marked retryable (true) but still cached.
    #[test]
    fn test_decoder_schema_registry_unavailable() {
        let mut decoder = Decoder::new("http://bogus".to_string());
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 10, 1, 6]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "error performing get to schema registry",
                Some("Couldn\'t resolve host name"),
                true,
            )
            .into_cache())
        )
    }

    // Response JSON lacks the "schema" key entirely.
    #[test]
    fn test_decoder_default_no_schema_in_response() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"no-schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new("Could not get raw schema from response", None, false).into_cache())
        )
    }

    // "schema" is present but is not a valid avro record (missing fields).
    #[test]
    fn test_decoder_default_wrong_schema_in_response() {
        let _m = mock("GET", "/schemas/ids/1")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\"}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let heartbeat = decoder.decode(Some(&[0, 0, 0, 0, 1, 6]));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Could not parse schema",
                Some("Failed to parse schema: No `fields` in record"),
                false,
            )
            .into_cache())
        )
    }

    // Decoding a record containing a fixed(16) uuid and an enum field.
    #[test]
    fn test_decoder_fixed_with_enum() {
        let _m = mock("GET", "/schemas/ids/6")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"ConfirmAccountCreation\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"id\",\"type\":{\"type\":\"fixed\",\"name\":\"Uuid\",\"size\":16}},{\"name\":\"a_type\",\"type\":{\"type\":\"enum\",\"name\":\"Atype\",\"symbols\":[\"AUTO\",\"MANUAL\"]}}]}"}"#)
            .create();
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let cac = decoder.decode(Some(&[0, 0, 0, 0, 6, 204, 240, 237, 74, 227, 188, 75, 46, 183, 163, 122, 214, 178, 72, 118, 162, 2]));
        assert_eq!(cac, Ok(Value::Record(vec!(("id".to_string(), Value::Fixed(16, vec!(204, 240, 237, 74, 227, 188, 75, 46, 183, 163, 122, 214, 178, 72, 118, 162))), ("a_type".to_string(), Value::Enum(1, "MANUAL".to_string()))))));
    }

    // A 404 error is cached; the same error is returned even after the mock
    // becomes healthy, until remove_errors_from_cache is called.
    #[test]
    fn test_decoder_cache() {
        let mut decoder = Decoder::new(format!("http://{}", server_address()));
        let bytes = [0, 0, 0, 0, 2, 6];
        let _m = mock("GET", "/schemas/ids/2")
            .with_status(404)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"error_code":40403,"message":"Schema not found"}"#)
            .create();
        let heartbeat = decoder.decode(Some(&bytes));
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Did not get a 200 response code but 404 instead",
                None,
                false,
            )
            .into_cache())
        );
        let _m = mock("GET", "/schemas/ids/2")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let heartbeat = decoder.decode(Some(&bytes));
        // Still the cached 404 error, despite the healthy mock above.
        assert_eq!(
            heartbeat,
            Err(SRCError::new(
                "Did not get a 200 response code but 404 instead",
                None,
                false,
            )
            .into_cache())
        );
        decoder.remove_errors_from_cache();
        // Cache cleared of errors: the healthy mock is now consulted.
        let heartbeat = decoder.decode(Some(&bytes));
        assert_eq!(
            heartbeat,
            Ok(Value::Record(vec![("beat".to_string(), Value::Long(3))]))
        )
    }

    // Debug output of a fresh Encoder shows the registry url and an empty cache.
    #[test]
    fn display_encode() {
        let decoder = Encoder::new(server_address().to_string());
        assert_eq!(
            "Encoder { schema_registry_url: \"127.0.0.1:1234\", cache: {} }".to_owned(),
            format!("{:?}", decoder)
        )
    }

    // TopicNameStrategy picks the -key or -value subject based on the bool flag.
    #[test]
    fn test_encode_key_and_value() {
        let _m = mock("GET", "/subjects/heartbeat-value/versions/latest")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"subject":"heartbeat-value","version":1,"id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let _n = mock("GET", "/subjects/heartbeat-key/versions/latest")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"subject":"heartbeat-value","version":1,"id":4,"schema":"{\"type\":\"record\",\"name\":\"Name\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"name\",\"type\":\"string\",\"avro.java.string\":\"String\"}]}"}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let key_strategy = SubjectNameStrategy::TopicNameStrategy("heartbeat".into(), true);
        let bytes = encoder.encode(
            vec![("name", Value::String("Some name".to_owned()))],
            &key_strategy,
        );
        assert_eq!(
            bytes,
            Ok(vec![
                0, 0, 0, 0, 4, 18, 83, 111, 109, 101, 32, 110, 97, 109, 101,
            ])
        );
        let value_strategy = SubjectNameStrategy::TopicNameStrategy("heartbeat".into(), false);
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &value_strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 3, 6]))
    }

    // TopicRecordNameStrategy builds the "<topic>-<record>" subject.
    #[test]
    fn test_using_record_name() {
        let _m = mock("GET", "/subjects/heartbeat-nl.openweb.data.Heartbeat/versions/latest")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"subject":"heartbeat-value","version":1,"id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let strategy = SubjectNameStrategy::TopicRecordNameStrategy(
            "heartbeat".into(),
            "nl.openweb.data.Heartbeat".into(),
        );
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 3, 6]))
    }

    // Registry response without an "id" field: cached non-retryable error.
    #[test]
    fn test_encoder_no_id_in_response() {
        let _m = mock("GET", "/subjects/heartbeat-nl.openweb.data.Heartbeat/versions/latest")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"subject":"heartbeat-value","version":1,"no-id":3,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let strategy = SubjectNameStrategy::TopicRecordNameStrategy(
            "heartbeat".into(),
            "nl.openweb.data.Heartbeat".into(),
        );
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            bytes,
            Err(SRCError::new("Could not get id from response", None, false).into_cache())
        )
    }

    // Unresolvable host on encode: retryable error, cached.
    #[test]
    fn test_encoder_schema_registry_unavailable() {
        let mut encoder = Encoder::new("http://bogus".into());
        let strategy = SubjectNameStrategy::TopicRecordNameStrategy(
            "heartbeat".into(),
            "nl.openweb.data.Balance".into(),
        );
        let result = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            result,
            Err(SRCError::new(
                "error performing get to schema registry",
                Some("Couldn\'t resolve host name"),
                true,
            )
            .into_cache())
        )
    }

    // Bogus URL scheme ("hxxx"): surfaced as an unsupported-protocol error.
    #[test]
    fn test_encoder_unknown_protocol() {
        let mut encoder = Encoder::new("hxxx://bogus".into());
        let strategy = SubjectNameStrategy::TopicRecordNameStrategy(
            "heartbeat".into(),
            "nl.openweb.data.Balance".into(),
        );
        let result = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            result,
            Err(SRCError::new(
                "error performing get to schema registry",
                Some("Unsupported protocol"),
                true,
            )
            .into_cache())
        )
    }

    // Supplied-schema strategies POST the schema; host failure on the post path.
    #[test]
    fn test_encoder_schema_registry_unavailable_with_record() {
        let mut encoder = Encoder::new("http://bogus".into());
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Balance","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let strategy =
            SubjectNameStrategy::RecordNameStrategyWithSchema(Box::from(heartbeat_schema));
        let result = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            result,
            Err(SRCError::new(
                "error performing post to schema registry",
                Some("Couldn\'t resolve host name"),
                true,
            )
            .into_cache())
        )
    }

    // Encoder counterpart of test_decoder_cache: a 404 error sticks until
    // remove_errors_from_cache clears it.
    #[test]
    fn test_encode_cache() {
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let strategy = SubjectNameStrategy::RecordNameStrategy("nl.openweb.data.Heartbeat".into());
        let _m = mock("GET", "/subjects/nl.openweb.data.Heartbeat/versions/latest")
            .with_status(404)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"error_code":40403,"message":"Schema not found"}"#)
            .create();
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            bytes,
            Err(SRCError::new(
                "Did not get a 200 response code but 404 instead",
                None,
                false,
            )
            .into_cache())
        );
        let _n = mock("GET", "/subjects/nl.openweb.data.Heartbeat/versions/latest")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"subject":"heartbeat-value","version":1,"id":4,"schema":"{\"type\":\"record\",\"name\":\"Heartbeat\",\"namespace\":\"nl.openweb.data\",\"fields\":[{\"name\":\"beat\",\"type\":\"long\"}]}"}"#)
            .create();
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        // Still the cached 404 error, despite the healthy mock above.
        assert_eq!(
            bytes,
            Err(SRCError::new(
                "Did not get a 200 response code but 404 instead",
                None,
                false,
            )
            .into_cache())
        );
        encoder.remove_errors_from_cache();
        // Cache cleared of errors: the healthy mock is now consulted.
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 4, 6]))
    }

    // Supplied schemas are POSTed per subject (-key and -value) and the returned
    // ids are used in the payloads.
    #[test]
    fn test_encode_key_and_value_supplied_record() {
        let _n = mock("POST", "/subjects/heartbeat-key/versions")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"id":3}"#)
            .create();
        let _m = mock("POST", "/subjects/heartbeat-value/versions")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"id":4}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let name_schema = SuppliedSchema::new(r#"{"type":"record","name":"Name","namespace":"nl.openweb.data","fields":[{"name":"name","type":"string","avro.java.string":"String"}]}"#.into());
        let key_strategy = SubjectNameStrategy::TopicNameStrategyWithSchema(
            "heartbeat".into(),
            true,
            Box::from(name_schema),
        );
        let bytes = encoder.encode(
            vec![("name", Value::String("Some name".to_owned()))],
            &key_strategy,
        );
        assert_eq!(
            bytes,
            Ok(vec![
                0, 0, 0, 0, 3, 18, 83, 111, 109, 101, 32, 110, 97, 109, 101,
            ])
        );
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Heartbeat","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let value_strategy = SubjectNameStrategy::TopicNameStrategyWithSchema(
            "heartbeat".into(),
            false,
            Box::from(heartbeat_schema),
        );
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &value_strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 4, 6]))
    }

    // RecordNameStrategyWithSchema: subject is the fully qualified record name.
    #[test]
    fn test_encode_record_name_strategy_supplied_record() {
        let _n = mock("POST", "/subjects/nl.openweb.data.Heartbeat/versions")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"id":11}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Heartbeat","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let strategy =
            SubjectNameStrategy::RecordNameStrategyWithSchema(Box::from(heartbeat_schema));
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 11, 6]))
    }

    // POST succeeds but the response has no "id": cached non-retryable error.
    #[test]
    fn test_encode_record_name_strategy_supplied_record_wrong_response() {
        let _n = mock("POST", "/subjects/nl.openweb.data.Heartbeat/versions")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"no-id":11}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Heartbeat","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let strategy =
            SubjectNameStrategy::RecordNameStrategyWithSchema(Box::from(heartbeat_schema));
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            bytes,
            Err(SRCError::new("Could not get id from response", None, false).into_cache())
        )
    }

    // TopicRecordNameStrategyWithSchema: subject is "<topic>-<record>".
    #[test]
    fn test_encode_topic_record_name_strategy_supplied_record() {
        let _n = mock("POST", "/subjects/hb-nl.openweb.data.Heartbeat/versions")
            .with_status(200)
            .with_header("content-type", "application/vnd.schemaregistry.v1+json")
            .with_body(r#"{"id":23}"#)
            .create();
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Heartbeat","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let strategy = SubjectNameStrategy::TopicRecordNameStrategyWithSchema(
            "hb".into(),
            Box::from(heartbeat_schema),
        );
        let bytes = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(bytes, Ok(vec![0, 0, 0, 0, 23, 6]))
    }

    // No mock registered for the POST: mockito answers 501, surfaced as an error.
    #[test]
    fn test_encode_topic_record_name_strategy_schema_registry_not_available() {
        let mut encoder = Encoder::new(format!("http://{}", server_address()));
        let heartbeat_schema = SuppliedSchema::new(r#"{"type":"record","name":"Heartbeat","namespace":"nl.openweb.data","fields":[{"name":"beat","type":"long"}]}"#.into());
        let strategy = SubjectNameStrategy::TopicRecordNameStrategyWithSchema(
            "hb".into(),
            Box::from(heartbeat_schema),
        );
        let error = encoder.encode(vec![("beat", Value::Long(3))], &strategy);
        assert_eq!(
            error,
            Err(SRCError::new(
                "Did not get a 200 response code but 501 instead",
                None,
                false,
            )
            .into_cache())
        )
    }

    // Struct fields don't match the target schema: fix_fixed reports the mismatch.
    #[test]
    fn item_to_bytes_no_tranfer_wrong() {
        let schema = Schema::parse_str(r#"{"type":"record","name":"Name","namespace":"nl.openweb.data","fields":[{"name":"name","type":"string","avro.java.string":"String"}]}"#).unwrap();
        let result = crate::item_to_bytes(&schema, 5, Heartbeat { beat: 3 });
        assert_eq!(
            result,
            Err(SRCError::new(
                "error fixing record",
                Some("missing field name in record"),
                false,
            ))
        )
    }

    // serde can serialize NoWayAvro, but avro rejects non-string map keys.
    #[test]
    fn derive_but_not_valid_avro() {
        let schema = Schema::parse_str(r#"{"type":"record","name":"Name","namespace":"nl.openweb.data","fields":[{"name":"name","type":"string","avro.java.string":"String"}]}"#).unwrap();
        let mut map = HashMap::new();
        map.insert(1, 2);
        let result = crate::item_to_bytes(&schema, 5, NoWayAvro { map });
        assert_eq!(
            result,
            Err(SRCError::new(
                "Could not transform to avro_rs value",
                Some("map key is not a string"),
                false,
            ))
        )
    }

    // Happy path for item_to_bytes with fixed and enum fields.
    #[test]
    fn item_to_bytes_proper_bytes_record_with_fixed() {
        let schema = Schema::parse_str(r#"{"type":"record","name":"ConfirmAccountCreation","namespace":"nl.openweb.data","fields":[{"name":"id","type":{"type":"fixed","name":"Uuid","size":16}},{"name":"a_type","type":{"type":"enum","name":"Atype","symbols":["AUTO","MANUAL"]}}]}"#).unwrap();
        let item = ConfirmAccountCreation {
            id: [
                204, 240, 237, 74, 227, 188, 75, 46, 183, 163, 122, 214, 178, 72, 118, 162,
            ],
            a_type: Atype::Manual,
        };
        let result = crate::item_to_bytes(&schema, 6, item);
        assert_eq!(
            result,
            Ok(vec!(0, 0, 0, 0, 6, 204, 240, 237, 74, 227, 188, 75, 46, 183, 163, 122, 214, 178, 72, 118, 162, 2))
        )
    }
}
|
// auto generated, do not modify.
// created: Mon Feb 22 23:57:02 2016
// src-file: /QtWidgets/qmainwindow.h
// dst-file: /src/widgets/qmainwindow.rs
//
// header block begin =>
#![feature(libc)]
#![feature(core)]
#![feature(collections)]
extern crate libc;
use self::libc::*;
// <= header block end
// main block begin =>
// <= main block end
// use block begin =>
use super::qwidget::*; // 773
use std::ops::Deref;
use super::qstatusbar::*; // 773
use super::qtoolbar::*; // 773
use super::qdockwidget::*; // 773
use super::super::core::qstring::*; // 771
use super::super::core::qsize::*; // 771
use super::super::core::qbytearray::*; // 771
use super::qmenu::*; // 773
use super::qmenubar::*; // 773
use super::super::core::qpoint::*; // 771
use super::super::core::qobjectdefs::*; // 771
// use super::qlist::*; // 775
// <= use block end
// ext block begin =>
// #[link(name = "Qt5Core")]
// #[link(name = "Qt5Gui")]
// #[link(name = "Qt5Widgets")]
// #[link(name = "QtInline")]
extern {
fn QMainWindow_Class_Size() -> c_int;
// proto: QStatusBar * QMainWindow::statusBar();
fn C_ZNK11QMainWindow9statusBarEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::setAnimated(bool enabled);
fn C_ZN11QMainWindow11setAnimatedEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
// proto: void QMainWindow::setDockNestingEnabled(bool enabled);
fn C_ZN11QMainWindow21setDockNestingEnabledEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
// proto: bool QMainWindow::unifiedTitleAndToolBarOnMac();
fn C_ZNK11QMainWindow27unifiedTitleAndToolBarOnMacEv(qthis: u64 /* *mut c_void*/) -> c_char;
// proto: QWidget * QMainWindow::menuWidget();
fn C_ZNK11QMainWindow10menuWidgetEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::tabifyDockWidget(QDockWidget * first, QDockWidget * second);
fn C_ZN11QMainWindow16tabifyDockWidgetEP11QDockWidgetS1_(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void);
// proto: void QMainWindow::setDocumentMode(bool enabled);
fn C_ZN11QMainWindow15setDocumentModeEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
// proto: QWidget * QMainWindow::centralWidget();
fn C_ZNK11QMainWindow13centralWidgetEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::removeDockWidget(QDockWidget * dockwidget);
fn C_ZN11QMainWindow16removeDockWidgetEP11QDockWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: bool QMainWindow::isAnimated();
fn C_ZNK11QMainWindow10isAnimatedEv(qthis: u64 /* *mut c_void*/) -> c_char;
// proto: QToolBar * QMainWindow::addToolBar(const QString & title);
fn C_ZN11QMainWindow10addToolBarERK7QString(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
// proto: void QMainWindow::setIconSize(const QSize & iconSize);
fn C_ZN11QMainWindow11setIconSizeERK5QSize(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: QByteArray QMainWindow::saveState(int version);
fn C_ZNK11QMainWindow9saveStateEi(qthis: u64 /* *mut c_void*/, arg0: c_int) -> *mut c_void;
// proto: bool QMainWindow::restoreState(const QByteArray & state, int version);
fn C_ZN11QMainWindow12restoreStateERK10QByteArrayi(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: c_int) -> c_char;
// proto: void QMainWindow::insertToolBar(QToolBar * before, QToolBar * toolbar);
fn C_ZN11QMainWindow13insertToolBarEP8QToolBarS1_(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void);
// proto: QMenu * QMainWindow::createPopupMenu();
fn C_ZN11QMainWindow15createPopupMenuEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::setUnifiedTitleAndToolBarOnMac(bool set);
fn C_ZN11QMainWindow30setUnifiedTitleAndToolBarOnMacEb(qthis: u64 /* *mut c_void*/, arg0: c_char);
// proto: void QMainWindow::addToolBar(QToolBar * toolbar);
fn C_ZN11QMainWindow10addToolBarEP8QToolBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: void QMainWindow::removeToolBarBreak(QToolBar * before);
fn C_ZN11QMainWindow18removeToolBarBreakEP8QToolBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: bool QMainWindow::toolBarBreak(QToolBar * toolbar);
fn C_ZNK11QMainWindow12toolBarBreakEP8QToolBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_char;
// proto: bool QMainWindow::restoreDockWidget(QDockWidget * dockwidget);
fn C_ZN11QMainWindow17restoreDockWidgetEP11QDockWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_char;
// proto: QMenuBar * QMainWindow::menuBar();
fn C_ZNK11QMainWindow7menuBarEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::setStatusBar(QStatusBar * statusbar);
fn C_ZN11QMainWindow12setStatusBarEP10QStatusBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: void QMainWindow::~QMainWindow();
fn C_ZN11QMainWindowD2Ev(qthis: u64 /* *mut c_void*/);
// proto: bool QMainWindow::isSeparator(const QPoint & pos);
fn C_ZNK11QMainWindow11isSeparatorERK6QPoint(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_char;
// proto: QSize QMainWindow::iconSize();
fn C_ZNK11QMainWindow8iconSizeEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: const QMetaObject * QMainWindow::metaObject();
fn C_ZNK11QMainWindow10metaObjectEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: void QMainWindow::insertToolBarBreak(QToolBar * before);
fn C_ZN11QMainWindow18insertToolBarBreakEP8QToolBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: QWidget * QMainWindow::takeCentralWidget();
fn C_ZN11QMainWindow17takeCentralWidgetEv(qthis: u64 /* *mut c_void*/) -> *mut c_void;
// proto: bool QMainWindow::isDockNestingEnabled();
fn C_ZNK11QMainWindow20isDockNestingEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
// proto: bool QMainWindow::documentMode();
fn C_ZNK11QMainWindow12documentModeEv(qthis: u64 /* *mut c_void*/) -> c_char;
// proto: void QMainWindow::setMenuWidget(QWidget * menubar);
fn C_ZN11QMainWindow13setMenuWidgetEP7QWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: void QMainWindow::removeToolBar(QToolBar * toolbar);
fn C_ZN11QMainWindow13removeToolBarEP8QToolBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: void QMainWindow::setCentralWidget(QWidget * widget);
fn C_ZN11QMainWindow16setCentralWidgetEP7QWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: void QMainWindow::setMenuBar(QMenuBar * menubar);
fn C_ZN11QMainWindow10setMenuBarEP8QMenuBar(qthis: u64 /* *mut c_void*/, arg0: *mut c_void);
// proto: QList<QDockWidget *> QMainWindow::tabifiedDockWidgets(QDockWidget * dockwidget);
fn C_ZNK11QMainWindow19tabifiedDockWidgetsEP11QDockWidget(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void;
fn QMainWindow_SlotProxy_connect__ZN11QMainWindow15iconSizeChangedERK5QSize(qthis: *mut c_void, ffifptr: *mut c_void, rsfptr: *mut c_void);
} // <= ext block end
// body block begin =>
// class sizeof(QMainWindow)=1
#[derive(Default)]
pub struct QMainWindow {
    // Embedded QWidget base object; exposed through Deref/AsRef so inherited
    // widget methods can be called directly on a QMainWindow.
    qbase: QWidget,
    // Raw pointer (stored as u64) to the native C++ QMainWindow instance.
    pub qclsinst: u64 /* *mut c_void*/,
    // Signal handle fields, default-initialized; real handles are produced by
    // the toolButtonStyleChanged()/iconSizeChanged() accessors.
    pub _toolButtonStyleChanged: QMainWindow_toolButtonStyleChanged_signal,
    pub _iconSizeChanged: QMainWindow_iconSizeChanged_signal,
}
impl /*struct*/ QMainWindow {
    /// Wraps an already-existing native QMainWindow pointer.
    /// No ownership is taken; the C++ side keeps the object alive.
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QMainWindow {
        QMainWindow {
            qbase: QWidget::inheritFrom(qthis),
            qclsinst: qthis,
            ..Default::default()
        }
    }
}
impl Deref for QMainWindow {
    type Target = QWidget;
    /// Auto-derefs to the embedded QWidget base so inherited methods resolve.
    fn deref(&self) -> &QWidget {
        &self.qbase
    }
}
impl AsRef<QWidget> for QMainWindow {
    /// Borrows the embedded QWidget base object.
    fn as_ref(&self) -> &QWidget {
        &self.qbase
    }
}
// proto: QStatusBar * QMainWindow::statusBar();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::statusBar`.
    pub fn statusBar<RetType, T: QMainWindow_statusBar<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.statusBar(self)
    }
}
/// One impl per C++ overload of `QMainWindow::statusBar`.
pub trait QMainWindow_statusBar<RetType> {
    fn statusBar(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QStatusBar * QMainWindow::statusBar();
impl QMainWindow_statusBar<QStatusBar> for () {
    fn statusBar(self, rsthis: &QMainWindow) -> QStatusBar {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow9statusBarEv(rsthis.qclsinst) };
        QStatusBar::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::setAnimated(bool enabled);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::setAnimated`.
    pub fn setAnimated<RetType, T: QMainWindow_setAnimated<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.setAnimated(self)
    }
}
/// One impl per C++ overload of `QMainWindow::setAnimated`.
pub trait QMainWindow_setAnimated<RetType> {
    fn setAnimated(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::setAnimated(bool enabled);
impl QMainWindow_setAnimated<()> for i8 {
    fn setAnimated(self, rsthis: &QMainWindow) {
        // The C++ bool argument is marshalled as a c_char.
        unsafe { C_ZN11QMainWindow11setAnimatedEb(rsthis.qclsinst, self as c_char) };
    }
}
// proto: void QMainWindow::setDockNestingEnabled(bool enabled);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::setDockNestingEnabled`.
    pub fn setDockNestingEnabled<RetType, T: QMainWindow_setDockNestingEnabled<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.setDockNestingEnabled(self)
    }
}
/// One impl per C++ overload of `QMainWindow::setDockNestingEnabled`.
pub trait QMainWindow_setDockNestingEnabled<RetType> {
    fn setDockNestingEnabled(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::setDockNestingEnabled(bool enabled);
impl QMainWindow_setDockNestingEnabled<()> for i8 {
    fn setDockNestingEnabled(self, rsthis: &QMainWindow) {
        // The C++ bool argument is marshalled as a c_char.
        unsafe { C_ZN11QMainWindow21setDockNestingEnabledEb(rsthis.qclsinst, self as c_char) };
    }
}
// proto: bool QMainWindow::unifiedTitleAndToolBarOnMac();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::unifiedTitleAndToolBarOnMac`.
    pub fn unifiedTitleAndToolBarOnMac<RetType, T: QMainWindow_unifiedTitleAndToolBarOnMac<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.unifiedTitleAndToolBarOnMac(self)
    }
}
/// One impl per C++ overload of `QMainWindow::unifiedTitleAndToolBarOnMac`.
pub trait QMainWindow_unifiedTitleAndToolBarOnMac<RetType> {
    fn unifiedTitleAndToolBarOnMac(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::unifiedTitleAndToolBarOnMac();
impl QMainWindow_unifiedTitleAndToolBarOnMac<i8> for () {
    fn unifiedTitleAndToolBarOnMac(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe { C_ZNK11QMainWindow27unifiedTitleAndToolBarOnMacEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: QWidget * QMainWindow::menuWidget();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::menuWidget`.
    pub fn menuWidget<RetType, T: QMainWindow_menuWidget<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.menuWidget(self)
    }
}
/// One impl per C++ overload of `QMainWindow::menuWidget`.
pub trait QMainWindow_menuWidget<RetType> {
    fn menuWidget(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QWidget * QMainWindow::menuWidget();
impl QMainWindow_menuWidget<QWidget> for () {
    fn menuWidget(self, rsthis: &QMainWindow) -> QWidget {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow10menuWidgetEv(rsthis.qclsinst) };
        QWidget::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::tabifyDockWidget(QDockWidget * first, QDockWidget * second);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::tabifyDockWidget`.
    pub fn tabifyDockWidget<RetType, T: QMainWindow_tabifyDockWidget<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.tabifyDockWidget(self)
    }
}
/// One impl per C++ overload of `QMainWindow::tabifyDockWidget`.
pub trait QMainWindow_tabifyDockWidget<RetType> {
    fn tabifyDockWidget(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::tabifyDockWidget(QDockWidget * first, QDockWidget * second);
impl<'a> QMainWindow_tabifyDockWidget<()> for (&'a QDockWidget, &'a QDockWidget) {
    fn tabifyDockWidget(self, rsthis: &QMainWindow) {
        let (first, second) = self;
        unsafe {
            C_ZN11QMainWindow16tabifyDockWidgetEP11QDockWidgetS1_(
                rsthis.qclsinst,
                first.qclsinst as *mut c_void,
                second.qclsinst as *mut c_void,
            )
        };
    }
}
// proto: void QMainWindow::setDocumentMode(bool enabled);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::setDocumentMode`.
    pub fn setDocumentMode<RetType, T: QMainWindow_setDocumentMode<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.setDocumentMode(self)
    }
}
/// One impl per C++ overload of `QMainWindow::setDocumentMode`.
pub trait QMainWindow_setDocumentMode<RetType> {
    fn setDocumentMode(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::setDocumentMode(bool enabled);
impl QMainWindow_setDocumentMode<()> for i8 {
    fn setDocumentMode(self, rsthis: &QMainWindow) {
        // The C++ bool argument is marshalled as a c_char.
        unsafe { C_ZN11QMainWindow15setDocumentModeEb(rsthis.qclsinst, self as c_char) };
    }
}
// proto: QWidget * QMainWindow::centralWidget();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::centralWidget`.
    pub fn centralWidget<RetType, T: QMainWindow_centralWidget<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.centralWidget(self)
    }
}
/// One impl per C++ overload of `QMainWindow::centralWidget`.
pub trait QMainWindow_centralWidget<RetType> {
    fn centralWidget(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QWidget * QMainWindow::centralWidget();
impl QMainWindow_centralWidget<QWidget> for () {
    fn centralWidget(self, rsthis: &QMainWindow) -> QWidget {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow13centralWidgetEv(rsthis.qclsinst) };
        QWidget::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::removeDockWidget(QDockWidget * dockwidget);
impl /*struct*/ QMainWindow {
pub fn removeDockWidget<RetType, T: QMainWindow_removeDockWidget<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.removeDockWidget(self);
// return 1;
}
}
pub trait QMainWindow_removeDockWidget<RetType> {
fn removeDockWidget(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::removeDockWidget(QDockWidget * dockwidget);
impl<'a> /*trait*/ QMainWindow_removeDockWidget<()> for (&'a QDockWidget) {
fn removeDockWidget(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow16removeDockWidgetEP11QDockWidget()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow16removeDockWidgetEP11QDockWidget(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: bool QMainWindow::isAnimated();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::isAnimated`.
    pub fn isAnimated<RetType, T: QMainWindow_isAnimated<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.isAnimated(self)
    }
}
/// One impl per C++ overload of `QMainWindow::isAnimated`.
pub trait QMainWindow_isAnimated<RetType> {
    fn isAnimated(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::isAnimated();
impl QMainWindow_isAnimated<i8> for () {
    fn isAnimated(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe { C_ZNK11QMainWindow10isAnimatedEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: QToolBar * QMainWindow::addToolBar(const QString & title);
impl /*struct*/ QMainWindow {
pub fn addToolBar<RetType, T: QMainWindow_addToolBar<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.addToolBar(self);
// return 1;
}
}
pub trait QMainWindow_addToolBar<RetType> {
fn addToolBar(self , rsthis: & QMainWindow) -> RetType;
}
// proto: QToolBar * QMainWindow::addToolBar(const QString & title);
impl<'a> /*trait*/ QMainWindow_addToolBar<QToolBar> for (&'a QString) {
fn addToolBar(self , rsthis: & QMainWindow) -> QToolBar {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow10addToolBarERK7QString()};
let arg0 = self.qclsinst as *mut c_void;
let mut ret = unsafe {C_ZN11QMainWindow10addToolBarERK7QString(rsthis.qclsinst, arg0)};
let mut ret1 = QToolBar::inheritFrom(ret as u64);
return ret1;
// return 1;
}
}
// proto: void QMainWindow::setIconSize(const QSize & iconSize);
impl /*struct*/ QMainWindow {
pub fn setIconSize<RetType, T: QMainWindow_setIconSize<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.setIconSize(self);
// return 1;
}
}
pub trait QMainWindow_setIconSize<RetType> {
fn setIconSize(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::setIconSize(const QSize & iconSize);
impl<'a> /*trait*/ QMainWindow_setIconSize<()> for (&'a QSize) {
fn setIconSize(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow11setIconSizeERK5QSize()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow11setIconSizeERK5QSize(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: QByteArray QMainWindow::saveState(int version);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::saveState`.
    pub fn saveState<RetType, T: QMainWindow_saveState<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.saveState(self)
    }
}
/// One impl per C++ overload of `QMainWindow::saveState`.
pub trait QMainWindow_saveState<RetType> {
    fn saveState(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QByteArray QMainWindow::saveState(int version);
impl QMainWindow_saveState<QByteArray> for Option<i32> {
    fn saveState(self, rsthis: &QMainWindow) -> QByteArray {
        // A missing version argument falls back to 0, mirroring the original
        // is_none()/unwrap() handling.
        let version = self.unwrap_or(0) as c_int;
        let raw = unsafe { C_ZNK11QMainWindow9saveStateEi(rsthis.qclsinst, version) };
        QByteArray::inheritFrom(raw as u64)
    }
}
// proto: bool QMainWindow::restoreState(const QByteArray & state, int version);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::restoreState`.
    pub fn restoreState<RetType, T: QMainWindow_restoreState<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.restoreState(self)
    }
}
/// One impl per C++ overload of `QMainWindow::restoreState`.
pub trait QMainWindow_restoreState<RetType> {
    fn restoreState(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::restoreState(const QByteArray & state, int version);
impl<'a> QMainWindow_restoreState<i8> for (&'a QByteArray, Option<i32>) {
    fn restoreState(self, rsthis: &QMainWindow) -> i8 {
        let (state, version) = self;
        // A missing version argument falls back to 0, as in the original code.
        let raw = unsafe {
            C_ZN11QMainWindow12restoreStateERK10QByteArrayi(
                rsthis.qclsinst,
                state.qclsinst as *mut c_void,
                version.unwrap_or(0) as c_int,
            )
        };
        raw as i8
    }
}
// proto: void QMainWindow::insertToolBar(QToolBar * before, QToolBar * toolbar);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::insertToolBar`.
    pub fn insertToolBar<RetType, T: QMainWindow_insertToolBar<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.insertToolBar(self)
    }
}
/// One impl per C++ overload of `QMainWindow::insertToolBar`.
pub trait QMainWindow_insertToolBar<RetType> {
    fn insertToolBar(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::insertToolBar(QToolBar * before, QToolBar * toolbar);
impl<'a> QMainWindow_insertToolBar<()> for (&'a QToolBar, &'a QToolBar) {
    fn insertToolBar(self, rsthis: &QMainWindow) {
        let (before, toolbar) = self;
        unsafe {
            C_ZN11QMainWindow13insertToolBarEP8QToolBarS1_(
                rsthis.qclsinst,
                before.qclsinst as *mut c_void,
                toolbar.qclsinst as *mut c_void,
            )
        };
    }
}
// proto: QMenu * QMainWindow::createPopupMenu();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::createPopupMenu`.
    pub fn createPopupMenu<RetType, T: QMainWindow_createPopupMenu<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.createPopupMenu(self)
    }
}
/// One impl per C++ overload of `QMainWindow::createPopupMenu`.
pub trait QMainWindow_createPopupMenu<RetType> {
    fn createPopupMenu(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QMenu * QMainWindow::createPopupMenu();
impl QMainWindow_createPopupMenu<QMenu> for () {
    fn createPopupMenu(self, rsthis: &QMainWindow) -> QMenu {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZN11QMainWindow15createPopupMenuEv(rsthis.qclsinst) };
        QMenu::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::setUnifiedTitleAndToolBarOnMac(bool set);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::setUnifiedTitleAndToolBarOnMac`.
    pub fn setUnifiedTitleAndToolBarOnMac<RetType, T: QMainWindow_setUnifiedTitleAndToolBarOnMac<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.setUnifiedTitleAndToolBarOnMac(self)
    }
}
/// One impl per C++ overload of `QMainWindow::setUnifiedTitleAndToolBarOnMac`.
pub trait QMainWindow_setUnifiedTitleAndToolBarOnMac<RetType> {
    fn setUnifiedTitleAndToolBarOnMac(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::setUnifiedTitleAndToolBarOnMac(bool set);
impl QMainWindow_setUnifiedTitleAndToolBarOnMac<()> for i8 {
    fn setUnifiedTitleAndToolBarOnMac(self, rsthis: &QMainWindow) {
        // The C++ bool argument is marshalled as a c_char.
        unsafe { C_ZN11QMainWindow30setUnifiedTitleAndToolBarOnMacEb(rsthis.qclsinst, self as c_char) };
    }
}
// proto: void QMainWindow::addToolBar(QToolBar * toolbar);
impl<'a> /*trait*/ QMainWindow_addToolBar<()> for (&'a QToolBar) {
fn addToolBar(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow10addToolBarEP8QToolBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow10addToolBarEP8QToolBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: void QMainWindow::removeToolBarBreak(QToolBar * before);
impl /*struct*/ QMainWindow {
pub fn removeToolBarBreak<RetType, T: QMainWindow_removeToolBarBreak<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.removeToolBarBreak(self);
// return 1;
}
}
pub trait QMainWindow_removeToolBarBreak<RetType> {
fn removeToolBarBreak(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::removeToolBarBreak(QToolBar * before);
impl<'a> /*trait*/ QMainWindow_removeToolBarBreak<()> for (&'a QToolBar) {
fn removeToolBarBreak(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow18removeToolBarBreakEP8QToolBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow18removeToolBarBreakEP8QToolBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: bool QMainWindow::toolBarBreak(QToolBar * toolbar);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::toolBarBreak`.
    pub fn toolBarBreak<RetType, T: QMainWindow_toolBarBreak<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.toolBarBreak(self)
    }
}
/// One impl per C++ overload of `QMainWindow::toolBarBreak`.
pub trait QMainWindow_toolBarBreak<RetType> {
    fn toolBarBreak(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::toolBarBreak(QToolBar * toolbar);
impl<'a> QMainWindow_toolBarBreak<i8> for &'a QToolBar {
    fn toolBarBreak(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe {
            C_ZNK11QMainWindow12toolBarBreakEP8QToolBar(
                rsthis.qclsinst,
                self.qclsinst as *mut c_void,
            )
        };
        raw as i8
    }
}
// proto: bool QMainWindow::restoreDockWidget(QDockWidget * dockwidget);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::restoreDockWidget`.
    pub fn restoreDockWidget<RetType, T: QMainWindow_restoreDockWidget<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.restoreDockWidget(self)
    }
}
/// One impl per C++ overload of `QMainWindow::restoreDockWidget`.
pub trait QMainWindow_restoreDockWidget<RetType> {
    fn restoreDockWidget(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::restoreDockWidget(QDockWidget * dockwidget);
impl<'a> QMainWindow_restoreDockWidget<i8> for &'a QDockWidget {
    fn restoreDockWidget(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe {
            C_ZN11QMainWindow17restoreDockWidgetEP11QDockWidget(
                rsthis.qclsinst,
                self.qclsinst as *mut c_void,
            )
        };
        raw as i8
    }
}
// proto: QMenuBar * QMainWindow::menuBar();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::menuBar`.
    pub fn menuBar<RetType, T: QMainWindow_menuBar<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.menuBar(self)
    }
}
/// One impl per C++ overload of `QMainWindow::menuBar`.
pub trait QMainWindow_menuBar<RetType> {
    fn menuBar(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QMenuBar * QMainWindow::menuBar();
impl QMainWindow_menuBar<QMenuBar> for () {
    fn menuBar(self, rsthis: &QMainWindow) -> QMenuBar {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow7menuBarEv(rsthis.qclsinst) };
        QMenuBar::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::setStatusBar(QStatusBar * statusbar);
impl /*struct*/ QMainWindow {
pub fn setStatusBar<RetType, T: QMainWindow_setStatusBar<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.setStatusBar(self);
// return 1;
}
}
pub trait QMainWindow_setStatusBar<RetType> {
fn setStatusBar(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::setStatusBar(QStatusBar * statusbar);
impl<'a> /*trait*/ QMainWindow_setStatusBar<()> for (&'a QStatusBar) {
fn setStatusBar(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow12setStatusBarEP10QStatusBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow12setStatusBarEP10QStatusBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: void QMainWindow::~QMainWindow();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for the C++ destructor.
    pub fn free<RetType, T: QMainWindow_free<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.free(self)
    }
}
/// One impl per C++ overload of `QMainWindow::~QMainWindow`.
pub trait QMainWindow_free<RetType> {
    fn free(self, rsthis: &QMainWindow) -> RetType;
}
// proto: void QMainWindow::~QMainWindow();
impl QMainWindow_free<()> for () {
    fn free(self, rsthis: &QMainWindow) {
        // Runs the C++ destructor on the underlying instance; the wrapper must
        // not be used afterwards (the raw pointer becomes dangling).
        unsafe { C_ZN11QMainWindowD2Ev(rsthis.qclsinst) };
    }
}
// proto: bool QMainWindow::isSeparator(const QPoint & pos);
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::isSeparator`.
    pub fn isSeparator<RetType, T: QMainWindow_isSeparator<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.isSeparator(self)
    }
}
/// One impl per C++ overload of `QMainWindow::isSeparator`.
pub trait QMainWindow_isSeparator<RetType> {
    fn isSeparator(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::isSeparator(const QPoint & pos);
impl<'a> QMainWindow_isSeparator<i8> for &'a QPoint {
    fn isSeparator(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe {
            C_ZNK11QMainWindow11isSeparatorERK6QPoint(
                rsthis.qclsinst,
                self.qclsinst as *mut c_void,
            )
        };
        raw as i8
    }
}
// proto: QSize QMainWindow::iconSize();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::iconSize`.
    pub fn iconSize<RetType, T: QMainWindow_iconSize<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.iconSize(self)
    }
}
/// One impl per C++ overload of `QMainWindow::iconSize`.
pub trait QMainWindow_iconSize<RetType> {
    fn iconSize(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QSize QMainWindow::iconSize();
impl QMainWindow_iconSize<QSize> for () {
    fn iconSize(self, rsthis: &QMainWindow) -> QSize {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow8iconSizeEv(rsthis.qclsinst) };
        QSize::inheritFrom(raw as u64)
    }
}
// proto: const QMetaObject * QMainWindow::metaObject();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::metaObject`.
    pub fn metaObject<RetType, T: QMainWindow_metaObject<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.metaObject(self)
    }
}
/// One impl per C++ overload of `QMainWindow::metaObject`.
pub trait QMainWindow_metaObject<RetType> {
    fn metaObject(self, rsthis: &QMainWindow) -> RetType;
}
// proto: const QMetaObject * QMainWindow::metaObject();
impl QMainWindow_metaObject<QMetaObject> for () {
    fn metaObject(self, rsthis: &QMainWindow) -> QMetaObject {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZNK11QMainWindow10metaObjectEv(rsthis.qclsinst) };
        QMetaObject::inheritFrom(raw as u64)
    }
}
// proto: void QMainWindow::insertToolBarBreak(QToolBar * before);
impl /*struct*/ QMainWindow {
pub fn insertToolBarBreak<RetType, T: QMainWindow_insertToolBarBreak<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.insertToolBarBreak(self);
// return 1;
}
}
pub trait QMainWindow_insertToolBarBreak<RetType> {
fn insertToolBarBreak(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::insertToolBarBreak(QToolBar * before);
impl<'a> /*trait*/ QMainWindow_insertToolBarBreak<()> for (&'a QToolBar) {
fn insertToolBarBreak(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow18insertToolBarBreakEP8QToolBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow18insertToolBarBreakEP8QToolBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: QWidget * QMainWindow::takeCentralWidget();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::takeCentralWidget`.
    pub fn takeCentralWidget<RetType, T: QMainWindow_takeCentralWidget<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.takeCentralWidget(self)
    }
}
/// One impl per C++ overload of `QMainWindow::takeCentralWidget`.
pub trait QMainWindow_takeCentralWidget<RetType> {
    fn takeCentralWidget(self, rsthis: &QMainWindow) -> RetType;
}
// proto: QWidget * QMainWindow::takeCentralWidget();
impl QMainWindow_takeCentralWidget<QWidget> for () {
    fn takeCentralWidget(self, rsthis: &QMainWindow) -> QWidget {
        // Call the C shim, then wrap the returned raw pointer.
        let raw = unsafe { C_ZN11QMainWindow17takeCentralWidgetEv(rsthis.qclsinst) };
        QWidget::inheritFrom(raw as u64)
    }
}
// proto: bool QMainWindow::isDockNestingEnabled();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::isDockNestingEnabled`.
    pub fn isDockNestingEnabled<RetType, T: QMainWindow_isDockNestingEnabled<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.isDockNestingEnabled(self)
    }
}
/// One impl per C++ overload of `QMainWindow::isDockNestingEnabled`.
pub trait QMainWindow_isDockNestingEnabled<RetType> {
    fn isDockNestingEnabled(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::isDockNestingEnabled();
impl QMainWindow_isDockNestingEnabled<i8> for () {
    fn isDockNestingEnabled(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe { C_ZNK11QMainWindow20isDockNestingEnabledEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: bool QMainWindow::documentMode();
impl /*struct*/ QMainWindow {
    /// Overload dispatcher for `QMainWindow::documentMode`.
    pub fn documentMode<RetType, T: QMainWindow_documentMode<RetType>>(&self, overload_args: T) -> RetType {
        overload_args.documentMode(self)
    }
}
/// One impl per C++ overload of `QMainWindow::documentMode`.
pub trait QMainWindow_documentMode<RetType> {
    fn documentMode(self, rsthis: &QMainWindow) -> RetType;
}
// proto: bool QMainWindow::documentMode();
impl QMainWindow_documentMode<i8> for () {
    fn documentMode(self, rsthis: &QMainWindow) -> i8 {
        // The C++ bool result is marshalled as a c_char.
        let raw = unsafe { C_ZNK11QMainWindow12documentModeEv(rsthis.qclsinst) };
        raw as i8
    }
}
// proto: void QMainWindow::setMenuWidget(QWidget * menubar);
impl /*struct*/ QMainWindow {
pub fn setMenuWidget<RetType, T: QMainWindow_setMenuWidget<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.setMenuWidget(self);
// return 1;
}
}
pub trait QMainWindow_setMenuWidget<RetType> {
fn setMenuWidget(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::setMenuWidget(QWidget * menubar);
impl<'a> /*trait*/ QMainWindow_setMenuWidget<()> for (&'a QWidget) {
fn setMenuWidget(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow13setMenuWidgetEP7QWidget()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow13setMenuWidgetEP7QWidget(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: void QMainWindow::removeToolBar(QToolBar * toolbar);
impl /*struct*/ QMainWindow {
pub fn removeToolBar<RetType, T: QMainWindow_removeToolBar<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.removeToolBar(self);
// return 1;
}
}
pub trait QMainWindow_removeToolBar<RetType> {
fn removeToolBar(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::removeToolBar(QToolBar * toolbar);
impl<'a> /*trait*/ QMainWindow_removeToolBar<()> for (&'a QToolBar) {
fn removeToolBar(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow13removeToolBarEP8QToolBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow13removeToolBarEP8QToolBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: void QMainWindow::setCentralWidget(QWidget * widget);
impl /*struct*/ QMainWindow {
pub fn setCentralWidget<RetType, T: QMainWindow_setCentralWidget<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.setCentralWidget(self);
// return 1;
}
}
pub trait QMainWindow_setCentralWidget<RetType> {
fn setCentralWidget(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::setCentralWidget(QWidget * widget);
impl<'a> /*trait*/ QMainWindow_setCentralWidget<()> for (&'a QWidget) {
fn setCentralWidget(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow16setCentralWidgetEP7QWidget()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow16setCentralWidgetEP7QWidget(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: void QMainWindow::setMenuBar(QMenuBar * menubar);
impl /*struct*/ QMainWindow {
pub fn setMenuBar<RetType, T: QMainWindow_setMenuBar<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.setMenuBar(self);
// return 1;
}
}
pub trait QMainWindow_setMenuBar<RetType> {
fn setMenuBar(self , rsthis: & QMainWindow) -> RetType;
}
// proto: void QMainWindow::setMenuBar(QMenuBar * menubar);
impl<'a> /*trait*/ QMainWindow_setMenuBar<()> for (&'a QMenuBar) {
fn setMenuBar(self , rsthis: & QMainWindow) -> () {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZN11QMainWindow10setMenuBarEP8QMenuBar()};
let arg0 = self.qclsinst as *mut c_void;
unsafe {C_ZN11QMainWindow10setMenuBarEP8QMenuBar(rsthis.qclsinst, arg0)};
// return 1;
}
}
// proto: QList<QDockWidget *> QMainWindow::tabifiedDockWidgets(QDockWidget * dockwidget);
impl /*struct*/ QMainWindow {
pub fn tabifiedDockWidgets<RetType, T: QMainWindow_tabifiedDockWidgets<RetType>>(& self, overload_args: T) -> RetType {
return overload_args.tabifiedDockWidgets(self);
// return 1;
}
}
pub trait QMainWindow_tabifiedDockWidgets<RetType> {
fn tabifiedDockWidgets(self , rsthis: & QMainWindow) -> RetType;
}
// proto: QList<QDockWidget *> QMainWindow::tabifiedDockWidgets(QDockWidget * dockwidget);
impl<'a> /*trait*/ QMainWindow_tabifiedDockWidgets<u64> for (&'a QDockWidget) {
fn tabifiedDockWidgets(self , rsthis: & QMainWindow) -> u64 {
// let qthis: *mut c_void = unsafe{calloc(1, 32)};
// unsafe{_ZNK11QMainWindow19tabifiedDockWidgetsEP11QDockWidget()};
let arg0 = self.qclsinst as *mut c_void;
let mut ret = unsafe {C_ZNK11QMainWindow19tabifiedDockWidgetsEP11QDockWidget(rsthis.qclsinst, arg0)};
return ret as u64; // 5
// return 1;
}
}
#[derive(Default)] // for QMainWindow_toolButtonStyleChanged
/// Handle for the `toolButtonStyleChanged` signal; `poi` stores the raw
/// C++ `QMainWindow*` as an integer.
pub struct QMainWindow_toolButtonStyleChanged_signal{poi:u64}
impl /* struct */ QMainWindow {
    /// Returns a connectable handle bound to this window's signal.
    pub fn toolButtonStyleChanged(&self) -> QMainWindow_toolButtonStyleChanged_signal {
        return QMainWindow_toolButtonStyleChanged_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMainWindow_toolButtonStyleChanged_signal {
    /// Connects a callback; the callback type is selected via the
    /// `..._signal_connect` trait impls.
    pub fn connect<T: QMainWindow_toolButtonStyleChanged_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Per-callback-type trait for connecting to `toolButtonStyleChanged`.
pub trait QMainWindow_toolButtonStyleChanged_signal_connect {
    fn connect(self, sigthis: QMainWindow_toolButtonStyleChanged_signal);
}
#[derive(Default)] // for QMainWindow_iconSizeChanged
/// Handle for the `iconSizeChanged(const QSize&)` signal; `poi` stores the
/// raw C++ `QMainWindow*` as an integer.
pub struct QMainWindow_iconSizeChanged_signal{poi:u64}
impl /* struct */ QMainWindow {
    /// Returns a connectable handle bound to this window's signal.
    pub fn iconSizeChanged(&self) -> QMainWindow_iconSizeChanged_signal {
        return QMainWindow_iconSizeChanged_signal{poi:self.qclsinst};
    }
}
impl /* struct */ QMainWindow_iconSizeChanged_signal {
    /// Connects a callback; the callback type is selected via the
    /// `..._signal_connect` trait impls (plain `fn` or boxed closure).
    pub fn connect<T: QMainWindow_iconSizeChanged_signal_connect>(self, overload_args: T) {
        overload_args.connect(self);
    }
}
/// Per-callback-type trait for connecting to `iconSizeChanged`.
pub trait QMainWindow_iconSizeChanged_signal_connect {
    fn connect(self, sigthis: QMainWindow_iconSizeChanged_signal);
}
// iconSizeChanged(const class QSize &)
/// C-ABI trampoline for plain-`fn` callbacks: wraps the raw `QSize*` coming
/// from the slot proxy into a Rust `QSize` and invokes the stored fn pointer.
extern fn QMainWindow_iconSizeChanged_signal_connect_cb_0(rsfptr:fn(QSize), arg0: *mut c_void) {
    println!("{}:{}", file!(), line!()); // debug trace of the emission site
    let rsarg0 = QSize::inheritFrom(arg0 as u64);
    rsfptr(rsarg0);
}
/// C-ABI trampoline for boxed-closure callbacks registered via
/// `connect` on `Box<Fn(QSize)>`.
///
/// Bug fix: the original reconstructed the Box with `Box::from_raw` and let
/// it drop at the end of the call, freeing the closure — every subsequent
/// emission of the signal then invoked freed memory. The closure storage is
/// owned by the connection (leaked in `connect`), so this trampoline must
/// only borrow through the raw pointer, never reclaim ownership.
extern fn QMainWindow_iconSizeChanged_signal_connect_cb_box_0(rsfptr_raw:*mut Box<Fn(QSize)>, arg0: *mut c_void) {
    println!("{}:{}", file!(), line!()); // debug trace of the emission site
    let rsarg0 = QSize::inheritFrom(arg0 as u64);
    // SAFETY: rsfptr_raw was produced by Box::into_raw in `connect` and is
    // never freed, so it remains valid for every emission.
    unsafe{(*rsfptr_raw)(rsarg0)};
}
impl /* trait */ QMainWindow_iconSizeChanged_signal_connect for fn(QSize) {
    /// Connects a plain `fn` pointer to `iconSizeChanged(const QSize&)`.
    /// The fn pointer is passed through to the C slot proxy, which calls the
    /// trampoline `..._connect_cb_0` with it on every emission.
    ///
    /// Cleanup vs. the original: two no-op expression statements
    /// (`self as *mut c_void as u64;` etc.) were dead code and are removed.
    fn connect(self, sigthis: QMainWindow_iconSizeChanged_signal) {
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMainWindow_iconSizeChanged_signal_connect_cb_0 as *mut c_void;
        let arg2 = self as *mut c_void; // fn pointers are thin; cast is lossless
        // SAFETY: the proxy stores arg1/arg2 and invokes arg1(arg2, qsize*)
        // on emission; a fn pointer stays valid for the program's lifetime.
        unsafe {QMainWindow_SlotProxy_connect__ZN11QMainWindow15iconSizeChangedERK5QSize(arg0, arg1, arg2)};
    }
}
impl /* trait */ QMainWindow_iconSizeChanged_signal_connect for Box<Fn(QSize)> {
    /// Connects a boxed closure to `iconSizeChanged(const QSize&)`.
    ///
    /// Cleanup vs. the original: stale commented-out experiments removed.
    fn connect(self, sigthis: QMainWindow_iconSizeChanged_signal) {
        let arg0 = sigthis.poi as *mut c_void;
        let arg1 = QMainWindow_iconSizeChanged_signal_connect_cb_box_0 as *mut c_void;
        // Double-box to obtain a thin pointer: `Box<Fn(QSize)>` is a fat
        // pointer and cannot be cast to `*mut c_void` directly. The outer
        // allocation is intentionally leaked — the slot may fire any number
        // of times for the life of the connection.
        let arg2 = Box::into_raw(Box::new(self)) as *mut c_void;
        unsafe {QMainWindow_SlotProxy_connect__ZN11QMainWindow15iconSizeChangedERK5QSize(arg0, arg1, arg2)};
    }
}
// <= body block end
|
use std::rc::Rc;
use serialize::json::Json;
use vec::Vec3;
use material::{ Color, Material };
use object::{ Object, Objects, Rotate, Sphere, Plane, Dir, AARect, AABox, AAHexa };
use light::{ Light, Lights, Bulb, Sun };
use scene::{ Picture, Eye, Scene };
/// Parses a JSON scene description into its eye, scene and picture parts.
/// Panics if the input is not valid JSON or a required key is missing.
pub fn load(input: &str) -> (Eye, Scene, Picture) {
    let root = Json::from_str(input).unwrap();
    (load_eye(&root, "eye"), load_scene(&root, "scene"), load_picture(&root, "picture"))
}
// Picture
/// Reads output-image parameters; `bounce` and `sample` fall back to
/// defaults (5 and 1) when absent.
fn load_picture(root: &Json, key: &str) -> Picture {
    let obj = root.find(key).unwrap();
    Picture::new(
        load_u32(obj, "w"),
        load_u32(obj, "h"),
        load_str(obj, "path"),
        load_u32_or(obj, "bounce", 5),
        load_u32_or(obj, "sample", 1),
    )
}
// Eye
/// Reads the camera: position, view direction and field of view.
fn load_eye(root: &Json, key: &str) -> Eye {
    let obj = root.find(key).unwrap();
    Eye::new(
        load_vec3(obj, "pos"),
        load_vec3(obj, "dir"),
        load_f64(obj, "fov"),
    )
}
// Scene
/// Reads the scene: objects, lights, ambient term (default 0.2) and
/// background color (default sky blue).
fn load_scene<'a>(root: &Json, key: &str) -> Scene<'a> {
    let obj = root.find(key).unwrap();
    Scene::new(
        load_objects(obj, "objects"),
        load_lights(obj, "lights"),
        load_f64_or(obj, "ambient", 0.2),
        load_color_or(obj, "back", Color::new(0.39, 0.8, 0.92)),
    )
}
// Lights
/// Builds the light list from a JSON array; each element is an object whose
/// single key ("bulb" or "sun") selects the light variant.
fn load_lights<'a>(root: &Json, key: &str) -> Lights<'a> {
    let array = root.find(key).unwrap().as_array().unwrap();
    let all = array.iter().map(|obj| {
        // The variant tag is the element's first (assumed only) key.
        let key = obj.as_object().unwrap().keys().next().unwrap();
        match key.as_slice() {
            "bulb" => box load_bulb(obj, key) as Box<Light>,
            "sun" => box load_sun(obj, key) as Box<Light>,
            _ => panic!("Not a light"),
        }
    }).collect();
    Lights::new(all)
}
// Sun
/// Directional light: direction, specular factor, shininess, diffuse factor.
fn load_sun(root: &Json, key: &str) -> Sun {
    let obj = root.find(key).unwrap();
    Sun::new(
        load_vec3(obj, "dir"),
        load_f64(obj, "spec"),
        load_i32(obj, "shin"),
        load_f64(obj, "diff"),
    )
}
// Bulb
/// Point light: position, specular factor, shininess, diffuse factor.
fn load_bulb(root: &Json, key: &str) -> Bulb {
    let obj = root.find(key).unwrap();
    Bulb::new(
        load_vec3(obj, "pos"),
        load_f64(obj, "spec"),
        load_i32(obj, "shin"),
        load_f64(obj, "diff"),
    )
}
// Objects
/// Builds the object list from a JSON array, one boxed object per element.
fn load_objects<'a>(root: &Json, key: &str) -> Objects<'a> {
    let array = root.find(key).unwrap().as_array().unwrap();
    let all = array.iter().map(|obj| load_object(obj)).collect();
    Objects::new(all)
}
// Object
/// Dispatches on the element's single key to the matching shape loader.
/// Recursively reachable from `load_rotate` for nested objects.
fn load_object(root: &Json) -> Box<Object> {
    let key = root.as_object().unwrap().keys().next().unwrap();
    match key.as_slice() {
        "rotate" => box load_rotate(root, key) as Box<Object>,
        "sphere" => box load_sphere(root, key) as Box<Object>,
        "plane" => box load_plane(root, key) as Box<Object>,
        "aarect" => box load_aarect(root, key) as Box<Object>,
        "aabox" => box load_aabox(root, key) as Box<Object>,
        "aahexa" => box load_aahexa(root, key) as Box<Object>,
        _ => panic!("Not an object"),
    }
}
// Rotate
/// Rotation wrapper around a nested object (loaded recursively).
fn load_rotate<'a>(root: &Json, key: &str) -> Rotate<'a> {
    let obj = root.find(key).unwrap();
    Rotate::new(
        load_vec3(obj, "pos"),
        load_vec3(obj, "dir"),
        load_object(obj.find("object").unwrap()),
    )
}
// AAHexa
/// Axis-aligned hexagonal prism: position, x/y extents, shared material.
fn load_aahexa<'a>(root: &Json, key: &str) -> AAHexa<'a> {
    let obj = root.find(key).unwrap();
    AAHexa::new(
        load_vec3(obj, "pos"),
        load_f64(obj, "x"),
        load_f64(obj, "y"),
        Rc::new(load_material(obj, "mat")),
    )
}
// AABox
/// Axis-aligned box: position, dimensions, material, skybox flag.
fn load_aabox<'a>(root: &Json, key: &str) -> AABox<'a> {
    let obj = root.find(key).unwrap();
    AABox::new(
        load_vec3(obj, "pos"),
        load_vec3(obj, "dim"),
        Rc::new(load_material(obj, "mat")),
        load_bool(obj, "skybox"),
    )
}
// AARect
/// Axis-aligned rectangle: position, facing direction, dimensions, material.
fn load_aarect(root: &Json, key: &str) -> AARect {
    let obj = root.find(key).unwrap();
    AARect::new(
        load_vec3(obj, "pos"),
        load_dir(obj, "dir"),
        load_vec3(obj, "dim"),
        Rc::new(load_material(obj, "mat")),
    )
}
// Dir
/// Maps a direction string to the `Dir` enum; panics on unknown values.
fn load_dir(root: &Json, key: &str) -> Dir {
    match root.find(key).unwrap().as_string().unwrap() {
        "left" => Dir::Left,
        "right" => Dir::Right,
        "top" => Dir::Top,
        "bottom" => Dir::Bottom,
        "front" => Dir::Front,
        "back" => Dir::Back,
        _ => panic!("Not a Dir"),
    }
}
// Plane
/// Infinite plane: point, normal vector, material.
fn load_plane(root: &Json, key: &str) -> Plane {
    let obj = root.find(key).unwrap();
    Plane::new(
        load_vec3(obj, "pos"),
        load_vec3(obj, "normal"),
        Rc::new(load_material(obj, "mat")),
    )
}
// Sphere
/// Sphere: center, radius, material.
fn load_sphere(root: &Json, key: &str) -> Sphere {
    let obj = root.find(key).unwrap();
    Sphere::new(
        load_vec3(obj, "pos"),
        load_f64(obj, "radius"),
        Rc::new(load_material(obj, "mat")),
    )
}
// Material
/// Surface material; all scalar parameters are optional with physically
/// neutral defaults (no specular/refraction/reflection, full diffuse).
fn load_material(root: &Json, key: &str) -> Material {
    let obj = root.find(key).unwrap();
    Material::new(
        load_color(obj, "color"),
        load_f64_or(obj, "spec", 0.),
        load_f64_or(obj, "diff", 1.),
        load_f64_or(obj, "refr", 0.),
        load_f64_or(obj, "refr-idx", 1.),
        load_f64_or(obj, "refl", 0.),
    )
}
// Color
/// RGB color with mandatory channels.
fn load_color(root: &Json, key: &str) -> Color {
    let obj = root.find(key).unwrap();
    Color::new(
        load_f64(obj, "r"),
        load_f64(obj, "g"),
        load_f64(obj, "b"),
    )
}
/// RGB color with a fallback when the key is absent.
///
/// Cleanup vs. the original: the found node is bound once in a `match`
/// instead of being re-`unwrap()`ed for each of the three channels.
fn load_color_or(root: &Json, key: &str, def: Color) -> Color {
    match root.find(key) {
        None => def,
        Some(obj) => Color::new(
            load_f64(obj, "r"),
            load_f64(obj, "g"),
            load_f64(obj, "b"),
        ),
    }
}
// Vec3
/// 3-component vector with mandatory x/y/z fields.
fn load_vec3(root: &Json, key: &str) -> Vec3 {
    let obj = root.find(key).unwrap();
    Vec3::new(
        load_f64(obj, "x"),
        load_f64(obj, "y"),
        load_f64(obj, "z"),
    )
}
// String
/// Borrows a mandatory string value out of the JSON tree.
fn load_str<'a>(root: &'a Json, key: &str) -> &'a str {
    root.find(key).unwrap().as_string().unwrap()
}
// f64
/// Reads a mandatory floating-point value; panics when missing or non-numeric.
fn load_f64(root: &Json, key: &str) -> f64 {
    root.find(key).unwrap().as_f64().unwrap()
}
/// Reads a floating-point value, falling back to `def` when the key is
/// absent. Cleanup vs. the original: single `match` instead of an
/// `is_none()` check followed by `unwrap()`.
fn load_f64_or(root: &Json, key: &str, def: f64) -> f64 {
    match root.find(key) {
        None => def,
        Some(obj) => obj.as_f64().unwrap(),
    }
}
// u32
/// Reads a mandatory integer value, narrowing from the parser's i64.
fn load_u32(root: &Json, key: &str) -> u32 {
    root.find(key).unwrap().as_i64().unwrap() as u32
}
/// Reads an integer value, falling back to `def` when the key is absent.
/// Cleanup vs. the original: single `match` instead of an `is_none()`
/// check followed by `unwrap()`.
fn load_u32_or(root: &Json, key: &str, def: u32) -> u32 {
    match root.find(key) {
        None => def,
        Some(obj) => obj.as_i64().unwrap() as u32,
    }
}
// i32
/// Reads a mandatory signed integer value.
fn load_i32(root: &Json, key: &str) -> i32 {
    root.find(key).unwrap().as_i64().unwrap() as i32
}
// bool
/// Reads a mandatory boolean value.
fn load_bool(root: &Json, key: &str) -> bool {
    root.find(key).unwrap().as_boolean().unwrap()
}
|
#![cfg_attr(not(feature = "std"), no_std)]
#[macro_export]
/// Writes a set of cells through an owner in one call.
///
/// `write_all!(owner => a, b, c)` expands to
/// `owner.write_all(hlist![&a, &b, &c])`, then the internal `@destruct`
/// rules unwind the returned HList back into a flat tuple of values.
macro_rules! write_all {
    ($owner:expr => $($cells:expr),*) => {{
        $crate::write_all!(@destruct [()] [$(($cells))*] $owner.write_all($crate::hlist!($(&$cells as &$crate::ICell<_, _>),*)))
    }};
    // Recursive case: peel one `Cons` off the result HList, accumulating the
    // unwrapped value tokens on the left; one `$cells` token is consumed per
    // step so recursion depth matches the cell count.
    (@destruct [$($rest:tt)*] [$first:tt $($cells:tt)*] $value:expr) => {
        match $value {
            $crate::hlist::Cons(value, rest) => $crate::write_all!(@destruct [$($rest)* , value] [$($cells)*] rest)
        }
    };
    // Base case, zero cells: result is `Nil`, expansion is unit.
    (@destruct [()] [] $value:expr) => {
        match $value { $crate::hlist::Nil => () }
    };
    // Base case, one or more cells: the leading `()` accumulator seed is
    // dropped and the collected values become a tuple.
    (@destruct [(), $($rest:tt)*] [] $value:expr) => {
        match $value { $crate::hlist::Nil => ($($rest)*) }
    };
}
mod core;
pub use self::core::{ICell, Identifier, Owner, Transparent};
#[doc(hidden)]
pub mod hlist;
pub mod immovable;
pub mod runtime;
pub mod scoped;
pub mod typeid;
#[cfg(feature = "std")]
pub mod typeid_tl;
|
use crate::homogeneous::Homogeneous;
use numpy::{IntoPyArray, PyArray1, PyArray2};
use pyo3::prelude::{pyfunction, pymodule, Py, PyModule, PyResult, Python};
use pyo3::wrap_pyfunction;
#[pyfunction]
/// Python binding: appends the homogeneous coordinate to a 1-D vector and
/// returns the result as a new numpy array.
fn to_homogeneous_vec(py: Python<'_>, x: &PyArray1<f64>) -> Py<PyArray1<f64>> {
    Homogeneous::to_homogeneous(&x.as_array())
        .into_pyarray(py)
        .to_owned()
}
#[pyfunction]
/// Python binding: row-wise homogeneous conversion of a 2-D array.
fn to_homogeneous_vecs(py: Python<'_>, xs: &PyArray2<f64>) -> Py<PyArray2<f64>> {
    Homogeneous::to_homogeneous(&xs.as_array())
        .into_pyarray(py)
        .to_owned()
}
#[pyfunction]
/// Python binding: strips the homogeneous coordinate from a 1-D vector.
/// The first `.to_owned()` copies the ndarray view into an owned array
/// (from_homogeneous returns a view here); the second converts the python
/// reference into an owned `Py` handle.
fn from_homogeneous_vec(py: Python<'_>, x: &PyArray1<f64>) -> Py<PyArray1<f64>> {
    Homogeneous::from_homogeneous(&x.as_array())
        .to_owned()
        .into_pyarray(py)
        .to_owned()
}
#[pyfunction]
/// Python binding: row-wise removal of the homogeneous coordinate from a
/// 2-D array; see `from_homogeneous_vec` for the double `.to_owned()`.
fn from_homogeneous_vecs(py: Python<'_>, xs: &PyArray2<f64>) -> Py<PyArray2<f64>> {
    Homogeneous::from_homogeneous(&xs.as_array())
        .to_owned()
        .into_pyarray(py)
        .to_owned()
}
#[pymodule(homogeneous)]
/// Registers the four conversion functions in the `homogeneous` Python module.
fn homogeneous_module(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_wrapped(wrap_pyfunction!(to_homogeneous_vec))?;
    m.add_wrapped(wrap_pyfunction!(to_homogeneous_vecs))?;
    m.add_wrapped(wrap_pyfunction!(from_homogeneous_vec))?;
    m.add_wrapped(wrap_pyfunction!(from_homogeneous_vecs))?;
    Ok(())
}
|
//! The following is derived from Rust's
//! library/std/src/os/fd/mod.rs at revision
//! fa68e73e9947be8ffc5b3b46d899e4953a44e7e9.
//!
//! All code in this file is licensed MIT or Apache 2.0 at your option.
//!
//! Owned and borrowed Unix-like file descriptors.
#![cfg_attr(staged_api, unstable(feature = "io_safety", issue = "87074"))]
#![deny(unsafe_op_in_unsafe_fn)]
// `RawFd`, `AsRawFd`, etc.
mod raw;
// `OwnedFd`, `AsFd`, etc.
mod owned;
pub use owned::*;
pub use raw::*;
|
// Copyright 2020 - 2021 Alex Dukhno
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::*;
// Creating a new schema on an empty database reports SchemaCreated.
#[rstest::rstest]
fn create_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "create schema schema_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaCreated));
}
// Creating a schema twice fails the second time with schema_already_exists.
#[rstest::rstest]
fn create_same_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "create schema schema_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaCreated));
    engine
        .execute(Inbound::Query {
            sql: "create schema schema_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_already_exists("schema_name")));
}
// Dropping an existing schema reports SchemaDropped.
#[rstest::rstest]
fn drop_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "create schema schema_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaCreated));
    engine
        .execute(Inbound::Query {
            sql: "drop schema schema_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaDropped));
}
// Dropping a schema that was never created fails with schema_does_not_exist.
#[rstest::rstest]
fn drop_non_existent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "drop schema non_existent;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("non_existent")));
}
// DROP SCHEMA IF EXISTS succeeds even when the schema is absent.
#[rstest::rstest]
fn drop_if_exists_non_existent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "drop schema if exists non_existent;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaDropped));
}
// DROP SCHEMA IF EXISTS with a mixed list drops the existing schema and
// ignores the missing one; recreating the dropped schema then succeeds.
#[rstest::rstest]
fn drop_if_exists_existent_and_non_existent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "create schema existent_schema;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaCreated));
    engine
        .execute(Inbound::Query {
            sql: "drop schema if exists non_existent, existent_schema;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaDropped));
    engine
        .execute(Inbound::Query {
            sql: "create schema existent_schema;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Ok(QueryEvent::SchemaCreated));
}
// SELECT from a table in a missing schema fails with schema_does_not_exist.
#[rstest::rstest]
fn select_from_nonexistent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "select * from non_existent.some_table;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("non_existent")));
}
// SELECT of named columns from a missing schema fails the same way.
#[rstest::rstest]
fn select_named_columns_from_nonexistent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "select column_1 from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("schema_name")));
}
// INSERT into a table in a missing schema fails with schema_does_not_exist.
#[rstest::rstest]
fn insert_into_table_in_nonexistent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "insert into schema_name.table_name values (123);".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("schema_name")));
}
// UPDATE against a table in a missing schema fails with schema_does_not_exist.
#[rstest::rstest]
fn update_records_in_table_from_non_existent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "update schema_name.table_name set column_test=789;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("schema_name")));
}
// DELETE from a table in a missing schema fails with schema_does_not_exist.
#[rstest::rstest]
fn delete_from_table_in_nonexistent_schema(empty_database: (InMemory, ResultCollector)) {
    let (mut engine, collector) = empty_database;
    engine
        .execute(Inbound::Query {
            sql: "delete from schema_name.table_name;".to_owned(),
        })
        .expect("query executed");
    collector
        .lock()
        .unwrap()
        .assert_receive_single(Err(QueryError::schema_does_not_exist("schema_name")));
}
|
use std::fs::File;
use std::io::prelude::*;
// The file, poker.txt, contains one-thousand random hands dealt to two players. Each line of the file contains ten cards (separated by a single space): the first five are Player 1's cards and the last five are Player 2's cards. You can assume that all hands are valid (no invalid characters or repeated cards), each player's hand is in no specific order, and in each hand there is a clear winner.
// How many hands does Player 1 win?
// Suit tags. They are only ever compared for equality (flush detection), so
// the numbering is arbitrary — but the names should be truthful (the original
// mapped 'S' to HEARTS and 'H' to SPADES).
const CLUBS: u8 = 0;
const DIAMONDS: u8 = 1;
const SPADES: u8 = 2;
const HEARTS: u8 = 3;

/// A single playing card.
struct Card {
    suit: u8, // CLUBS / DIAMONDS / SPADES / HEARTS
    rank: u8, // 0 = Two .. 8 = Ten, 9 = Jack, 10 = Queen, 11 = King, 12 = Ace
}

/// A five-card poker hand.
struct Hand {
    cards: [Card; 5],
}

/// Evaluates a hand into `(is_straight, is_flush, sorted_ranks, multiples)`.
///
/// `multiples` lists every `(rank, count)` with `count > 1`, sorted by rank
/// ascending. It is only populated when the hand is neither a straight nor a
/// flush (a five-card straight has five distinct ranks, and a one-suit flush
/// draws five distinct ranks from the deck, so neither can contain pairs).
///
/// NOTE: the ace-low straight (A-2-3-4-5) is not recognised — sorted ranks
/// [0,1,2,3,12] fail the consecutive check. Kept as in the original; the
/// Project Euler input happens not to require it, but confirm before reuse.
fn evaluate_hand(hand: &Hand) -> (bool, bool, Vec<u8>, Vec<(u8, u8)>) {
    let mut straight: bool = true;
    let mut flush: bool = true;
    let mut ranks: Vec<u8> = Vec::new();
    let mut multiples: Vec<(u8, u8)> = Vec::new();
    let suit: u8 = hand.cards[0].suit;
    let mut counts: [u8; 13] = [0; 13]; // occurrences of each rank
    // Gather the card ranks and check for a flush.
    for card in hand.cards.iter() {
        ranks.push(card.rank);
        if card.suit != suit {
            flush = false;
        }
    }
    ranks.sort();
    // Tally rank counts and check for a straight (strictly consecutive ranks).
    for i in 0..ranks.len() {
        counts[ranks[i] as usize] += 1;
        if i >= 1 && ranks[i] != ranks[i - 1] + 1 {
            straight = false;
        }
    }
    if !straight && !flush {
        for i in 0..counts.len() {
            if counts[i] > 1 {
                multiples.push((i as u8, counts[i]));
            }
        }
        multiples.sort();
    }
    (straight, flush, ranks, multiples)
}

/// Compares two equal-length sorted rank lists from the highest card down.
/// Returns true when `me` wins; a complete tie counts as a loss for `me`
/// (the puzzle guarantees a clear winner, so ties never decide a hand).
fn higher_ranks(me: &[u8], them: &[u8]) -> bool {
    for i in (0..me.len()).rev() {
        if me[i] != them[i] {
            return me[i] > them[i];
        }
    }
    false
}

/// If `multiples` describes a full house (one triple, one pair), returns the
/// triple's rank. `multiples` is sorted by rank, so the triple may sit at
/// either index — search by count.
fn full_house_triple(multiples: &[(u8, u8)]) -> Option<u8> {
    if multiples.len() == 2 {
        for &(rank, count) in multiples.iter() {
            if count == 3 {
                return Some(rank);
            }
        }
    }
    None
}

/// If `multiples` is exactly one triple, returns its rank.
fn trips_rank(multiples: &[(u8, u8)]) -> Option<u8> {
    if multiples.len() == 1 && multiples[0].1 == 3 {
        Some(multiples[0].0)
    } else {
        None
    }
}

impl Hand {
    /// Returns true when `self` beats `opponent` under standard poker
    /// ranking: straight flush > quads > full house > flush > straight >
    /// trips > two pair > pair > high card.
    ///
    /// Bug fixes vs. the original:
    /// - Straight-flush and quads ranks used 0 both as "absent" and as the
    ///   rank of a Two, so a 2-6 straight flush or four Twos was silently
    ///   demoted. `Option<u8>` removes the sentinel (`None < Some(r)` under
    ///   `Option`'s derived ordering, so plain `>` compares correctly).
    /// - A full house was only detected when the triple outranked the pair
    ///   (`multiples[1].1 == 3`); three Twos over two Kings was treated as
    ///   two pair. Detection now searches by count.
    /// - Two pair compared the LOWER pair first; it now compares the higher
    ///   pair, then the lower, then the kicker.
    fn beats(&self, opponent: Hand) -> bool {
        let (me_straight, me_flush, me_ranks, me_multiples) = evaluate_hand(self);
        let (them_straight, them_flush, them_ranks, them_multiples) =
            evaluate_hand(&opponent);
        // Straight flush: highest low card wins (ranks are consecutive).
        let me_sf = if me_straight && me_flush { Some(me_ranks[0]) } else { None };
        let them_sf = if them_straight && them_flush { Some(them_ranks[0]) } else { None };
        if me_sf.is_some() || them_sf.is_some() {
            return me_sf > them_sf;
        }
        // Four of a kind: highest quad rank wins.
        let me_quads = if me_multiples.len() == 1 && me_multiples[0].1 == 4 {
            Some(me_multiples[0].0)
        } else {
            None
        };
        let them_quads = if them_multiples.len() == 1 && them_multiples[0].1 == 4 {
            Some(them_multiples[0].0)
        } else {
            None
        };
        if me_quads.is_some() || them_quads.is_some() {
            return me_quads > them_quads;
        }
        // Full house: ranked by the triple.
        let me_fh = full_house_triple(&me_multiples);
        let them_fh = full_house_triple(&them_multiples);
        if me_fh.is_some() || them_fh.is_some() {
            return me_fh > them_fh;
        }
        // Flush: compare high cards.
        if me_flush || them_flush {
            if me_flush && them_flush {
                return higher_ranks(&me_ranks, &them_ranks);
            }
            return me_flush;
        }
        // Straight: compare the low card (ranks are consecutive).
        if me_straight || them_straight {
            if me_straight && them_straight {
                return me_ranks[0] > them_ranks[0];
            }
            return me_straight;
        }
        // Three of a kind: highest triple wins; trips beat any pairs.
        let me_trips = trips_rank(&me_multiples);
        let them_trips = trips_rank(&them_multiples);
        if me_trips.is_some() || them_trips.is_some() {
            return me_trips > them_trips;
        }
        // Two pair: higher pair, then lower pair, then the kicker via the
        // full sorted rank lists (equal pairs cancel out there).
        if me_multiples.len() == 2 || them_multiples.len() == 2 {
            if me_multiples.len() == 2 && them_multiples.len() == 2 {
                if me_multiples[1].0 != them_multiples[1].0 {
                    return me_multiples[1].0 > them_multiples[1].0;
                }
                if me_multiples[0].0 != them_multiples[0].0 {
                    return me_multiples[0].0 > them_multiples[0].0;
                }
                return higher_ranks(&me_ranks, &them_ranks);
            }
            return me_multiples.len() == 2;
        }
        // One pair: higher pair, then kickers.
        if !me_multiples.is_empty() || !them_multiples.is_empty() {
            if !me_multiples.is_empty() && !them_multiples.is_empty() {
                if me_multiples[0].0 != them_multiples[0].0 {
                    return me_multiples[0].0 > them_multiples[0].0;
                }
                return higher_ranks(&me_ranks, &them_ranks);
            }
            return !me_multiples.is_empty();
        }
        // High card.
        higher_ranks(&me_ranks, &them_ranks)
    }
}

/// Builds a `Hand` from exactly five cards.
fn hand_from(cards: Vec<Card>) -> Hand {
    let mut it = cards.into_iter();
    Hand {
        cards: [
            it.next().unwrap(),
            it.next().unwrap(),
            it.next().unwrap(),
            it.next().unwrap(),
            it.next().unwrap(),
        ],
    }
}

/// Parses one input line (e.g. "5H 5C 6S 7S KD 2C 3S 8S TD 9S") into the two
/// players' hands: the first five cards are Player 1's, the last five
/// Player 2's. Assumes valid input, as the problem statement guarantees.
fn parse_line(line: &str) -> (Hand, Hand) {
    let mut cards: Vec<Card> = Vec::new();
    for card_text in line.split_whitespace() {
        let bytes = card_text.as_bytes();
        let suit: u8 = match bytes[1] {
            b'C' => CLUBS,
            b'D' => DIAMONDS,
            b'S' => SPADES,
            _ => HEARTS, // 'H'
        };
        // Ranks are zero-based: '2' -> 0 ... 'A' -> 12.
        let rank: u8 = match bytes[0] {
            b'2' => 0,
            b'3' => 1,
            b'4' => 2,
            b'5' => 3,
            b'6' => 4,
            b'7' => 5,
            b'8' => 6,
            b'9' => 7,
            b'T' => 8,
            b'J' => 9,
            b'Q' => 10,
            b'K' => 11,
            b'A' => 12,
            _ => 13, // unreachable for valid input
        };
        cards.push(Card { suit, rank });
    }
    let player2_cards = cards.split_off(5);
    (hand_from(cards), hand_from(player2_cards))
}
/// Project Euler 54: reads `p054_poker.txt` (one deal of two five-card hands
/// per line) and returns how many deals Player 1 wins.
/// Panics if the file is missing or unreadable.
pub fn p54() -> u64 {
    let mut player1_wins = 0;
    // open the file
    let mut file = File::open("p054_poker.txt").expect("file not found");
    // read the file into a string
    let mut s = String::new();
    file.read_to_string(&mut s).expect("could not read file");
    let lines = s.lines();
    for line in lines {
        // parse the data
        let (player1, player2) = parse_line(line);
        // work out who wins
        if player1.beats(player2) {
            player1_wins += 1;
        }
    }
    // return player 1 count
    player1_wins
}
|
// Copyright 2020 IOTA Stiftung
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
use crate::convert;
use num_traits::{CheckedAdd, CheckedSub, Num};
use std::{convert::TryFrom, fmt, hash};
/// Balanced trits.
pub mod balanced;
/// Unbalanced trits.
pub mod unbalanced;
// Reexports
pub use self::{balanced::Btrit, unbalanced::Utrit};
/// A trait implemented by both balanced ([`Btrit`]) and unbalanced ([`Utrit`]) trits.
pub trait Trit:
    Copy + Sized + fmt::Debug + hash::Hash + Into<i8> + Ord + PartialEq + ShiftTernary + TryFrom<i8> + fmt::Display
{
    /// Attempt to increment the value of this trit, returning [`None`] if an overflow occurred.
    fn checked_increment(self) -> Option<Self>;
    /// The zero value of this trit.
    fn zero() -> Self;
    /// Turn this trit reference into one with an arbitrary lifetime.
    ///
    /// Note that this is largely an implementation detail and is rarely useful for API users.
    ///
    /// NOTE(review): implementations presumably return references to static
    /// per-value instances — confirm before relying on the `'a` lifetime.
    fn as_arbitrary_ref<'a>(&self) -> &'a Self;
    /// Attempt to add this trit to a numeric value.
    ///
    /// Returns a [`convert::Error`] when the addition over- or underflows `I`.
    fn add_to_num<I: Num + CheckedAdd + CheckedSub>(&self, n: I) -> Result<I, convert::Error>;
}
/// A trait implemented by trits that can be shifted between balance domains.
// TODO: Is this a good API?
pub trait ShiftTernary: Sized {
    /// The trit type that results from shifting this trit.
    /// The `Target = Self` bound makes the shift an involution: shifting
    /// twice round-trips back to the original type.
    type Target: ShiftTernary<Target = Self>;
    /// Shift this trit into the opposite balance domain.
    fn shift(self) -> Self::Target;
}
|
#![forbid(unsafe_code)]
#[macro_use]
extern crate prost_derive;
/// Protocol for communicating with a daemon managing a central store in multi-user mode.
pub mod daemon {
    // Splices in the build-script-generated Rust types for the
    // `deck.daemon.v1alpha1` protobuf package (OUT_DIR is set by cargo).
    include!(concat!(env!("OUT_DIR"), "/deck.daemon.v1alpha1.rs"));
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use common_ast::ast::Expr;
use common_ast::ast::Literal;
use common_ast::ast::OrderByExpr;
use common_ast::ast::SelectTarget;
use common_exception::ErrorCode;
use common_exception::Result;
use crate::binder::select::SelectList;
use crate::binder::sort::OrderItem;
use crate::binder::sort::OrderItems;
use crate::normalize_identifier;
use crate::optimizer::SExpr;
use crate::plans::AggregateFunction;
use crate::plans::BoundColumnRef;
use crate::plans::EvalScalar;
use crate::plans::ScalarItem;
use crate::plans::Sort;
use crate::plans::SortItem;
use crate::plans::Window;
use crate::plans::WindowFunc;
use crate::plans::WindowFuncFrame;
use crate::BindContext;
use crate::Binder;
use crate::ColumnBinding;
use crate::IndexType;
use crate::MetadataRef;
use crate::ScalarBinder;
use crate::ScalarExpr;
use crate::Visibility;
impl Binder {
pub(super) async fn fetch_window_order_by_expr(
&mut self,
select_list: &[SelectTarget],
) -> Vec<Vec<OrderByExpr>> {
let mut window_order_bys = vec![];
for select_target in select_list {
match select_target {
SelectTarget::QualifiedName { .. } => continue,
SelectTarget::AliasedExpr { expr, .. } => match expr.as_ref() {
Expr::FunctionCall { window, .. } => {
if let Some(window) = window {
window_order_bys.push(window.order_by.clone());
}
}
_ => continue,
},
}
}
window_order_bys
}
    /// Resolves window ORDER BY expressions against the select-list
    /// `projections`, producing an [`OrderItems`]. Each expression is one of:
    /// - a column reference, which must match a projection (else a semantic
    ///   error);
    /// - a 1-based positional literal into the projection list;
    /// - any other expression, which is bound, rewritten against
    ///   `scalar_items`, and registered as a new scalar item requiring
    ///   evaluation.
    pub(super) async fn fetch_window_order_items(
        &mut self,
        from_context: &BindContext,
        scalar_items: &mut HashMap<IndexType, ScalarItem>,
        projections: &[ColumnBinding],
        window_order_by: &[OrderByExpr],
    ) -> Result<OrderItems> {
        let mut order_items = Vec::with_capacity(window_order_by.len());
        for order in window_order_by {
            match &order.expr {
                Expr::ColumnRef {
                    database: ref database_name,
                    table: ref table_name,
                    column: ref ident,
                    ..
                } => {
                    // We first search the identifier in select list
                    let mut found = false;
                    let database = database_name
                        .as_ref()
                        .map(|ident| normalize_identifier(ident, &self.name_resolution_ctx).name);
                    let table = table_name
                        .as_ref()
                        .map(|ident| normalize_identifier(ident, &self.name_resolution_ctx).name);
                    let column = normalize_identifier(ident, &self.name_resolution_ctx).name;
                    for item in projections.iter() {
                        if BindContext::match_column_binding(
                            database.as_deref(),
                            table.as_deref(),
                            column.as_str(),
                            item,
                        ) {
                            // Evaluation is only needed when the projected
                            // scalar is not already a plain column reference.
                            order_items.push(OrderItem {
                                expr: order.clone(),
                                index: item.index,
                                name: item.column_name.clone(),
                                need_eval_scalar: scalar_items.get(&item.index).map_or(
                                    false,
                                    |scalar_item| {
                                        !matches!(
                                            &scalar_item.scalar,
                                            ScalarExpr::BoundColumnRef(_)
                                        )
                                    },
                                ),
                            });
                            found = true;
                            break;
                        }
                    }
                    if found {
                        continue;
                    }
                    return Err(ErrorCode::SemanticError(
                        "for WINDOW FUNCTION, ORDER BY expressions must appear in select list"
                            .to_string(),
                    )
                    .set_span(order.expr.span()));
                }
                Expr::Literal {
                    lit: Literal::UInt64(index),
                    ..
                } => {
                    // Positional reference: 1-based in SQL, 0-based here.
                    let index = *index as usize - 1;
                    if index >= projections.len() {
                        return Err(ErrorCode::SemanticError(format!(
                            "ORDER BY position {} is not in select list",
                            index + 1
                        ))
                        .set_span(order.expr.span()));
                    }
                    order_items.push(OrderItem {
                        expr: order.clone(),
                        name: projections[index].column_name.clone(),
                        index: projections[index].index,
                        need_eval_scalar: scalar_items.get(&projections[index].index).map_or(
                            false,
                            |scalar_item| {
                                !matches!(&scalar_item.scalar, ScalarExpr::BoundColumnRef(_))
                            },
                        ),
                    });
                }
                _ => {
                    // Arbitrary expression: bind it in a context extended
                    // with the projected columns so it may reference them.
                    let mut bind_context = from_context.clone();
                    for column_binding in projections.iter() {
                        if bind_context.columns.contains(column_binding) {
                            continue;
                        }
                        bind_context.columns.push(column_binding.clone());
                    }
                    let mut scalar_binder = ScalarBinder::new(
                        &mut bind_context,
                        self.ctx.clone(),
                        &self.name_resolution_ctx,
                        self.metadata.clone(),
                        &[],
                    );
                    let (bound_expr, _) = scalar_binder.bind(&order.expr).await?;
                    // Inline any already-computed scalar items referenced by
                    // the bound expression.
                    let rewrite_scalar = self
                        .rewrite_scalar_with_replacement(&bound_expr, &|nest_scalar| {
                            if let ScalarExpr::BoundColumnRef(BoundColumnRef { column, .. }) =
                                nest_scalar
                            {
                                if let Some(scalar_item) = scalar_items.get(&column.index) {
                                    return Ok(Some(scalar_item.scalar.clone()));
                                }
                            }
                            Ok(None)
                        })
                        .map_err(|e| ErrorCode::SemanticError(e.message()))?;
                    let column_binding = self.create_column_binding(
                        None,
                        None,
                        format!("{:#}", order.expr),
                        rewrite_scalar.data_type()?,
                    );
                    order_items.push(OrderItem {
                        expr: order.clone(),
                        name: column_binding.column_name.clone(),
                        index: column_binding.index,
                        need_eval_scalar: true,
                    });
                    scalar_items.insert(column_binding.index, ScalarItem {
                        scalar: rewrite_scalar,
                        index: column_binding.index,
                    });
                }
            }
        }
        Ok(OrderItems { items: order_items })
    }
pub(crate) async fn bind_window_order_by(
&mut self,
_from_context: &BindContext,
order_by: OrderItems,
_select_list: &SelectList<'_>,
scalar_items: &mut HashMap<IndexType, ScalarItem>,
child: SExpr,
) -> Result<SExpr> {
let mut order_by_items = Vec::with_capacity(order_by.items.len());
let mut scalars = vec![];
for order in order_by.items {
if let Expr::ColumnRef {
database: ref database_name,
table: ref table_name,
..
} = order.expr.expr
{
if let (Some(table_name), Some(database_name)) = (table_name, database_name) {
let catalog_name = self.ctx.get_current_catalog();
let catalog = self.ctx.get_catalog(catalog_name.as_str())?;
catalog
.get_table(
&self.ctx.get_tenant(),
&database_name.name,
&table_name.name,
)
.await?;
}
}
if order.need_eval_scalar {
if let Entry::Occupied(entry) = scalar_items.entry(order.index) {
let (index, item) = entry.remove_entry();
scalars.push(ScalarItem {
scalar: item.scalar,
index,
});
}
}
// null is the largest value in databend, smallest in hive
// todo: rewrite after https://github.com/jorgecarleitao/arrow2/pull/1286 is merged
let default_nulls_first = !self
.ctx
.get_settings()
.get_sql_dialect()
.unwrap()
.is_null_biggest();
let order_by_item = SortItem {
index: order.index,
asc: order.expr.asc.unwrap_or(true),
nulls_first: order.expr.nulls_first.unwrap_or(default_nulls_first),
};
order_by_items.push(order_by_item);
}
let mut new_expr = if !scalars.is_empty() {
let eval_scalar = EvalScalar { items: scalars };
SExpr::create_unary(eval_scalar.into(), child)
} else {
child
};
let sort_plan = Sort {
items: order_by_items,
limit: None,
};
new_expr = SExpr::create_unary(sort_plan.into(), new_expr);
Ok(new_expr)
}
pub(super) async fn bind_window_function(
&mut self,
window_info: &WindowInfo,
child: SExpr,
) -> Result<SExpr> {
// Build a ProjectPlan, which will produce aggregate arguments and window partitions
let mut scalar_items: Vec<ScalarItem> = Vec::with_capacity(
window_info.aggregate_arguments.len() + window_info.partition_by_items.len(),
);
for arg in window_info.aggregate_arguments.iter() {
scalar_items.push(arg.clone());
}
for part in window_info.partition_by_items.iter() {
scalar_items.push(part.clone());
}
let mut new_expr = child;
if !scalar_items.is_empty() {
let eval_scalar = EvalScalar {
items: scalar_items,
};
new_expr = SExpr::create_unary(eval_scalar.into(), new_expr);
}
let window_plan = Window {
aggregate_function: window_info.aggregate_function.clone(),
partition_by: window_info.partition_by_items.clone(),
frame: window_info.frame.clone(),
};
new_expr = SExpr::create_unary(window_plan.into(), new_expr);
Ok(new_expr)
}
/// Analyze window functions in select clause, this will rewrite window functions.
pub(crate) fn analyze_window_select(
&mut self,
bind_context: &mut BindContext,
select_list: &mut SelectList,
) -> Result<()> {
for item in select_list.items.iter_mut() {
if let ScalarExpr::WindowFunction(window_func) = &item.scalar {
let new_scalar =
self.replace_window_function(bind_context, self.metadata.clone(), window_func)?;
item.scalar = new_scalar;
}
}
Ok(())
}
fn replace_window_function(
&mut self,
bind_context: &mut BindContext,
metadata: MetadataRef,
window: &WindowFunc,
) -> Result<ScalarExpr> {
let window_infos = &mut bind_context.windows;
let mut replaced_args: Vec<ScalarExpr> = Vec::with_capacity(window.agg_func.args.len());
let mut replaced_partition_items: Vec<ScalarExpr> =
Vec::with_capacity(window.partition_by.len());
// resolve aggregate function args in window function.
let mut agg_args = vec![];
for (i, arg) in window.agg_func.args.iter().enumerate() {
let name = format!("{}_arg_{}", &window.agg_func.func_name, i);
if let ScalarExpr::BoundColumnRef(column_ref) = arg {
replaced_args.push(column_ref.clone().into());
agg_args.push(ScalarItem {
index: column_ref.column.index,
scalar: arg.clone(),
});
} else {
let index = metadata
.write()
.add_derived_column(name.clone(), arg.data_type()?);
// Generate a ColumnBinding for each argument of aggregates
let column_binding = ColumnBinding {
database_name: None,
table_name: None,
column_name: name,
index,
data_type: Box::new(arg.data_type()?),
visibility: Visibility::Visible,
};
replaced_args.push(
BoundColumnRef {
span: arg.span(),
column: column_binding.clone(),
}
.into(),
);
agg_args.push(ScalarItem {
index,
scalar: arg.clone(),
});
}
}
// resolve partition by
let mut partition_by_items = vec![];
for (i, part) in window.partition_by.iter().enumerate() {
let name = format!("{}_part_{}", &window.agg_func.func_name, i);
if let ScalarExpr::BoundColumnRef(column_ref) = part {
replaced_partition_items.push(column_ref.clone().into());
partition_by_items.push(ScalarItem {
index: column_ref.column.index,
scalar: part.clone(),
});
} else {
let index = metadata
.write()
.add_derived_column(name.clone(), part.data_type()?);
// Generate a ColumnBinding for each argument of aggregates
let column_binding = ColumnBinding {
database_name: None,
table_name: None,
column_name: name,
index,
data_type: Box::new(part.data_type()?),
visibility: Visibility::Visible,
};
replaced_partition_items.push(
BoundColumnRef {
span: part.span(),
column: column_binding.clone(),
}
.into(),
);
partition_by_items.push(ScalarItem {
index,
scalar: part.clone(),
});
}
}
let index = metadata
.write()
.add_derived_column(window.display_name(), *window.agg_func.return_type.clone());
let replaced_agg = AggregateFunction {
display_name: window.agg_func.display_name.clone(),
func_name: window.agg_func.func_name.clone(),
distinct: window.agg_func.distinct,
params: window.agg_func.params.clone(),
args: replaced_args,
return_type: window.agg_func.return_type.clone(),
};
// create window info
let window_info = WindowInfo {
aggregate_function: ScalarItem {
scalar: replaced_agg.clone().into(),
index,
},
aggregate_arguments: agg_args,
partition_by_items,
frame: window.frame.clone(),
};
// push window info to BindContext
window_infos.push(window_info);
let replaced_window = WindowFunc {
agg_func: replaced_agg,
partition_by: replaced_partition_items,
frame: window.frame.clone(),
};
Ok(replaced_window.into())
}
}
/// Bound information for a single window function, produced by
/// `replace_window_function` and consumed by `bind_window_function`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct WindowInfo {
    /// The window's aggregate function together with the derived column
    /// index that will hold its output.
    pub aggregate_function: ScalarItem,
    /// Arguments of the aggregate; pre-evaluated by an `EvalScalar` node.
    pub aggregate_arguments: Vec<ScalarItem>,
    /// PARTITION BY expressions; pre-evaluated likewise.
    pub partition_by_items: Vec<ScalarItem>,
    /// Window frame specification.
    pub frame: WindowFuncFrame,
}
|
pub use lzma_sys::*;
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use chrono_tz::Tz;
use common_exception::ErrorCode;
use common_exception::Result;
use common_sql::plans::SettingPlan;
use crate::interpreters::Interpreter;
use crate::pipelines::PipelineBuildResult;
use crate::sessions::QueryAffect;
use crate::sessions::QueryContext;
use crate::sessions::TableContext;
/// Interpreter for `SET <variable> = <value>` statements: applies each
/// variable from the plan to the session settings.
pub struct SettingInterpreter {
    // Query context providing access to the session settings.
    ctx: Arc<QueryContext>,
    // The bound settings plan (list of variables to set).
    set: SettingPlan,
}
impl SettingInterpreter {
    /// Creates the interpreter for a bound settings plan.
    /// Infallible in practice; the `Result` matches the interpreter
    /// factory convention.
    pub fn try_create(ctx: Arc<QueryContext>, set: SettingPlan) -> Result<Self> {
        Ok(Self { ctx, set })
    }
}
#[async_trait::async_trait]
impl Interpreter for SettingInterpreter {
    /// Interpreter name used in logs and diagnostics.
    fn name(&self) -> &str {
        "SettingInterpreter"
    }
    /// Applies each variable in the plan to the session settings, then
    /// records the applied changes as a `QueryAffect` on the context.
    /// Variables that are only accepted for driver compatibility
    /// (`sql_mode`, `autocommit`) are ignored and not reported.
    async fn execute2(&self) -> Result<PipelineBuildResult> {
        let plan = self.set.clone();
        // Parallel vectors describing the settings that were applied.
        let mut keys: Vec<String> = vec![];
        let mut values: Vec<String> = vec![];
        let mut is_globals: Vec<bool> = vec![];
        for var in plan.vars {
            // `ok` tracks whether the variable was actually applied.
            let ok = match var.variable.to_lowercase().as_str() {
                // To be compatible with some drivers
                "sql_mode" | "autocommit" => false,
                "timezone" => {
                    // check if the timezone is valid
                    // Strip surrounding single/double quotes before parsing.
                    let tz = var.value.trim_matches(|c| c == '\'' || c == '\"');
                    // Parse only for validation; the parsed value is unused.
                    let _ = tz.parse::<Tz>().map_err(|_| {
                        ErrorCode::InvalidTimezone(format!("Invalid Timezone: {}", var.value))
                    })?;
                    // The stored value is the quote-trimmed string.
                    self.ctx.get_settings().set_settings(
                        var.variable.clone(),
                        tz.to_string(),
                        var.is_global,
                    )?;
                    true
                }
                _ => {
                    // All other settings are stored verbatim.
                    self.ctx.get_settings().set_settings(
                        var.variable.clone(),
                        var.value.clone(),
                        var.is_global,
                    )?;
                    true
                }
            };
            if ok {
                // NOTE(review): for "timezone" the affect records the
                // original (possibly quoted) value, while the stored
                // setting is the trimmed one — confirm this asymmetry is
                // intended.
                keys.push(var.variable.clone());
                values.push(var.value.clone());
                is_globals.push(var.is_global);
            }
        }
        self.ctx.set_affect(QueryAffect::ChangeSettings {
            keys,
            values,
            is_globals,
        });
        Ok(PipelineBuildResult::create())
    }
}
|
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;
/// FFI mirror of the Vulkan `VkMappedMemoryRange` structure; describes a
/// range of mapped device memory (layout must match the C struct exactly,
/// hence `#[repr(C)]`).
#[repr(C)]
#[derive(Debug)]
pub struct VkMappedMemoryRange {
    /// Structure type tag; set to `VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE`
    /// by `new`.
    pub sType: VkStructureType,
    /// Extension chain pointer; null when unused.
    pub pNext: *const c_void,
    /// The device memory object the range refers to.
    pub memory: VkDeviceMemory,
    /// Byte offset of the range within `memory`.
    pub offset: VkDeviceSize,
    /// Size of the range in bytes.
    pub size: VkDeviceSize,
}
impl VkMappedMemoryRange {
    /// Builds a range over `memory` with the correct structure-type tag
    /// and an empty (`null`) extension chain.
    pub fn new(memory: VkDeviceMemory, offset: VkDeviceSize, size: VkDeviceSize) -> Self {
        Self {
            sType: VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            pNext: ptr::null(),
            memory,
            offset,
            size,
        }
    }
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Generated Windows bindings: an opaque WinRT interface pointer.
pub type SceneLightingEffect = *mut ::core::ffi::c_void;
/// WinRT enum surfaced as a transparent newtype over `i32` (generated
/// binding style; variants are associated constants, not Rust variants).
#[repr(transparent)]
pub struct SceneLightingEffectReflectanceModel(pub i32);
impl SceneLightingEffectReflectanceModel {
    pub const BlinnPhong: Self = Self(0i32);
    pub const PhysicallyBasedBlinnPhong: Self = Self(1i32);
}
// Manual Copy/Clone impls (instead of derive) match the generator's style.
impl ::core::marker::Copy for SceneLightingEffectReflectanceModel {}
impl ::core::clone::Clone for SceneLightingEffectReflectanceModel {
    fn clone(&self) -> Self {
        // Plain bitwise copy of the wrapped i32.
        *self
    }
}
|
//! Implements custom visitors to handle de-serializing string or vec
//! of strings.
use serde::de;
use serde::Deserialize;
use std::fmt;
use std::marker::PhantomData;
/// Actually utilized [OptionalVecOrStringVisitor]. Used with serde's attribute
/// macro.
///
/// e.g.:
/// ```ignore
/// #[serde(default)]
/// #[serde(deserialize_with = "stringorvec::deserialize_optional_vec_or_string")]
/// pub on_project_start: Option<Vec<String>>,
/// ```
pub fn deserialize_optional_vec_or_string<'de, D>(d: D) -> Result<Option<Vec<String>>, D::Error>
where
    D: de::Deserializer<'de>,
{
    // Delegate to the visitor: `deserialize_option` routes a missing/null
    // value to `visit_none` (-> None) and a present value to `visit_some`.
    d.deserialize_option(OptionalVecOrStringVisitor)
}
/// Visitor de-serializing optional string (to a vec of single
/// string if specified) or optional vec of strings.
struct OptionalVecOrStringVisitor;
impl<'de> de::Visitor<'de> for OptionalVecOrStringVisitor {
    type Value = Option<Vec<String>>;

    /// Human-readable expectation used in serde error messages.
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("Optional String or Vec of Strings")
    }

    /// A missing or null value deserializes to `None`.
    fn visit_none<E>(self) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        Ok(None)
    }

    /// A present value is forwarded to [`StringOrVecVisitor`] and wrapped
    /// in `Some`.
    fn visit_some<D>(self, d: D) -> Result<Option<Vec<String>>, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        d.deserialize_any(StringOrVecVisitor(PhantomData)).map(Some)
    }
}
/// Visitor de-serializing string (to a vec of single string) or vec of strings.
/// Visited from [OptionalVecOrStringVisitor].
struct StringOrVecVisitor(PhantomData<Vec<String>>);
impl<'de> de::Visitor<'de> for StringOrVecVisitor {
    type Value = Vec<String>;

    /// Human-readable expectation used in serde error messages.
    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "string or list of strings")
    }

    /// A bare string becomes a one-element vector.
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: de::Error,
    {
        Ok(vec![String::from(value)])
    }

    /// A sequence is deserialized element-wise into a `Vec<String>`.
    fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error>
    where
        S: de::SeqAccess<'de>,
    {
        Vec::<String>::deserialize(de::value::SeqAccessDeserializer::new(visitor))
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use cm_fidl_validator;
use cm_json::{self, cm, Error};
use fidl_fuchsia_data as fdata;
use fidl_fuchsia_sys2::{
CapabilityType, ChildDecl, ComponentDecl, ExposeDecl, OfferDecl, OfferTarget, Relation,
RelativeId, UseDecl,
};
use serde_json::{Map, Value};
/// Converts the contents of a CM file and produces the equivalent FIDL.
/// The mapping between CM-JSON and CM-FIDL is 1-1. The only difference is the language semantics
/// used to express particular data structures.
/// This function also applies cm_fidl_validator to the generated FIDL.
pub fn translate(buffer: &str) -> Result<ComponentDecl, Error> {
let json = cm_json::from_json_str(&buffer)?;
cm_json::validate_json(&json, cm_json::CM_SCHEMA)?;
let document: cm::Document = serde_json::from_str(&buffer)
.map_err(|e| Error::parse(format!("Couldn't read input as struct: {}", e)))?;
let decl = translate_cm(document)?;
cm_fidl_validator::validate(&decl).map_err(|errs| {
let errs_str: Vec<String> = errs.iter().map(|e| format!("{}", e)).collect();
Error::parse(errs_str.join(","))
})?;
Ok(decl)
}
fn translate_cm(document: cm::Document) -> Result<ComponentDecl, Error> {
// We can't deserialize into a ComponentDecl fidl directly because the autogenerated rust
// bindings do not contain the necessary attributes to deserialize correctly. Also, the
// deserialization of freeform objects to dictionaries is not trivial.
let mut out = new_component_decl();
if let Some(program) = document.program {
out.program = Some(dictionary_from_map(program)?);
}
if let Some(uses) = document.uses {
out.uses = Some(translate_uses(uses)?);
}
if let Some(exposes) = document.exposes {
out.exposes = Some(translate_exposes(exposes)?);
}
if let Some(offers) = document.offers {
out.offers = Some(translate_offers(offers)?);
}
if let Some(children) = document.children {
out.children = Some(translate_children(children)?);
}
if let Some(facets) = document.facets {
out.facets = Some(dictionary_from_map(facets)?);
}
Ok(out)
}
/// Translates the `uses` section; fails on an unknown capability type.
fn translate_uses(use_in: Vec<cm::Use>) -> Result<Vec<UseDecl>, Error> {
    use_in
        .into_iter()
        .map(|use_| {
            Ok(UseDecl {
                type_: Some(capability_from_str(&use_.r#type)?),
                source_path: Some(use_.source_path),
                target_path: Some(use_.target_path),
            })
        })
        .collect()
}
/// Translates the `exposes` section; fails on an unknown capability type
/// or relation.
fn translate_exposes(expose_in: Vec<cm::Expose>) -> Result<Vec<ExposeDecl>, Error> {
    expose_in
        .into_iter()
        .map(|expose| {
            let type_ = capability_from_str(&expose.r#type)?;
            Ok(ExposeDecl {
                type_: Some(type_),
                source_path: Some(expose.source_path),
                source: Some(RelativeId {
                    relation: Some(relation_from_str(&expose.source.relation)?),
                    child_name: expose.source.child_name,
                }),
                target_path: Some(expose.target_path),
            })
        })
        .collect()
}
fn translate_offers(offer_in: Vec<cm::Offer>) -> Result<Vec<OfferDecl>, Error> {
let mut out_offers = vec![];
for offer in offer_in {
let type_ = capability_from_str(&offer.r#type)?;
let source = RelativeId {
relation: Some(relation_from_str(&offer.source.relation)?),
child_name: offer.source.child_name,
};
let mut out_targets = vec![];
for target in offer.targets {
out_targets.push(OfferTarget {
target_path: Some(target.target_path),
child_name: Some(target.child_name),
});
}
out_offers.push(OfferDecl {
type_: Some(type_),
source_path: Some(offer.source_path),
source: Some(source),
targets: Some(out_targets),
});
}
Ok(out_offers)
}
/// Translates the `children` section (infallible field-by-field mapping;
/// the `Result` matches its sibling translators).
fn translate_children(children_in: Vec<cm::Child>) -> Result<Vec<ChildDecl>, Error> {
    Ok(children_in
        .into_iter()
        .map(|child| ChildDecl { name: Some(child.name), uri: Some(child.uri) })
        .collect())
}
/// Builds a FIDL dictionary from a JSON object (null members are dropped;
/// see `do_dictionary_from_map`).
fn dictionary_from_map(in_obj: Map<String, serde_json::Value>) -> Result<fdata::Dictionary, Error> {
    let mut out = fdata::Dictionary { entries: Vec::new() };
    do_dictionary_from_map(in_obj, &mut out)?;
    Ok(out)
}
/// Appends one FIDL entry per non-null member of `in_obj` to `out`.
fn do_dictionary_from_map(
    in_obj: Map<String, Value>,
    out: &mut fdata::Dictionary,
) -> Result<(), Error> {
    for (key, raw) in in_obj {
        // `convert_value` yields None for JSON null; such keys are omitted.
        if let Some(value) = convert_value(raw)? {
            out.entries.push(fdata::Entry { key, value: Some(value) });
        }
    }
    Ok(())
}
/// Converts a JSON value into the corresponding boxed FIDL `fdata::Value`.
///
/// Returns `Ok(None)` for JSON `null`; null elements are likewise dropped
/// from arrays (and, via `do_dictionary_from_map`, from objects). Numbers
/// prefer `i64` and fall back to `f64`.
///
/// # Errors
/// Returns `Error::Parse` when a number fits neither `i64` nor `f64`.
fn convert_value(v: Value) -> Result<Option<Box<fdata::Value>>, Error> {
    let converted = match v {
        Value::Null => return Ok(None),
        Value::Bool(b) => fdata::Value::Bit(b),
        Value::Number(n) => {
            if let Some(i) = n.as_i64() {
                fdata::Value::Inum(i)
            } else if let Some(f) = n.as_f64() {
                fdata::Value::Fnum(f)
            } else {
                return Err(Error::Parse(format!("Number is out of range: {}", n)));
            }
        }
        // `s` is already owned; the previous `s.clone()` was a redundant
        // allocation.
        Value::String(s) => fdata::Value::Str(s),
        Value::Array(a) => {
            let mut values = vec![];
            for elem in a {
                if let Some(value) = convert_value(elem)? {
                    values.push(Some(value));
                }
            }
            fdata::Value::Vec(fdata::Vector { values })
        }
        Value::Object(o) => {
            let mut dict = fdata::Dictionary { entries: vec![] };
            do_dictionary_from_map(o, &mut dict)?;
            fdata::Value::Dict(dict)
        }
    };
    Ok(Some(Box::new(converted)))
}
fn capability_from_str(value: &str) -> Result<CapabilityType, Error> {
match value {
cm::SERVICE => Ok(CapabilityType::Service),
cm::DIRECTORY => Ok(CapabilityType::Directory),
_ => Err(Error::parse(format!("Unknown capability type: {}", value))),
}
}
fn relation_from_str(value: &str) -> Result<Relation, Error> {
match value {
cm::REALM => Ok(Relation::Realm),
cm::SELF => Ok(Relation::Myself),
cm::CHILD => Ok(Relation::Child),
_ => Err(Error::parse(format!("Unknown relation: {}", value))),
}
}
/// Returns a `ComponentDecl` with every section unset; `translate_cm`
/// fills in only the sections present in the input document.
fn new_component_decl() -> ComponentDecl {
    ComponentDecl {
        program: None,
        uses: None,
        exposes: None,
        offers: None,
        facets: None,
        children: None,
    }
}
#[cfg(test)]
mod tests {
use super::*;
use cm_json::CM_SCHEMA;
use serde_json::json;
fn translate_test(input: serde_json::value::Value, expected_output: ComponentDecl) {
let component_decl = translate(&format!("{}", input)).expect("translation failed");
assert_eq!(component_decl, expected_output);
}
macro_rules! test_translate {
(
$(
$test_name:ident => {
input = $input:expr,
output = $output:expr,
},
)+
) => {
$(
#[test]
fn $test_name() {
translate_test($input, $output);
}
)+
}
}
#[test]
fn test_translate_invalid_cm_fails() {
let input = json!({
"exposes": [
{
"type": "nothing",
"source_path": "/svc/fuchsia.logger.Log",
"source": {
"relation": "self"
},
"target_path": "/svc/fuchsia.logger.Log"
}
]
});
let expected_res: Result<ComponentDecl, Error> = Err(Error::validate_schema(
CM_SCHEMA,
"Pattern condition is not met at /exposes/0/type",
));
let res = translate(&format!("{}", input));
assert_eq!(format!("{:?}", res), format!("{:?}", expected_res));
}
test_translate! {
test_translate_empty => {
input = json!({}),
output = new_component_decl(),
},
test_translate_program => {
input = json!({
"program": {
"binary": "bin/app"
}
}),
output = {
let program = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "binary".to_string(),
value: Some(Box::new(fdata::Value::Str("bin/app".to_string()))),
}
]};
let mut decl = new_component_decl();
decl.program = Some(program);
decl
},
},
test_translate_dictionary_primitive => {
input = json!({
"program": {
"string": "bar",
"int": -42,
"float": 3.14,
"bool": true,
"ignore": null
}
}),
output = {
let program = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "bool".to_string(),
value: Some(Box::new(fdata::Value::Bit(true))),
},
fdata::Entry{
key: "float".to_string(),
value: Some(Box::new(fdata::Value::Fnum(3.14))),
},
fdata::Entry{
key: "int".to_string(),
value: Some(Box::new(fdata::Value::Inum(-42))),
},
fdata::Entry{
key: "string".to_string(),
value: Some(Box::new(fdata::Value::Str("bar".to_string()))),
},
]};
let mut decl = new_component_decl();
decl.program = Some(program);
decl
},
},
test_translate_dictionary_nested => {
input = json!({
"program": {
"obj": {
"array": [
{
"string": "bar"
},
-42
],
},
"bool": true
}
}),
output = {
let dict_inner = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "string".to_string(),
value: Some(Box::new(fdata::Value::Str("bar".to_string()))),
},
]};
let vector = fdata::Vector{values: vec![
Some(Box::new(fdata::Value::Dict(dict_inner))),
Some(Box::new(fdata::Value::Inum(-42)))
]};
let dict_outer = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "array".to_string(),
value: Some(Box::new(fdata::Value::Vec(vector))),
},
]};
let program = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "bool".to_string(),
value: Some(Box::new(fdata::Value::Bit(true))),
},
fdata::Entry{
key: "obj".to_string(),
value: Some(Box::new(fdata::Value::Dict(dict_outer))),
},
]};
let mut decl = new_component_decl();
decl.program = Some(program);
decl
},
},
test_translate_uses => {
input = json!({
"uses": [
{
"type": "service",
"source_path": "/fonts/CoolFonts",
"target_path": "/svc/fuchsia.fonts.Provider"
},
{
"type": "directory",
"source_path": "/data/assets",
"target_path": "/data/assets"
}
]
}),
output = {
let uses = vec![
UseDecl{
type_: Some(CapabilityType::Service),
source_path: Some("/fonts/CoolFonts".to_string()),
target_path: Some("/svc/fuchsia.fonts.Provider".to_string()),
},
UseDecl{
type_: Some(CapabilityType::Directory),
source_path: Some("/data/assets".to_string()),
target_path: Some("/data/assets".to_string()),
},
];
let mut decl = new_component_decl();
decl.uses = Some(uses);
decl
},
},
test_translate_exposes => {
input = json!({
"exposes": [
{
"type": "service",
"source_path": "/loggers/fuchsia.logger.Log",
"source": {
"relation": "child",
"child_name": "logger"
},
"target_path": "/svc/fuchsia.logger.Log"
},
{
"type": "directory",
"source_path": "/volumes/blobfs",
"source": {
"relation": "self"
},
"target_path": "/volumes/blobfs"
}
],
"children": [
{
"name": "logger",
"uri": "fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm"
}
]
}),
output = {
let exposes = vec![
ExposeDecl{
type_: Some(CapabilityType::Service),
source_path: Some("/loggers/fuchsia.logger.Log".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Child),
child_name: Some("logger".to_string()),
}),
target_path: Some("/svc/fuchsia.logger.Log".to_string()),
},
ExposeDecl{
type_: Some(CapabilityType::Directory),
source_path: Some("/volumes/blobfs".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Myself),
child_name: None,
}),
target_path: Some("/volumes/blobfs".to_string()),
},
];
let children = vec![
ChildDecl{
name: Some("logger".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm".to_string()),
},
];
let mut decl = new_component_decl();
decl.exposes = Some(exposes);
decl.children = Some(children);
decl
},
},
test_translate_offers => {
input = json!({
"offers": [
{
"type": "directory",
"source_path": "/data/assets",
"source": {
"relation": "realm"
},
"targets": [
{
"target_path": "/data/realm_assets",
"child_name": "logger"
},
{
"target_path": "/data/assets",
"child_name": "netstack"
}
]
},
{
"type": "directory",
"source_path": "/data/config",
"source": {
"relation": "self"
},
"targets": [
{
"target_path": "/data/config",
"child_name": "netstack"
}
]
},
{
"type": "service",
"source_path": "/svc/fuchsia.logger.Log",
"source": {
"relation": "child",
"child_name": "logger"
},
"targets": [
{
"target_path": "/svc/fuchsia.logger.SysLog",
"child_name": "netstack"
}
]
}
],
"children": [
{
"name": "logger",
"uri": "fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm"
},
{
"name": "netstack",
"uri": "fuchsia-pkg://fuchsia.com/netstack/stable#meta/netstack.cm"
}
],
}),
output = {
let offers = vec![
OfferDecl{
type_: Some(CapabilityType::Directory),
source_path: Some("/data/assets".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Realm),
child_name: None
}),
targets: Some(vec![
OfferTarget{
target_path: Some("/data/realm_assets".to_string()),
child_name: Some("logger".to_string()),
},
OfferTarget{
target_path: Some("/data/assets".to_string()),
child_name: Some("netstack".to_string()),
},
]),
},
OfferDecl{
type_: Some(CapabilityType::Directory),
source_path: Some("/data/config".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Myself),
child_name: None
}),
targets: Some(vec![
OfferTarget{
target_path: Some("/data/config".to_string()),
child_name: Some("netstack".to_string()),
},
]),
},
OfferDecl{
type_: Some(CapabilityType::Service),
source_path: Some("/svc/fuchsia.logger.Log".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Child),
child_name: Some("logger".to_string()),
}),
targets: Some(vec![
OfferTarget{
target_path: Some("/svc/fuchsia.logger.SysLog".to_string()),
child_name: Some("netstack".to_string()),
},
]),
},
];
let children = vec![
ChildDecl{
name: Some("logger".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm".to_string()),
},
ChildDecl{
name: Some("netstack".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/netstack/stable#meta/netstack.cm".to_string()),
},
];
let mut decl = new_component_decl();
decl.offers = Some(offers);
decl.children = Some(children);
decl
},
},
test_translate_children => {
input = json!({
"children": [
{
"name": "logger",
"uri": "fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm"
},
{
"name": "echo_server",
"uri": "fuchsia-pkg://fuchsia.com/echo_server/stable#meta/echo_server.cm"
}
]
}),
output = {
let children = vec![
ChildDecl{
name: Some("logger".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm".to_string()),
},
ChildDecl{
name: Some("echo_server".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/echo_server/stable#meta/echo_server.cm".to_string()),
},
];
let mut decl = new_component_decl();
decl.children = Some(children);
decl
},
},
test_translate_facets => {
input = json!({
"facets": {
"authors": [
"me",
"you"
],
"title": "foo",
"year": 2018
}
}),
output = {
let vector = fdata::Vector{values: vec![
Some(Box::new(fdata::Value::Str("me".to_string()))),
Some(Box::new(fdata::Value::Str("you".to_string()))),
]};
let facets = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "authors".to_string(),
value: Some(Box::new(fdata::Value::Vec(vector))),
},
fdata::Entry{
key: "title".to_string(),
value: Some(Box::new(fdata::Value::Str("foo".to_string()))),
},
fdata::Entry{
key: "year".to_string(),
value: Some(Box::new(fdata::Value::Inum(2018))),
},
]};
let mut decl = new_component_decl();
decl.facets = Some(facets);
decl
},
},
test_translate_all_sections => {
input = json!({
"program": {
"binary": "bin/app"
},
"uses": [
{
"type": "service",
"source_path": "/fonts/CoolFonts",
"target_path": "/svc/fuchsia.fonts.Provider"
}
],
"exposes": [
{
"type": "directory",
"source_path": "/volumes/blobfs",
"source": {
"relation": "self"
},
"target_path": "/volumes/blobfs"
}
],
"offers": [
{
"type": "service",
"source_path": "/svc/fuchsia.logger.Log",
"source": {
"relation": "child",
"child_name": "logger"
},
"targets": [
{
"target_path": "/svc/fuchsia.logger.Log",
"child_name": "netstack"
}
]
}
],
"children": [
{
"name": "logger",
"uri": "fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm"
},
{
"name": "netstack",
"uri": "fuchsia-pkg://fuchsia.com/netstack/stable#meta/netstack.cm"
}
],
"facets": {
"author": "Fuchsia",
"year": 2018
}
}),
output = {
let program = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "binary".to_string(),
value: Some(Box::new(fdata::Value::Str("bin/app".to_string()))),
},
]};
let uses = vec![
UseDecl{
type_: Some(CapabilityType::Service),
source_path: Some("/fonts/CoolFonts".to_string()),
target_path: Some("/svc/fuchsia.fonts.Provider".to_string()),
},
];
let exposes = vec![
ExposeDecl{
type_: Some(CapabilityType::Directory),
source_path: Some("/volumes/blobfs".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Myself),
child_name: None,
}),
target_path: Some("/volumes/blobfs".to_string()),
},
];
let offers = vec![
OfferDecl{
type_: Some(CapabilityType::Service),
source_path: Some("/svc/fuchsia.logger.Log".to_string()),
source: Some(RelativeId{
relation: Some(Relation::Child),
child_name: Some("logger".to_string()),
}),
targets: Some(vec![
OfferTarget{
target_path: Some("/svc/fuchsia.logger.Log".to_string()),
child_name: Some("netstack".to_string()),
},
]),
},
];
let children = vec![
ChildDecl{
name: Some("logger".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/logger/stable#meta/logger.cm".to_string()),
},
ChildDecl{
name: Some("netstack".to_string()),
uri: Some("fuchsia-pkg://fuchsia.com/netstack/stable#meta/netstack.cm".to_string()),
},
];
let facets = fdata::Dictionary{entries: vec![
fdata::Entry{
key: "author".to_string(),
value: Some(Box::new(fdata::Value::Str("Fuchsia".to_string()))),
},
fdata::Entry{
key: "year".to_string(),
value: Some(Box::new(fdata::Value::Inum(2018))),
},
]};
ComponentDecl{
program: Some(program),
uses: Some(uses),
exposes: Some(exposes),
offers: Some(offers),
children: Some(children),
facets: Some(facets)
}
},
},
}
}
|
const ALL_KEYS: &'static [&'static str] =
&["KEY_RESERVED",
"KEY_ESC",
"KEY_1",
"KEY_2",
"KEY_3",
"KEY_4",
"KEY_5",
"KEY_6",
"KEY_7",
"KEY_8",
"KEY_9",
"KEY_10",
"KEY_MINUS",
"KEY_EQUAL",
"KEY_BACKSPACE",
"KEY_TAB",
"KEY_Q",
"KEY_W",
"KEY_E",
"KEY_R",
"KEY_T",
"KEY_Y",
"KEY_U",
"KEY_I",
"KEY_O",
"KEY_P",
"KEY_LEFTBRACE",
"KEY_RIGHTBRACE",
"KEY_ENTER",
"KEY_LEFTCTRL",
"KEY_A",
"KEY_S",
"KEY_D",
"KEY_F",
"KEY_G",
"KEY_H",
"KEY_J",
"KEY_K",
"KEY_L",
"KEY_SEMICOLON",
"KEY_APOSTROPHE",
"KEY_GRAVE",
"KEY_LEFTSHIFT",
"KEY_BACKSLASH",
"KEY_Z",
"KEY_X",
"KEY_C",
"KEY_V",
"KEY_B",
"KEY_N",
"KEY_M",
"KEY_COMMA",
"KEY_DOT",
"KEY_SLASH",
"KEY_RIGHTSHIFT",
"KEY_KPASTERISK",
"KEY_LEFTALT",
"KEY_SPACE",
"KEY_CAPSLOCK",
"KEY_F1",
"KEY_F2",
"KEY_F3",
"KEY_F4",
"KEY_F5",
"KEY_F6",
"KEY_F7",
"KEY_F8",
"KEY_F9",
"KEY_F10",
"KEY_NUMLOCK",
"KEY_SCROLLLOCK",
"KEY_KP7",
"KEY_KP8",
"KEY_KP9",
"KEY_KPMINUS",
"KEY_KP4",
"KEY_KP5",
"KEY_KP6",
"KEY_KPPLUS",
"KEY_KP1",
"KEY_KP2",
"KEY_KP3",
"KEY_KP0",
"KEY_KPDOT",
"KEY_ZENKAKUHANKAKU",
"KEY_102ND",
"KEY_F11",
"KEY_F12",
"KEY_RO",
"KEY_KATAKANA",
"KEY_HIRAGANA",
"KEY_HENKAN",
"KEY_KATAKANAHIRAGANA",
"KEY_MUHENKAN",
"KEY_KPJPCOMMA",
"KEY_KPENTER",
"KEY_RIGHTCTRL",
"KEY_KPSLASH",
"KEY_SYSRQ",
"KEY_RIGHTALT",
"KEY_LINEFEED",
"KEY_HOME",
"KEY_UP",
"KEY_PAGEUP",
"KEY_LEFT",
"KEY_RIGHT",
"KEY_END",
"KEY_DOWN",
"KEY_PAGEDOWN",
"KEY_INSERT",
"KEY_DELETE",
"KEY_MACRO",
"KEY_MUTE",
"KEY_VOLUMEDOWN",
"KEY_VOLUMEUP",
"KEY_POWER",
"KEY_KPEQUAL",
"KEY_KPPLUSMINUS",
"KEY_PAUSE",
"KEY_SCALE",
"KEY_KPCOMMA",
"KEY_HANGEUL",
"KEY_HANGUEL",
"KEY_HANJA",
"KEY_YEN",
"KEY_LEFTMETA",
"KEY_RIGHTMETA",
"KEY_COMPOSE",
"KEY_STOP",
"KEY_AGAIN",
"KEY_PROPS",
"KEY_UNDO",
"KEY_FRONT",
"KEY_COPY",
"KEY_OPEN",
"KEY_PASTE",
"KEY_FIND",
"KEY_CUT",
"KEY_HELP",
"KEY_MENU",
"KEY_CALC",
"KEY_SETUP",
"KEY_SLEEP",
"KEY_WAKEUP",
"KEY_FILE",
"KEY_SENDFILE",
"KEY_DELETEFILE",
"KEY_XFER",
"KEY_PROG1",
"KEY_PROG2",
"KEY_WWW",
"KEY_MSDOS",
"KEY_COFFEE",
"KEY_SCREENLOCK",
"KEY_ROTATE_DISPLAY",
"KEY_DIRECTION",
"KEY_CYCLEWINDOWS",
"KEY_MAIL",
"KEY_BOOKMARKS",
"KEY_COMPUTER",
"KEY_BACK",
"KEY_FORWARD",
"KEY_CLOSECD",
"KEY_EJECTCD",
"KEY_EJECTCLOSECD",
"KEY_NEXTSONG",
"KEY_PLAYPAUSE",
"KEY_PREVIOUSSONG",
"KEY_STOPCD",
"KEY_RECORD",
"KEY_REWIND",
"KEY_PHONE",
"KEY_ISO",
"KEY_CONFIG",
"KEY_HOMEPAGE",
"KEY_REFRESH",
"KEY_EXIT",
"KEY_MOVE",
"KEY_EDIT",
"KEY_SCROLLUP",
"KEY_SCROLLDOWN",
"KEY_KPLEFTPAREN",
"KEY_KPRIGHTPAREN",
"KEY_NEW",
"KEY_REDO",
"KEY_F13",
"KEY_F14",
"KEY_F15",
"KEY_F16",
"KEY_F17",
"KEY_F18",
"KEY_F19",
"KEY_F20",
"KEY_F21",
"KEY_F22",
"KEY_F23",
"KEY_F24",
"KEY_PLAYCD",
"KEY_PAUSECD",
"KEY_PROG3",
"KEY_PROG4",
"KEY_DASHBOARD",
"KEY_SUSPEND",
"KEY_CLOSE",
"KEY_PLAY",
"KEY_FASTFORWARD",
"KEY_BASSBOOST",
"KEY_PRINT",
"KEY_HP",
"KEY_CAMERA",
"KEY_SOUND",
"KEY_QUESTION",
"KEY_EMAIL",
"KEY_CHAT",
"KEY_SEARCH",
"KEY_CONNECT",
"KEY_FINANCE",
"KEY_SPORT",
"KEY_SHOP",
"KEY_ALTERASE",
"KEY_CANCEL",
"KEY_BRIGHTNESSDOWN",
"KEY_BRIGHTNESSUP",
"KEY_MEDIA",
"KEY_SWITCHVIDEOMODE",
"KEY_KBDILLUMTOGGLE",
"KEY_KBDILLUMDOWN",
"KEY_KBDILLUMUP",
"KEY_SEND",
"KEY_REPLY",
"KEY_FORWARDMAIL",
"KEY_SAVE",
"KEY_DOCUMENTS",
"KEY_BATTERY",
"KEY_BLUETOOTH",
"KEY_WLAN",
"KEY_UWB",
"KEY_UNKNOWN",
"KEY_VIDEO_NEXT",
"KEY_VIDEO_PREV",
"KEY_BRIGHTNESS_CYCLE",
"KEY_BRIGHTNESS_AUTO",
"KEY_BRIGHTNESS_ZERO",
"KEY_DISPLAY_OFF",
"KEY_WWAN",
"KEY_WIMAX",
"KEY_RFKILL",
"KEY_MICMUTE",
"KEY_OK",
"KEY_SELECT",
"KEY_GOTO",
"KEY_CLEAR",
"KEY_POWER2",
"KEY_OPTION",
"KEY_INFO",
"KEY_TIME",
"KEY_VENDOR",
"KEY_ARCHIVE",
"KEY_PROGRAM",
"KEY_CHANNEL",
"KEY_FAVORITES",
"KEY_EPG",
"KEY_PVR",
"KEY_MHP",
"KEY_LANGUAGE",
"KEY_TITLE",
"KEY_SUBTITLE",
"KEY_ANGLE",
"KEY_ZOOM",
"KEY_MODE",
"KEY_KEYBOARD",
"KEY_SCREEN",
"KEY_PC",
"KEY_TV",
"KEY_TV2",
"KEY_VCR",
"KEY_VCR2",
"KEY_SAT",
"KEY_SAT2",
"KEY_CD",
"KEY_TAPE",
"KEY_RADIO",
"KEY_TUNER",
"KEY_PLAYER",
"KEY_TEXT",
"KEY_DVD",
"KEY_AUX",
"KEY_MP3",
"KEY_AUDIO",
"KEY_VIDEO",
"KEY_DIRECTORY",
"KEY_LIST",
"KEY_MEMO",
"KEY_CALENDAR",
"KEY_RED",
"KEY_GREEN",
"KEY_YELLOW",
"KEY_BLUE",
"KEY_CHANNELUP",
"KEY_CHANNELDOWN",
"KEY_FIRST",
"KEY_LAST",
"KEY_AB",
"KEY_NEXT",
"KEY_RESTART",
"KEY_SLOW",
"KEY_SHUFFLE",
"KEY_BREAK",
"KEY_PREVIOUS",
"KEY_DIGITS",
"KEY_TEEN",
"KEY_TWEN",
"KEY_VIDEOPHONE",
"KEY_GAMES",
"KEY_ZOOMIN",
"KEY_ZOOMOUT",
"KEY_ZOOMRESET",
"KEY_WORDPROCESSOR",
"KEY_EDITOR",
"KEY_SPREADSHEET",
"KEY_GRAPHICSEDITOR",
"KEY_PRESENTATION",
"KEY_DATABASE",
"KEY_NEWS",
"KEY_VOICEMAIL",
"KEY_ADDRESSBOOK",
"KEY_MESSENGER",
"KEY_DISPLAYTOGGLE",
"KEY_BRIGHTNESS_TOGGLE",
"KEY_SPELLCHECK",
"KEY_LOGOFF",
"KEY_DOLLAR",
"KEY_EURO",
"KEY_FRAMEBACK",
"KEY_FRAMEFORWARD",
"KEY_CONTEXT_MENU",
"KEY_MEDIA_REPEAT",
"KEY_10CHANNELSUP",
"KEY_10CHANNELSDOWN",
"KEY_IMAGES",
"KEY_DEL_EOL",
"KEY_DEL_EOS",
"KEY_INS_LINE",
"KEY_DEL_LINE",
"KEY_FN",
"KEY_FN_ESC",
"KEY_FN_F1",
"KEY_FN_F2",
"KEY_FN_F3",
"KEY_FN_F4",
"KEY_FN_F5",
"KEY_FN_F6",
"KEY_FN_F7",
"KEY_FN_F8",
"KEY_FN_F9",
"KEY_FN_F10",
"KEY_FN_F11",
"KEY_FN_F12",
"KEY_FN_1",
"KEY_FN_2",
"KEY_FN_D",
"KEY_FN_E",
"KEY_FN_F",
"KEY_FN_S",
"KEY_FN_B",
"KEY_BRL_DOT1",
"KEY_BRL_DOT2",
"KEY_BRL_DOT3",
"KEY_BRL_DOT4",
"KEY_BRL_DOT5",
"KEY_BRL_DOT6",
"KEY_BRL_DOT7",
"KEY_BRL_DOT8",
"KEY_BRL_DOT9",
"KEY_BRL_DOT10",
"KEY_NUMERIC_0",
"KEY_NUMERIC_1",
"KEY_NUMERIC_2",
"KEY_NUMERIC_3",
"KEY_NUMERIC_4",
"KEY_NUMERIC_5",
"KEY_NUMERIC_6",
"KEY_NUMERIC_7",
"KEY_NUMERIC_8",
"KEY_NUMERIC_9",
"KEY_NUMERIC_STAR",
"KEY_NUMERIC_POUND",
"KEY_NUMERIC_A",
"KEY_NUMERIC_B",
"KEY_NUMERIC_C",
"KEY_NUMERIC_D",
"KEY_CAMERA_FOCUS",
"KEY_WPS_BUTTON",
"KEY_TOUCHPAD_TOGGLE",
"KEY_TOUCHPAD_ON",
"KEY_TOUCHPAD_OFF",
"KEY_CAMERA_ZOOMIN",
"KEY_CAMERA_ZOOMOUT",
"KEY_CAMERA_UP",
"KEY_CAMERA_DOWN",
"KEY_CAMERA_LEFT",
"KEY_CAMERA_RIGHT",
"KEY_ATTENDANT_ON",
"KEY_ATTENDANT_OFF",
"KEY_ATTENDANT_TOGGLE",
"KEY_LIGHTS_TOGGLE",
"KEY_ALS_TOGGLE",
"KEY_BUTTONCONFIG",
"KEY_TASKMANAGER",
"KEY_JOURNAL",
"KEY_CONTROLPANEL",
"KEY_APPSELECT",
"KEY_SCREENSAVER",
"KEY_VOICECOMMAND",
"KEY_BRIGHTNESS_MIN",
"KEY_BRIGHTNESS_MAX",
"KEY_KBDINPUTASSIST_PREV",
"KEY_KBDINPUTASSIST_NEXT",
"KEY_KBDINPUTASSIST_PREVGROUP",
"KEY_KBDINPUTASSIST_NEXTGROUP",
"KEY_KBDINPUTASSIST_ACCEPT",
"KEY_KBDINPUTASSIST_CANCEL",
"KEY_MIN_INTERESTING",
"KEY_MAX",
"KEY_CNT"];
/// Maps a numeric keycode back to its symbolic `"KEY_*"` name.
///
/// # Panics
/// Panics when no entry of `ALL_KEYS` maps to `x`.
pub fn code_to_name(x: i32) -> &'static str {
    ALL_KEYS
        .iter()
        .copied()
        .find(|&name| name_to_code(name) == x)
        .unwrap_or_else(|| panic!("Keycode not recognized {}", x))
}
pub fn name_to_code(s: &str) -> i32 {
match s {
_ if ALL_KEYS[0].eq(s) => 0,
_ if ALL_KEYS[1].eq(s) => 1,
_ if ALL_KEYS[2].eq(s) => 2,
_ if ALL_KEYS[3].eq(s) => 3,
_ if ALL_KEYS[4].eq(s) => 4,
_ if ALL_KEYS[5].eq(s) => 5,
_ if ALL_KEYS[6].eq(s) => 6,
_ if ALL_KEYS[7].eq(s) => 7,
_ if ALL_KEYS[8].eq(s) => 8,
_ if ALL_KEYS[9].eq(s) => 9,
_ if ALL_KEYS[10].eq(s) => 10,
_ if ALL_KEYS[11].eq(s) => 11,
_ if ALL_KEYS[12].eq(s) => 12,
_ if ALL_KEYS[13].eq(s) => 13,
_ if ALL_KEYS[14].eq(s) => 14,
_ if ALL_KEYS[15].eq(s) => 15,
_ if ALL_KEYS[16].eq(s) => 16,
_ if ALL_KEYS[17].eq(s) => 17,
_ if ALL_KEYS[18].eq(s) => 18,
_ if ALL_KEYS[19].eq(s) => 19,
_ if ALL_KEYS[20].eq(s) => 20,
_ if ALL_KEYS[21].eq(s) => 21,
_ if ALL_KEYS[22].eq(s) => 22,
_ if ALL_KEYS[23].eq(s) => 23,
_ if ALL_KEYS[24].eq(s) => 24,
_ if ALL_KEYS[25].eq(s) => 25,
_ if ALL_KEYS[26].eq(s) => 26,
_ if ALL_KEYS[27].eq(s) => 27,
_ if ALL_KEYS[28].eq(s) => 28,
_ if ALL_KEYS[29].eq(s) => 29,
_ if ALL_KEYS[30].eq(s) => 30,
_ if ALL_KEYS[31].eq(s) => 31,
_ if ALL_KEYS[32].eq(s) => 32,
_ if ALL_KEYS[33].eq(s) => 33,
_ if ALL_KEYS[34].eq(s) => 34,
_ if ALL_KEYS[35].eq(s) => 35,
_ if ALL_KEYS[36].eq(s) => 36,
_ if ALL_KEYS[37].eq(s) => 37,
_ if ALL_KEYS[38].eq(s) => 38,
_ if ALL_KEYS[39].eq(s) => 39,
_ if ALL_KEYS[40].eq(s) => 40,
_ if ALL_KEYS[41].eq(s) => 41,
_ if ALL_KEYS[42].eq(s) => 42,
_ if ALL_KEYS[43].eq(s) => 43,
_ if ALL_KEYS[44].eq(s) => 44,
_ if ALL_KEYS[45].eq(s) => 45,
_ if ALL_KEYS[46].eq(s) => 46,
_ if ALL_KEYS[47].eq(s) => 47,
_ if ALL_KEYS[48].eq(s) => 48,
_ if ALL_KEYS[49].eq(s) => 49,
_ if ALL_KEYS[50].eq(s) => 50,
_ if ALL_KEYS[51].eq(s) => 51,
_ if ALL_KEYS[52].eq(s) => 52,
_ if ALL_KEYS[53].eq(s) => 53,
_ if ALL_KEYS[54].eq(s) => 54,
_ if ALL_KEYS[55].eq(s) => 55,
_ if ALL_KEYS[56].eq(s) => 56,
_ if ALL_KEYS[57].eq(s) => 57,
_ if ALL_KEYS[58].eq(s) => 58,
_ if ALL_KEYS[59].eq(s) => 59,
_ if ALL_KEYS[60].eq(s) => 60,
_ if ALL_KEYS[61].eq(s) => 61,
_ if ALL_KEYS[62].eq(s) => 62,
_ if ALL_KEYS[63].eq(s) => 63,
_ if ALL_KEYS[64].eq(s) => 64,
_ if ALL_KEYS[65].eq(s) => 65,
_ if ALL_KEYS[66].eq(s) => 66,
_ if ALL_KEYS[67].eq(s) => 67,
_ if ALL_KEYS[68].eq(s) => 68,
_ if ALL_KEYS[69].eq(s) => 69,
_ if ALL_KEYS[70].eq(s) => 70,
_ if ALL_KEYS[71].eq(s) => 71,
_ if ALL_KEYS[72].eq(s) => 72,
_ if ALL_KEYS[73].eq(s) => 73,
_ if ALL_KEYS[74].eq(s) => 74,
_ if ALL_KEYS[75].eq(s) => 75,
_ if ALL_KEYS[76].eq(s) => 76,
_ if ALL_KEYS[77].eq(s) => 77,
_ if ALL_KEYS[78].eq(s) => 78,
_ if ALL_KEYS[79].eq(s) => 79,
_ if ALL_KEYS[80].eq(s) => 80,
_ if ALL_KEYS[81].eq(s) => 81,
_ if ALL_KEYS[82].eq(s) => 82,
_ if ALL_KEYS[83].eq(s) => 83,
_ if ALL_KEYS[84].eq(s) => 85,
_ if ALL_KEYS[85].eq(s) => 86,
_ if ALL_KEYS[86].eq(s) => 87,
_ if ALL_KEYS[87].eq(s) => 88,
_ if ALL_KEYS[88].eq(s) => 89,
_ if ALL_KEYS[89].eq(s) => 90,
_ if ALL_KEYS[90].eq(s) => 91,
_ if ALL_KEYS[91].eq(s) => 92,
_ if ALL_KEYS[92].eq(s) => 93,
_ if ALL_KEYS[93].eq(s) => 94,
_ if ALL_KEYS[94].eq(s) => 95,
_ if ALL_KEYS[95].eq(s) => 96,
_ if ALL_KEYS[96].eq(s) => 97,
_ if ALL_KEYS[97].eq(s) => 98,
_ if ALL_KEYS[98].eq(s) => 99,
_ if ALL_KEYS[99].eq(s) => 100,
_ if ALL_KEYS[100].eq(s) => 101,
_ if ALL_KEYS[101].eq(s) => 102,
_ if ALL_KEYS[102].eq(s) => 103,
_ if ALL_KEYS[103].eq(s) => 104,
_ if ALL_KEYS[104].eq(s) => 105,
_ if ALL_KEYS[105].eq(s) => 106,
_ if ALL_KEYS[106].eq(s) => 107,
_ if ALL_KEYS[107].eq(s) => 108,
_ if ALL_KEYS[108].eq(s) => 109,
_ if ALL_KEYS[109].eq(s) => 110,
_ if ALL_KEYS[110].eq(s) => 111,
_ if ALL_KEYS[111].eq(s) => 112,
_ if ALL_KEYS[112].eq(s) => 113,
_ if ALL_KEYS[113].eq(s) => 114,
_ if ALL_KEYS[114].eq(s) => 115,
_ if ALL_KEYS[115].eq(s) => 116,
_ if ALL_KEYS[116].eq(s) => 117,
_ if ALL_KEYS[117].eq(s) => 118,
_ if ALL_KEYS[118].eq(s) => 119,
_ if ALL_KEYS[119].eq(s) => 120,
_ if ALL_KEYS[120].eq(s) => 121,
_ if ALL_KEYS[121].eq(s) => 122,
_ if ALL_KEYS[122].eq(s) => 122,
_ if ALL_KEYS[123].eq(s) => 123,
_ if ALL_KEYS[124].eq(s) => 124,
_ if ALL_KEYS[125].eq(s) => 125,
_ if ALL_KEYS[126].eq(s) => 126,
_ if ALL_KEYS[127].eq(s) => 127,
_ if ALL_KEYS[128].eq(s) => 128,
_ if ALL_KEYS[129].eq(s) => 129,
_ if ALL_KEYS[130].eq(s) => 130,
_ if ALL_KEYS[131].eq(s) => 131,
_ if ALL_KEYS[132].eq(s) => 132,
_ if ALL_KEYS[133].eq(s) => 133,
_ if ALL_KEYS[134].eq(s) => 134,
_ if ALL_KEYS[135].eq(s) => 135,
_ if ALL_KEYS[136].eq(s) => 136,
_ if ALL_KEYS[137].eq(s) => 137,
_ if ALL_KEYS[138].eq(s) => 138,
_ if ALL_KEYS[139].eq(s) => 139,
_ if ALL_KEYS[140].eq(s) => 140,
_ if ALL_KEYS[141].eq(s) => 141,
_ if ALL_KEYS[142].eq(s) => 142,
_ if ALL_KEYS[143].eq(s) => 143,
_ if ALL_KEYS[144].eq(s) => 144,
_ if ALL_KEYS[145].eq(s) => 145,
_ if ALL_KEYS[146].eq(s) => 146,
_ if ALL_KEYS[147].eq(s) => 147,
_ if ALL_KEYS[148].eq(s) => 148,
_ if ALL_KEYS[149].eq(s) => 149,
_ if ALL_KEYS[150].eq(s) => 150,
_ if ALL_KEYS[151].eq(s) => 151,
_ if ALL_KEYS[152].eq(s) => 152,
_ if ALL_KEYS[153].eq(s) => 152,
_ if ALL_KEYS[154].eq(s) => 153,
_ if ALL_KEYS[155].eq(s) => 153,
_ if ALL_KEYS[156].eq(s) => 154,
_ if ALL_KEYS[157].eq(s) => 155,
_ if ALL_KEYS[158].eq(s) => 156,
_ if ALL_KEYS[159].eq(s) => 157,
_ if ALL_KEYS[160].eq(s) => 158,
_ if ALL_KEYS[161].eq(s) => 159,
_ if ALL_KEYS[162].eq(s) => 160,
_ if ALL_KEYS[163].eq(s) => 161,
_ if ALL_KEYS[164].eq(s) => 162,
_ if ALL_KEYS[165].eq(s) => 163,
_ if ALL_KEYS[166].eq(s) => 164,
_ if ALL_KEYS[167].eq(s) => 165,
_ if ALL_KEYS[168].eq(s) => 166,
_ if ALL_KEYS[169].eq(s) => 167,
_ if ALL_KEYS[170].eq(s) => 168,
_ if ALL_KEYS[171].eq(s) => 169,
_ if ALL_KEYS[172].eq(s) => 170,
_ if ALL_KEYS[173].eq(s) => 171,
_ if ALL_KEYS[174].eq(s) => 172,
_ if ALL_KEYS[175].eq(s) => 173,
_ if ALL_KEYS[176].eq(s) => 174,
_ if ALL_KEYS[177].eq(s) => 175,
_ if ALL_KEYS[178].eq(s) => 176,
_ if ALL_KEYS[179].eq(s) => 177,
_ if ALL_KEYS[180].eq(s) => 178,
_ if ALL_KEYS[181].eq(s) => 179,
_ if ALL_KEYS[182].eq(s) => 180,
_ if ALL_KEYS[183].eq(s) => 181,
_ if ALL_KEYS[184].eq(s) => 182,
_ if ALL_KEYS[185].eq(s) => 183,
_ if ALL_KEYS[186].eq(s) => 184,
_ if ALL_KEYS[187].eq(s) => 185,
_ if ALL_KEYS[188].eq(s) => 186,
_ if ALL_KEYS[189].eq(s) => 187,
_ if ALL_KEYS[190].eq(s) => 188,
_ if ALL_KEYS[191].eq(s) => 189,
_ if ALL_KEYS[192].eq(s) => 190,
_ if ALL_KEYS[193].eq(s) => 191,
_ if ALL_KEYS[194].eq(s) => 192,
_ if ALL_KEYS[195].eq(s) => 193,
_ if ALL_KEYS[196].eq(s) => 194,
_ if ALL_KEYS[197].eq(s) => 200,
_ if ALL_KEYS[198].eq(s) => 201,
_ if ALL_KEYS[199].eq(s) => 202,
_ if ALL_KEYS[200].eq(s) => 203,
_ if ALL_KEYS[201].eq(s) => 204,
_ if ALL_KEYS[202].eq(s) => 205,
_ if ALL_KEYS[203].eq(s) => 206,
_ if ALL_KEYS[204].eq(s) => 207,
_ if ALL_KEYS[205].eq(s) => 208,
_ if ALL_KEYS[206].eq(s) => 209,
_ if ALL_KEYS[207].eq(s) => 210,
_ if ALL_KEYS[208].eq(s) => 211,
_ if ALL_KEYS[209].eq(s) => 212,
_ if ALL_KEYS[210].eq(s) => 213,
_ if ALL_KEYS[211].eq(s) => 214,
_ if ALL_KEYS[212].eq(s) => 215,
_ if ALL_KEYS[213].eq(s) => 216,
_ if ALL_KEYS[214].eq(s) => 217,
_ if ALL_KEYS[215].eq(s) => 218,
_ if ALL_KEYS[216].eq(s) => 219,
_ if ALL_KEYS[217].eq(s) => 220,
_ if ALL_KEYS[218].eq(s) => 221,
_ if ALL_KEYS[219].eq(s) => 222,
_ if ALL_KEYS[220].eq(s) => 223,
_ if ALL_KEYS[221].eq(s) => 224,
_ if ALL_KEYS[222].eq(s) => 225,
_ if ALL_KEYS[223].eq(s) => 226,
_ if ALL_KEYS[224].eq(s) => 227,
_ if ALL_KEYS[225].eq(s) => 228,
_ if ALL_KEYS[226].eq(s) => 229,
_ if ALL_KEYS[227].eq(s) => 230,
_ if ALL_KEYS[228].eq(s) => 231,
_ if ALL_KEYS[229].eq(s) => 232,
_ if ALL_KEYS[230].eq(s) => 233,
_ if ALL_KEYS[231].eq(s) => 234,
_ if ALL_KEYS[232].eq(s) => 235,
_ if ALL_KEYS[233].eq(s) => 236,
_ if ALL_KEYS[234].eq(s) => 237,
_ if ALL_KEYS[235].eq(s) => 238,
_ if ALL_KEYS[236].eq(s) => 239,
_ if ALL_KEYS[237].eq(s) => 240,
_ if ALL_KEYS[238].eq(s) => 241,
_ if ALL_KEYS[239].eq(s) => 242,
_ if ALL_KEYS[240].eq(s) => 243,
_ => -1
}
} |
use crate::{FunctionData, Instruction, Map, BinaryOp, UnaryOp, Type, Cast, IntPredicate, Value};
/// Partial knowledge about the bits of an SSA value.
///
/// A bit set in `mask` means the corresponding bit of `known` holds the
/// value's actual bit; bits clear in `mask` are unknown. The pass asserts the
/// invariant `!mask & known == 0` (no "known" payload outside the mask).
#[derive(Default, Copy, Clone, Debug)]
struct KnownBits {
    /// Which bits are known.
    mask: u64,
    /// Values of the known bits (meaningful only where `mask` is set).
    known: u64,
}
impl KnownBits {
    /// Returns the value of the sign bit (the top bit of `ty`) when it is
    /// known, or `None` when the sign bit is not covered by `mask`.
    fn sign(&self, ty: Type) -> Option<bool> {
        let top = ty.size_bits() - 1;
        match self.mask & (1 << top) {
            // Sign bit not known.
            0 => None,
            _ => Some((self.known >> top) != 0),
        }
    }
}
/// Merges knowledge from two control-flow paths (e.g. `select` arms or `phi`
/// incoming values): a bit stays known only if both sides know it AND agree
/// on its value.
fn combine_known_bits(kb1: &KnownBits, kb2: &KnownBits) -> KnownBits {
    // Bits known on both sides.
    let shared = kb1.mask & kb2.mask;
    let known1 = kb1.known & shared;
    let known2 = kb2.known & shared;
    // Keep only the positions where both sides agree on the bit value.
    let agreeing = !(known1 ^ known2);
    KnownBits {
        mask: shared & agreeing,
        known: (known1 | known2) & agreeing,
    }
}
/// Tries to prove `a > b` for two positive integers from their known bits.
///
/// Returns `Some(a > b)` when provable, `None` when an unknown bit is reached
/// before the operands differ.
fn bit_compare_greater(a_bits: &KnownBits, b_bits: &KnownBits, ty: Type) -> Option<bool> {
    // Walk from the most significant bit down to the least significant one.
    for idx in (0..ty.size_bits()).rev() {
        let bit = 1 << idx;
        // Both operands must know this bit, otherwise the comparison of the
        // remaining (lower) bits is meaningless.
        let both_known = (a_bits.mask & bit) != 0 && (b_bits.mask & bit) != 0;
        if !both_known {
            return None;
        }
        let a_set = (a_bits.known & bit) != 0;
        let b_set = (b_bits.known & bit) != 0;
        // The first differing bit from the top decides the comparison:
        // whoever has it set is the larger value.
        if a_set != b_set {
            return Some(a_set);
        }
    }
    // All bits equal: `a > b` is false, but the original reports None here.
    None
}
/// Performs a ripple-carry addition over the known bits of `a` and `b`,
/// stopping at the first bit position where either operand is unknown
/// (the carry cannot be tracked past it).
fn bit_add(a: &KnownBits, b: &KnownBits, ty: Type) -> KnownBits {
    let mut result = KnownBits::default();
    // With nothing known on either side there is nothing to add.
    if a.mask == 0 || b.mask == 0 {
        return result;
    }
    let mut carry = 0;
    for idx in 0..ty.size_bits() {
        let bit = 1 << idx;
        // Addition cannot continue once either operand's bit is unknown.
        if (a.mask & bit) == 0 || (b.mask & bit) == 0 {
            break;
        }
        let bit_a = (a.known >> idx) & 1;
        let bit_b = (b.known >> idx) & 1;
        let sum = bit_a + bit_b + carry;
        // Low bit of the sum is this output bit; high bit is the next carry.
        result.known |= (sum & 1) << idx;
        result.mask |= bit;
        carry = sum >> 1;
    }
    result
}
/// Computes the known bits of `-x` from the known bits of `x`, using the
/// identity `-x == !x + 1`.
fn bit_neg(bits: &KnownBits, ty: Type) -> KnownBits {
    // -a = !a + 1
    // Invert all known bits.
    let mut computed = *bits;
    computed.known = !computed.known & computed.mask;
    // NOTE(review): `is_zero` really means "`!a` might be zero": either no
    // known bit of `!a` is set, or nothing is known at all. When false, `!a`
    // has at least one known 1 bit and is provably non-zero.
    let is_zero = computed.known == 0 || computed.mask == 0;
    // Add the +1 of the two's-complement identity.
    let mut new_computed = bit_add(&computed, &KnownBits {
        mask: ty.bitmask(),
        known: 1,
    }, ty);
    // Mask selecting the sign bit of `ty`.
    let mask = 1 << (ty.size_bits() - 1);
    if !is_zero && new_computed.mask & mask == 0 {
        // If the value is not zero than change sign bit independently (if its not known).
        // TODO: This is not correct if overflow happens. Is it OK?
        if let Some(sign) = computed.sign(ty) {
            new_computed.mask |= mask;
            // Sign is already properly inverted.
            if sign {
                new_computed.known |= mask;
            } else {
                new_computed.known &= !mask;
            }
        }
    }
    new_computed
}
/// Pass that tracks which bits of every value are provably known and uses
/// that information to simplify instructions (see `run_on_function`).
pub struct OptimizeKnownBitsPass;
impl super::Pass for OptimizeKnownBitsPass {
    /// Human-readable pass name used in diagnostics.
    fn name(&self) -> &str {
        "bit optimization"
    }

    /// Timing block used to account time spent in this pass.
    fn time(&self) -> crate::timing::TimedBlock {
        crate::timing::optimize_known_bits()
    }

    /// Computes known bits for every value in `function` (in processing
    /// order, so operands are visited before users) and rewrites instructions
    /// whose result can be proven. Returns `true` when anything changed.
    fn run_on_function(&self, function: &mut FunctionData) -> bool {
        let mut did_something = false;
        // Compute known bits for every value and optimize some operations
        // based on gathered information.
        //
        // 1. Change `sar` to `shr` if sign bit is known to be zero.
        // 2. Remove useless &, | operations.
        // 3. Prove comparisons return value.
        // 4. Constant propagate extracted partially known values.
        let labels = function.reachable_labels();
        let processing_order = function.value_processing_order_with_labels(&labels);
        let consts = function.constant_values_with_labels(&labels);
        let creators = function.value_creators_with_labels(&labels);
        let mut known_bits = Map::default();
        for value in processing_order {
            let mut computed = KnownBits::default();
            // Replacement instruction for `value`'s creator, if one is proven.
            let mut replacement = None;
            // Proven constant result for `value`, if any.
            let mut constant = None;
            let ty = function.value_type(value);
            let ty_size = ty.size_bits();
            let ty_bitmask = ty.bitmask();
            if let Some(creator) = creators.get(&value).copied() {
                let instruction = function.instruction_mut(creator);
                match instruction {
                    Instruction::Alias { value, .. } => {
                        // An alias has exactly the bits of the aliased value.
                        computed = known_bits[value];
                    }
                    Instruction::ArithmeticUnary { op, value, .. } => {
                        match op {
                            UnaryOp::Not => {
                                // Invert all known bits.
                                computed = known_bits[value];
                                computed.known = !computed.known & computed.mask;
                            }
                            UnaryOp::Neg => {
                                computed = bit_neg(&known_bits[value], ty);
                            }
                        }
                    }
                    Instruction::ArithmeticBinary { dst, a, op, b } => {
                        let dst = *dst;
                        let a_value = *a;
                        let b_value = *b;
                        let a = known_bits[a];
                        let mut b = known_bits[b];
                        match op {
                            BinaryOp::Or | BinaryOp::And | BinaryOp::Xor => {
                                // Get common `known` and `mask` for two operands.
                                let mut mask = a.mask & b.mask;
                                let mut known = match op {
                                    BinaryOp::Or => a.known | b.known,
                                    BinaryOp::And => a.known & b.known,
                                    BinaryOp::Xor => a.known ^ b.known,
                                    _ => unreachable!(),
                                } & mask;
                                // `and` and `or` can give us more information about known bits.
                                if *op != BinaryOp::Xor {
                                    // Check every bit.
                                    for idx in 0..ty.size_bits() {
                                        let m = 1 << idx;
                                        match op {
                                            BinaryOp::Or => {
                                                // If this bit is one in at least one operand
                                                // than destination will be one too.
                                                let a_one = (a.known & m != 0) &&
                                                    (a.mask & m != 0);
                                                let b_one = (b.known & m != 0) &&
                                                    (b.mask & m != 0);
                                                if a_one || b_one {
                                                    mask |= m;
                                                    known |= m;
                                                }
                                            }
                                            BinaryOp::And => {
                                                // If this bit is zero in at least zero operand
                                                // than destination will be zero too.
                                                let a_zero = (a.known & m == 0) &&
                                                    (a.mask & m != 0);
                                                let b_zero = (b.known & m == 0) &&
                                                    (b.mask & m != 0);
                                                if a_zero || b_zero {
                                                    mask |= m;
                                                    known &= !m;
                                                }
                                            }
                                            _ => unreachable!(),
                                        }
                                    }
                                    // If one operand is constant maybe we can prove that
                                    // this operation is unneccesary.
                                    if a.mask == ty_bitmask || b.mask == ty_bitmask {
                                        // Get fully known value and partially known value.
                                        let (bits, value, operand) = if a.mask == ty_bitmask {
                                            (b, a.known, b_value)
                                        } else if b.mask == ty_bitmask {
                                            (a, b.known, a_value)
                                        } else {
                                            unreachable!()
                                        };
                                        let mut alias = None;
                                        // Check if this operation can affect the result. If
                                        // it can't, optimize it out.
                                        match op {
                                            BinaryOp::And => {
                                                // 1. Make sure that all 0 bits in `value`
                                                //    are known.
                                                // 2. Make sure that and doesn't affect known bits.
                                                if ((!bits.mask & !value) & ty_bitmask) == 0 &&
                                                    (bits.known & value) == bits.known {
                                                    alias = Some(operand);
                                                }
                                            }
                                            BinaryOp::Or => {
                                                // 1. Make sure that all 1 bits in `value`
                                                //    are known.
                                                // 2. Make sure that or doesn't affect known bits.
                                                if ((!bits.mask & value) & ty_bitmask) == 0 &&
                                                    (bits.known | value) == bits.known {
                                                    alias = Some(operand);
                                                }
                                            }
                                            _ => unreachable!(),
                                        }
                                        // Replace unneeded instruction.
                                        if let Some(alias) = alias {
                                            replacement = Some(Instruction::Alias {
                                                dst,
                                                value: alias,
                                            });
                                        }
                                    }
                                }
                                computed.mask = mask;
                                computed.known = known;
                            }
                            BinaryOp::Shl | BinaryOp::Shr | BinaryOp::Sar => {
                                if let Some((_, amount)) = consts.get(&b_value).copied() {
                                    // Shift known bits by the specified amount.
                                    let (mut mask, mut known) = match op {
                                        BinaryOp::Shl => (a.mask << amount, a.known << amount),
                                        BinaryOp::Shr | BinaryOp::Sar => {
                                            (a.mask >> amount, a.known >> amount)
                                        }
                                        _ => unreachable!(),
                                    };
                                    // Clear out of bounds bits.
                                    mask &= ty_bitmask;
                                    known &= ty_bitmask;
                                    if amount != 0 {
                                        // Some bits after shifting may become known.
                                        // Calculate mask of shifted out bits.
                                        let mut amount_mask = 0;
                                        for idx in 0..amount {
                                            amount_mask |= 1 << idx;
                                        }
                                        match op {
                                            BinaryOp::Shl => {
                                                // All shifted bits become zero.
                                                mask |= amount_mask;
                                                known &= !amount_mask;
                                            }
                                            BinaryOp::Shr => {
                                                // All shifted bits become zero. Shifted bits
                                                // are on the right side.
                                                let amount_mask = amount_mask <<
                                                    (ty_size - amount as usize);
                                                mask |= amount_mask;
                                                known &= !amount_mask;
                                            }
                                            BinaryOp::Sar => {
                                                // Bits become known only if `a` sign bit is known.
                                                if let Some(sign) = a.sign(ty) {
                                                    // All shifted bits become equal to sign
                                                    // of `a`. Shifted bits are on the right side.
                                                    let amount_mask = amount_mask <<
                                                        (ty_size - amount as usize);
                                                    mask |= amount_mask;
                                                    if sign {
                                                        known |= amount_mask;
                                                    } else {
                                                        known &= !amount_mask;
                                                    }
                                                }
                                            }
                                            _ => unreachable!(),
                                        }
                                    }
                                    computed.mask = mask;
                                    computed.known = known;
                                }
                                // If we are sure that the sign bit is 0 than we can
                                // optimize `sar` to `shr`.
                                if *op == BinaryOp::Sar {
                                    if let Some(false) = a.sign(ty) {
                                        *op = BinaryOp::Shr;
                                        did_something = true;
                                    }
                                }
                            }
                            BinaryOp::Add | BinaryOp::Sub => {
                                if *op == BinaryOp::Sub {
                                    // x - y => x + (-y)
                                    b = bit_neg(&b, ty);
                                }
                                computed = bit_add(&a, &b, ty);
                            }
                            _ => {}
                        }
                    }
                    Instruction::Select { on_true, on_false, .. } => {
                        // Either arm may be picked, so only agreeing bits survive.
                        let on_true = known_bits[on_true];
                        let on_false = known_bits[on_false];
                        computed = combine_known_bits(&on_true, &on_false);
                    }
                    Instruction::Phi { incoming, .. } => {
                        // Intersect knowledge across all incoming values.
                        let mut first = true;
                        for (_, value) in incoming {
                            if let Some(known) = known_bits.get(value) {
                                if first {
                                    computed = *known;
                                    first = false;
                                } else {
                                    computed = combine_known_bits(&computed, &known);
                                }
                            } else {
                                // Cannot get known bits for all PHI incoming values.
                                computed.mask = 0;
                                computed.known = 0;
                                break;
                            }
                        }
                    }
                    &mut Instruction::IntCompare { a, pred, b, .. } => {
                        let ty = function.value_type(a);
                        let mut a = known_bits[&a];
                        let mut b = known_bits[&b];
                        // Try to resolve `cmp` result using known bits.
                        let result = match pred {
                            IntPredicate::Equal | IntPredicate::NotEqual => {
                                // Quickly prove inequality by comparing operands known bits.
                                let common_mask = a.mask & b.mask;
                                let not_equal = (a.known & common_mask) !=
                                    (b.known & common_mask);
                                if not_equal {
                                    match pred {
                                        IntPredicate::Equal => Some(false),
                                        IntPredicate::NotEqual => Some(true),
                                        _ => unreachable!(),
                                    }
                                } else {
                                    // We can only prove inequality, we don't know if values
                                    // are equal.
                                    None
                                }
                            }
                            IntPredicate::GtS | IntPredicate::GteS |
                            IntPredicate::LtS | IntPredicate::LteS => {
                                // Normalize < / <= to > / >= by swapping operands.
                                if matches!(pred, IntPredicate::LtS | IntPredicate::LteS) {
                                    std::mem::swap(&mut a, &mut b);
                                }
                                if let (Some(a_sign), Some(b_sign)) = (a.sign(ty), b.sign(ty)) {
                                    if a_sign != b_sign {
                                        // If `b` is negative than `a` is positive and `a` is
                                        // always > and >= `b`.
                                        Some(b_sign)
                                    } else if !a_sign {
                                        // Compare positive integers.
                                        bit_compare_greater(&a, &b, ty)
                                    } else {
                                        // Fake positive integers. Because of how
                                        // `bit_comapre_greator` works it shouldn't affect results.
                                        a.known = !a.known & a.mask;
                                        b.known = !b.known & b.mask;
                                        bit_compare_greater(&a, &b, ty)
                                    }
                                } else {
                                    // We cannot reason about this compare because we don't know
                                    // sign bits of both inputs.
                                    None
                                }
                            }
                            IntPredicate::GtU | IntPredicate::GteU => {
                                bit_compare_greater(&a, &b, ty)
                            }
                            IntPredicate::LtU | IntPredicate::LteU => {
                                bit_compare_greater(&b, &a, ty)
                            }
                        };
                        // If comparison result is constant than replace `cmp` with that constant.
                        if let Some(result) = result {
                            constant = Some(result as u64);
                        }
                    }
                    &mut Instruction::Cast { cast, value, .. } => {
                        let input_ty = function.value_type(value);
                        let input_bitmask = input_ty.bitmask();
                        let value = known_bits[&value];
                        match cast {
                            Cast::Truncate | Cast::Bitcast => {
                                // Just carry over previous known value and mask off truncated
                                // part.
                                computed = value;
                                computed.mask &= ty_bitmask;
                                computed.known &= ty_bitmask;
                            }
                            Cast::SignExtend | Cast::ZeroExtend => {
                                // Try to get value of extension bit.
                                let extension_bit = match cast {
                                    Cast::ZeroExtend => Some(false),
                                    Cast::SignExtend => value.sign(input_ty),
                                    _ => unreachable!(),
                                };
                                // Add new extension known bits.
                                if let Some(extension_bit) = extension_bit {
                                    // Calculate mask of all bits that will be set to
                                    // `extension_bit`.
                                    let extension_mask = ty_bitmask & !input_bitmask;
                                    computed.mask |= extension_mask;
                                    if extension_bit {
                                        computed.known |= extension_mask;
                                    } else {
                                        computed.known &= !extension_mask;
                                    }
                                }
                                // Copy bit information of original input to lower part of output.
                                computed.known |= value.known;
                                computed.mask |= value.mask;
                            }
                        }
                    }
                    _ => {}
                }
                if let Some(constant) = constant {
                    assert!(replacement.is_none(), "Both replacement and constant are Some.");
                    replacement = Some(Instruction::Alias {
                        dst: value,
                        value: function.add_constant(ty, constant),
                    });
                }
                if let Some(replacement) = replacement {
                    *function.instruction_mut(creator) = replacement;
                    did_something = true;
                } else if computed.mask == ty_bitmask {
                    // If all bits are known and this value isn't a constant than replace
                    // instruction with a constant.
                    let mut stripped = value;
                    // Strip aliases.
                    loop {
                        if let Some(creator) = creators.get(&stripped) {
                            let instruction = function.instruction(*creator);
                            if let Instruction::Alias { value, .. } = instruction {
                                stripped = *value;
                                continue;
                            }
                        }
                        break;
                    }
                    if let Some(creator) = creators.get(&stripped) {
                        let constant = function.add_constant(ty, computed.known);
                        let instruction = function.instruction_mut(*creator);
                        *instruction = Instruction::Alias {
                            dst: instruction.created_value().unwrap(),
                            value: constant,
                        };
                        did_something = true;
                    }
                }
            } else if let Some((_, value)) = function.constant(value) {
                // For constant all bits are known.
                computed.mask = ty_bitmask;
                computed.known = value & ty_bitmask;
            };
            // Invariant: known payload never escapes the mask or the type width.
            assert!(!computed.mask & computed.known == 0 &&
                computed.mask & !ty_bitmask == 0 &&
                computed.known & !ty_bitmask == 0, "Computed invalid known bits.");
            known_bits.insert(value, computed);
        }
        // Debug aid: flip to `true` locally to dump the computed known bits.
        if false {
            dump_known_bits(function, &known_bits);
        }
        did_something
    }
}
/// Debug helper: prints every value's known bits, MSB first, with `_` marking
/// unknown positions. Values with nothing known are skipped.
fn dump_known_bits(function: &FunctionData, known_bits: &Map<Value, KnownBits>) {
    println!("Known bits for {}:", function.prototype.name);
    for (value, bits) in known_bits {
        // Nothing known about this value - not worth a line.
        if bits.mask == 0 {
            continue;
        }
        print!("{}", value);
        // Pad single-digit value ids so the bit columns line up.
        if value.0 < 10 {
            print!(" ");
        }
        print!(": ");
        let size = function.value_type(*value).size_bits();
        for idx in (0..size).rev() {
            let bit = 1 << idx;
            if bits.mask & bit == 0 {
                print!("_");
            } else {
                print!("{}", (bits.known >> idx) & 1);
            }
        }
        println!();
    }
    println!();
}
|
#![deny(missing_docs,
missing_debug_implementations, missing_copy_implementations,
trivial_casts, trivial_numeric_casts,
unsafe_code,
unstable_features,
unused_import_braces, unused_qualifications)]
//! documentation for pokemon
//! Yep.
extern crate rustc_serialize;
extern crate csv;
extern crate rand;
pub mod pokemon {
    //! documentation for pokemon
    //! Yep.
    use std::fs::File;
    use std::path::Path;
    use rand::{thread_rng, Rng};
    use csv;
    /// The struct for `Pokemon`
    ///
    /// One decoded row of `data/pokemon.csv`: a (species, language) pair with
    /// the localized name and optional genus.
    #[derive(RustcDecodable, RustcEncodable, Clone, Debug)]
    pub struct Pokemon {
        // Species id (1-based, matches the `id` passed to the getters).
        species: i32,
        // Language id; 9 is English (see `get_pokemon`).
        language: i32,
        // Localized species name.
        name: String,
        // Localized genus (e.g. "seed"); missing for some rows.
        genus: Option<String>,
    }
    // Decodes every row of the CSV file at `file_path`.
    // NOTE(review): panics (unwrap) on I/O or decode errors.
    fn search<P: AsRef<Path>>(file_path: P) -> Vec<Pokemon> {
        let file = File::open(file_path).unwrap();
        let mut rdr = csv::Reader::from_reader(file).has_headers(true);
        let rows: Vec<Pokemon> = rdr.decode().collect::<csv::Result<Vec<Pokemon>>>().unwrap();
        rows
    }
    // Returns the row matching (`index`, `lang_id`).
    // NOTE(review): scans the whole file and keeps the LAST matching row, and
    // silently falls back to the hard-coded Bulbasaur default when nothing
    // matches - a bad id is indistinguishable from species 1.
    fn search_one<P: AsRef<Path>>(file_path: P, index: usize, lang_id: i32) -> Pokemon {
        let file = File::open(file_path).unwrap();
        let mut rdr = csv::Reader::from_reader(file).has_headers(true);
        let rows: Vec<Pokemon> = rdr.decode().collect::<csv::Result<Vec<Pokemon>>>().unwrap();
        // Fallback value returned when no row matches.
        let mut pokey: Pokemon = Pokemon {
            species: 1,
            language: 1,
            name: "Bulbasaur".to_string(),
            genus: Some("seed".to_string()),
        };
        for poke in &rows {
            if poke.species == index as i32 && poke.language == lang_id {
                //println!("{:?}", poke);
                pokey = poke.clone();
            }
        }
        pokey
    }
    /// Returns all the Pokemon, in each language
    #[allow(dead_code)]
    pub fn get_all() -> Vec<Pokemon> {
        search("data/pokemon.csv")
    }
    /// Returns a single Pokemon based on specified id and language
    /// MB: English is 9
    #[allow(dead_code)]
    pub fn get_pokemon(id: usize, lang_id: i32) -> Pokemon {
        search_one("data/pokemon.csv", id, lang_id)
    }
    /// Returns a random Pokemon in English
    #[allow(dead_code)]
    pub fn get_random() -> Pokemon {
        let mut rng = thread_rng();
        // gen_range upper bound is exclusive: species 1..=801.
        search_one("data/pokemon.csv", rng.gen_range::<usize>(1, 802), 9)
    }
    /// Returns a random pokemon with the specified language
    #[allow(dead_code)]
    pub fn get_random_with_lang(lang_id: i32) -> Pokemon {
        let mut rng = thread_rng();
        search_one("data/pokemon.csv", rng.gen_range::<usize>(1, 802), lang_id)
    }
    /// Returns the name of the specified id
    /// Using 9 for example, would return "Blastoise"
    #[allow(dead_code, unused_variables)]
    pub fn get_name(id: usize) -> String {
        search_one("data/pokemon.csv", id, 9).name
    }
    /// Same as get_name(), but allows you to specify the language
    #[allow(dead_code)]
    pub fn get_name_with_lang(id: usize, lang_id: i32) -> String {
        search_one("data/pokemon.csv", id, lang_id).name
    }
}
|
//! # Container module for motor types
#[macro_use]
mod dc_motor_macro;
#[macro_use]
mod servo_motor_macro;
#[macro_use]
mod tacho_motor_macro;
mod large_motor;
pub use self::large_motor::LargeMotor;
mod medium_motor;
pub use self::medium_motor::MediumMotor;
mod tacho_motor;
pub use self::tacho_motor::TachoMotor;
use crate::{port_constants, Port};
/// EV3 ports `outA` to `outD`
///
/// Each variant resolves to its sysfs output-port address via the [`Port`]
/// impl (`port_constants::OUTPUT_A` … `OUTPUT_D`).
#[derive(Debug, Copy, Clone)]
pub enum MotorPort {
    /// EV3 `outA` port
    OutA,
    /// EV3 `outB` port
    OutB,
    /// EV3 `outC` port
    OutC,
    /// EV3 `outD` port
    OutD,
}
impl MotorPort {
    /// Try to format a device name path to a port name.
    ///
    /// Kernel device names `motor0`..`motor3` are translated to the matching
    /// output-port address; any other name is returned unchanged.
    pub fn format_name(name: &str) -> String {
        let port = match name {
            "motor0" => Some(MotorPort::OutA),
            "motor1" => Some(MotorPort::OutB),
            "motor2" => Some(MotorPort::OutC),
            "motor3" => Some(MotorPort::OutD),
            _ => None,
        };
        match port {
            Some(port) => port.address(),
            None => name.to_owned(),
        }
    }
}
impl Port for MotorPort {
    /// Returns the sysfs address string for this output port.
    fn address(&self) -> String {
        let constant = match self {
            MotorPort::OutA => port_constants::OUTPUT_A,
            MotorPort::OutB => port_constants::OUTPUT_B,
            MotorPort::OutC => port_constants::OUTPUT_C,
            MotorPort::OutD => port_constants::OUTPUT_D,
        };
        constant.to_owned()
    }
}
|
//! Contains a struct which is one measurement run of the coincidence counter
//! should be used to extract the time information from the fifo data
use std::thread;
use std::time::Duration;
use crate::types::{HydraHarpError, CTCStatus};
/// Ticks added per overflow record (2^25).
/// NOTE(review): one bit more than TIME_MASK covers - verify against the
/// HydraHarp T2 record format.
const OVERFLOW_PERIOD: u64 = 33554432;
/// Six channel-field bits at offset 25; all set marks an overflow record.
const OVERFLOW_MASK: u32 = (63 << 25);
/// Low 24 bits of a record carry the time tag.
const TIME_MASK: u32 = (1 << 24) - 1;
/// Describes the different types a T2 value can have
pub enum T2Value {
    /// The u8 is the channel number, the u32 is the time
    Time(u8, u32),
    /// The u32 is the time
    Sync(u32),
    /// Timer-overflow record; the u32 is the overflow count (low 24 bits).
    Overflow(u32),
    /// Internal sync marker; `convert_T2_value` always emits payload 0.
    InternalSync(u8),
    /// External sync marker.
    /// NOTE(review): never produced by `convert_T2_value` - confirm intent.
    ExternalSync(u8),
}
/// Converts a single 4 byte phrase into a T2Value
pub fn convert_T2_value(v: &u32) -> T2Value {
use crate::measurement::T2Value::*;
match (v & (1 << 31)) {
0 => Time((v >> 25) as u8 & 63u8, v & TIME_MASK),
_ => match (v & OVERFLOW_MASK) {
OVERFLOW_MASK => Overflow(v & ((1 << 24) - 1)),
0 => Sync(v & TIME_MASK),
_ => InternalSync(0),
},
}
}
/// The measurement struct which keeps track of timing overflows
pub struct Measurement {
    /// Accumulated tick offset from all overflow records seen so far;
    /// added to every subsequent event time.
    pub time_overflow: u64,
}
impl Measurement {
    /// Define a new measurement, setting an overflow if needed
    pub fn new(overflow: u64) -> Measurement {
        Measurement { time_overflow: overflow }
    }

    /// Convert a set of fifo outputs in T2 mode into a vector of channels and times
    /// Sets the sync channel to index zero and the rest higher
    pub fn convert_values_T2(&mut self, input: &[u32]) -> Vec<(u8, u64)> {
        use crate::measurement::T2Value::*;
        let mut events = Vec::with_capacity(input.len());
        for record in input {
            match convert_T2_value(record) {
                // Regular channels are shifted up by one so sync can be 0.
                Time(channel, time) => {
                    events.push((channel + 1, u64::from(time) + self.time_overflow));
                }
                Sync(time) => {
                    events.push((0, u64::from(time) + self.time_overflow));
                }
                // Overflows only advance the running time offset.
                Overflow(count) => {
                    self.time_overflow += u64::from(count) * OVERFLOW_PERIOD;
                }
                // Sync markers carry no time information.
                _ => {}
            }
        }
        events
    }
}
/// Abstraction over a device that can run a timed measurement and stream
/// event records out of a FIFO.
pub trait Measureable {
    /// Starts a measurement lasting `acquisition_time` (device time units -
    /// presumably milliseconds; TODO confirm against the HydraHarp API).
    fn start_measurement(&mut self, acquisition_time: i32) -> Result<(), HydraHarpError>;
    /// Reads up to `records_to_fetch` records into `buffer`; returns the
    /// number of records actually fetched.
    fn read_fifo(&mut self, buffer: &mut [u32], records_to_fetch: i32) -> Result<i32, HydraHarpError>;
    /// Queries the hardware CTC to see whether the measurement has ended.
    fn get_CTC_status(&self) -> Result<CTCStatus, HydraHarpError>;
}
/// Dummy `Measureable` implementation for tests: performs no I/O and
/// immediately reports the measurement as ended.
pub struct TestMeasureable {
    // Simulated device clock; currently never advanced.
    time: u64,
}
impl TestMeasureable {
    /// Creates a test device with its simulated clock at zero.
    fn new() -> TestMeasureable {
        TestMeasureable {
            time: 0
        }
    }
}
impl Measureable for TestMeasureable {
    /// No-op: pretends the measurement started successfully.
    /// (Parameters are intentionally unused - underscore-prefixed to silence
    /// the unused-variable warnings the original triggered.)
    fn start_measurement(&mut self, _acquisition_time: i32) -> Result<(), HydraHarpError> {
        Ok(())
    }

    /// No-op: never produces any records, so the returned count is 0.
    fn read_fifo(&mut self, _buffer: &mut [u32], _records_to_fetch: i32) -> Result<i32, HydraHarpError> {
        Ok(0)
    }

    /// The simulated measurement is always already finished.
    fn get_CTC_status(&self) -> Result<CTCStatus, HydraHarpError> {
        Ok(CTCStatus::Ended)
    }
}
|
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Block, FnArg, Ident, ImplItem, ItemImpl, LitStr};
/// Generate a blocking method for each async method in an impl block. Supports either `tokio` or `async-std` backend.
/// Generated methods are suffixed with `_blocking`.
///
/// # Example `tokio`
/// ```
/// use block_on_proc::block_on;
///
/// struct Tokio {}
///
/// #[block_on("tokio")]
/// impl Tokio {
/// async fn test_async(&self) {}
/// }
/// ```
///
/// Generates the following impl block
/// ```no_run
/// # struct Dummy {}
/// # impl Dummy {
/// async fn test_async(&self) {}
///
/// fn test_async_blocking(&self) {
/// use tokio::runtime::Runtime;
/// let mut rt = Runtime::new().unwrap();
/// rt.block_on(self.test_async())
/// }
/// # }
/// ```
///
/// # Example `async-std`
/// ```
/// use block_on_proc::block_on;
///
/// struct AsyncStd {}
///
/// #[block_on("async-std")]
/// impl AsyncStd {
/// async fn test_async(&self) {}
/// }
/// ```
///
/// Generates the following method in the same impl block
/// ```no_run
/// # struct Dummy {}
/// # impl Dummy {
/// async fn test_async(&self) {}
///
/// fn test_async_blocking(&self) {
/// use async_std::task;
/// task::block_on(self.test_async())
/// }
/// # }
/// ```
#[proc_macro_attribute]
pub fn block_on(attr: TokenStream, tokens: TokenStream) -> TokenStream {
    // Backend to generate blocking shims for: "tokio" or "async-std".
    let attr = parse_macro_input!(attr as LitStr).value();
    let orig_tokens = tokens;
    let in_impl = parse_macro_input!(orig_tokens as ItemImpl);
    // Self type of the impl block; needed to call associated functions
    // (async methods without a `self` receiver).
    let strct = in_impl.self_ty.clone();
    // The output is the original impl with one `_blocking` method appended
    // per async method. (A second clone `out_impl` existed here before but
    // was never used - removed as dead code.)
    let mut orig_impl = in_impl.clone();
    for item in in_impl.items {
        match item {
            ImplItem::Method(method) => {
                let name = &method.sig.ident;
                let mut out_method = method.clone();
                // Only async methods get a generated blocking twin.
                if out_method.sig.asyncness.is_none() {
                    continue;
                }
                // The generated method is synchronous and suffixed `_blocking`.
                out_method.sig.asyncness = None;
                out_method.sig.ident = Ident::new(
                    &format!("{}_blocking", method.sig.ident),
                    method.sig.ident.span(),
                );
                let inputs = &method.sig.inputs;
                // Does the method take a `self` receiver?
                let rec = inputs
                    .into_iter()
                    .any(|arg| matches!(arg, FnArg::Receiver(_)));
                // Patterns of the non-receiver arguments, forwarded verbatim
                // to the async method call.
                let call_args = inputs.into_iter().filter_map(|arg| match arg {
                    FnArg::Receiver(_) => None,
                    FnArg::Typed(arg) => Some(arg.pat.clone()),
                });
                // Body of the blocking shim: drive the async call to
                // completion on the chosen runtime.
                let block_proc2 = if rec {
                    if attr == "tokio" {
                        quote! {
                            {
                                use tokio::runtime::Runtime;
                                let mut rt = Runtime::new().unwrap();
                                rt.block_on(self.#name(#(#call_args),*))
                            }
                        }
                    } else if attr == "async-std" {
                        quote! {
                            {
                                use async_std::task;
                                task::block_on(self.#name(#(#call_args),*))
                            }
                        }
                    } else {
                        panic!("Only `tokio` and `async-std` backends are supported!")
                    }
                } else if attr == "tokio" {
                    quote! {
                        {
                            use tokio::runtime::Runtime;
                            let mut rt = Runtime::new().unwrap();
                            rt.block_on(#strct::#name(#(#call_args),*))
                        }
                    }
                } else if attr == "async-std" {
                    quote! {
                        {
                            use async_std::task;
                            task::block_on(#strct::#name(#(#call_args),*))
                        }
                    }
                } else {
                    panic!("Only `tokio` and `async-std` backends are supported!")
                };
                let block_proc = proc_macro::TokenStream::from(block_proc2);
                out_method.block = parse_macro_input!(block_proc as Block);
                orig_impl.items.push(ImplItem::Method(out_method));
            }
            _ => {}
        }
    }
    // Returns generated tokens
    let out = quote! {
        #orig_impl
    };
    out.into()
}
// Compile-pass UI test driven by `trybuild`: asserts that the macro expands
// successfully on the example code in `src/test.rs`.
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.pass("src/test.rs");
}
|
use crate::sig::*;
use nom::{
bytes::complete::{tag, take},
number::complete::le_u64,
IResult,
};
use std::fmt;
/// The `wallascn.uni` section: an opaque payload kept as raw bytes and
/// round-tripped verbatim by `read_wallascn_uni` / `HasWrite::write`.
#[derive(Debug)]
pub struct WallascnUni {
    // Raw section payload, exactly as read from the input.
    source: Vec<u8>,
}
impl HasWrite for WallascnUni {
    /// Serializes the section: the name bytes, a single zero byte, the
    /// length header produced by `offset`, then the raw payload. This is
    /// the inverse of `read_wallascn_uni`.
    fn write(&self) -> Vec<u8> {
        let mut out = self.name().as_bytes().to_vec();
        // `push` appends the separator byte directly instead of allocating
        // a temporary `vec![0u8]` as the original did.
        out.push(0u8);
        out.extend(offset(self.source.len()).iter());
        // `extend_from_slice` is the direct byte-copy form for `&[u8]`.
        out.extend_from_slice(&self.source);
        out
    }
    /// Fixed section name used both as the tag and the serialized prefix.
    fn name(&self) -> &str {
        "wallascn.uni"
    }
}
impl fmt::Display for WallascnUni {
    /// Renders the section name followed by the payload length.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = self.name();
        let len = self.source.len();
        write!(f, "{} source.len: {}", name, len)
    }
}
/// Parses a `wallascn.uni` section: the literal tag, one separator byte,
/// a little-endian `u64` payload length, then that many payload bytes.
/// Returns the remaining input and the parsed section.
pub fn read_wallascn_uni(i: &[u8]) -> IResult<&[u8], WallascnUni> {
    let (i, _) = tag("wallascn.uni")(i)?;
    // Skip the single zero byte emitted after the name by `write`.
    let (i, _) = take(1u8)(i)?;
    // Despite the name, this is the payload *length* header.
    let (i, offset) = le_u64(i)?;
    let (i, source) = take(offset)(i)?;
    Ok((
        i,
        WallascnUni {
            source: source.to_vec(),
        },
    ))
}
|
use stringify::Stringify;
use std::ffi::{CStr, CString};
use std::borrow::Cow;
// `convert_to_cow_str` on an i32 should produce a `Cow<str>` comparing
// equal to the decimal rendering.
#[test]
fn i32_convert_to_cow_str_test() {
    let integer = 1;
    assert_eq!(integer.convert_to_cow_str(), Cow::Borrowed("1"));
}
// `convert_to_cstr` should match a `CStr` holding the decimal rendering.
#[test]
fn i32_convert_to_cstr_test() {
    let integer = 1;
    // Bind the CString to a variable so its buffer outlives the `CStr`.
    // The original called `.as_ptr()` on a temporary `CString`, which is
    // dropped at the end of the statement, leaving `cstr` dangling (UB).
    let owned = CString::new("1").unwrap();
    let cstr = unsafe { CStr::from_ptr(owned.as_ptr()) };
    assert!(integer.convert_to_cstr() == cstr);
}
// `convert_to_str` yields a plain `&str` of the decimal rendering.
#[test]
fn i32_convert_to_str_test() {
    let integer = 1;
    assert_eq!(integer.convert_to_str(), "1");
}
// `convert_to_string` yields an owned `String` of the decimal rendering.
#[test]
fn i32_convert_to_string_test() {
    let integer = 1;
    assert_eq!(integer.convert_to_string(), "1".to_string());
}
// `convert_to_libc_char` should point at the same first byte as a C string
// holding the decimal rendering.
#[test]
fn i32_convert_to_libc_char_test() {
    let integer = 1;
    let libc_char1 = integer.convert_to_libc_char();
    // Bind the CString so its buffer outlives the raw pointer. The original
    // called `.as_ptr()` on a temporary `CString` that was dropped at the
    // end of the statement, making `libc_char2` a dangling pointer (UB).
    let owned = CString::new("1".to_string()).unwrap();
    let libc_char2 = owned.as_ptr();
    unsafe {
        assert_eq!(*libc_char1, *libc_char2);
    }
}
|
use std::collections::HashSet;
use std::borrow::Cow;
/// Lexical tokens produced for the CHIP-8 assembler.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Token<'a> {
    // A recognized instruction mnemonic, borrowed from the source text.
    INSTRUCTION(&'a str),
    // A character the scanner could not classify.
    UNKNOWN(char),
    // A `vX` register; the hex digit after `v` (not range-checked here).
    REGISTER(u16),
    INDEX,    // produced from the literal "i"
    DT,       // produced from the literal "dt"
    ST,       // produced from the literal "st"
    KEY,      // produced from the literal "k"
    F,        // produced from the literal "f"
    B,        // produced from the literal "b"
    NEWLINE,
    MINUS,
    COMMA,
    EOF,
    LBRACKET,
    RBRACKET,
    BYTE(u16),
    ADDR(u16),
    NIBBLE(u16),
}
/// The set of instruction mnemonics recognized by the assembler.
pub struct Instructions(HashSet<&'static str>);
impl Instructions {
    /// Builds the mnemonic set from a static table.
    ///
    /// Each mnemonic is listed exactly once; the original inserted `"drw"`
    /// twice, which a `HashSet` deduplicates but is redundant.
    pub fn new() -> Self {
        const MNEMONICS: &[&str] = &[
            "sys", "cls", "ret", "jp", "call", "se", "sne", "ld", "add",
            "or", "and", "sub", "xor", "shl", "shr", "subn", "rnd", "drw",
            "skp", "sknp",
        ];
        Self(MNEMONICS.iter().copied().collect())
    }
}
impl<'a> Token<'a> {
    /// Classifies an identifier into a token.
    ///
    /// Match order matters: known mnemonics win over everything, then the
    /// named operands (`i`, `k`, `dt`, `st`, `f`, `b`), then `vX` registers
    /// (`X` = one hex digit), and anything else is an error.
    pub fn from(inst: &Instructions, ident: &'a str) -> Result<Token<'a>, Cow<'static, str>> {
        match ident {
            ident if inst.0.contains(ident) => Ok(Token::INSTRUCTION(ident)),
            "i" => Ok(Token::INDEX),
            "k" => Ok(Token::KEY),
            "dt" => Ok(Token::DT),
            "st" => Ok(Token::ST),
            "f" => Ok(Token::F),
            "b" => Ok(Token::B),
            // `vX`: exactly one hex digit after the leading `v`.
            reg_str if reg_str.starts_with('v') => {
                if reg_str.len() != 2 {
                    return Err(format!("Invalid register {}", reg_str).into());
                }
                // NOTE(review): the original contained the mojibake token
                // `®_str` here (a mangled `&reg_str`), which does not
                // compile; restored to the intended slice of the hex digit.
                let hex_val = u16::from_str_radix(&reg_str[1..], 16)
                    .map_err(|_| format!("Invalid hex value {}", reg_str))?;
                Ok(Token::REGISTER(hex_val))
            }
            token => Err(format!("Invalid token {}", token).into()),
        }
    }
}
|
//! `malloc`-based Box.
use stable_deref_trait::StableDeref;
use std::cmp::Ordering;
use std::convert::{AsMut, AsRef};
use std::fmt::{Debug, Display, Formatter, Pointer, Result as FormatResult};
use std::hash::{Hash, Hasher};
use std::iter::{DoubleEndedIterator, FromIterator, IntoIterator};
use std::marker::Unpin;
use std::mem::{forget, MaybeUninit};
use std::ops::{Deref, DerefMut};
use std::pin::Pin;
use std::ptr::{copy_nonoverlapping, drop_in_place, read, write};
use std::slice::{from_raw_parts, from_raw_parts_mut, Iter, IterMut};
use std::str::{from_utf8, from_utf8_unchecked, Utf8Error};
use std::{
borrow::{Borrow, BorrowMut},
ptr::NonNull,
};
use internal::{gen_free, gen_malloc, gen_realloc, Unique};
#[cfg(test)]
use internal::{DropCounter, PanicOnClone};
#[cfg(test)]
use std::iter::{once, repeat};
#[cfg(test)]
use std::mem::size_of;
#[cfg(nightly_channel)]
use std::marker::Unsize;
#[cfg(nightly_channel)]
use std::ops::CoerceUnsized;
use free::Free;
//{{{ Basic structure -----------------------------------------------------------------------------
/// A malloc-backed box. This structure allows Rust to exchange objects with C without cloning.
///
/// Deallocation goes through the pointee's `Free` impl (see `Drop` below),
/// so the pointer must come from `malloc`-family allocators.
pub struct MBox<T: ?Sized + Free>(Unique<T>);
impl<T: ?Sized + Free> MBox<T> {
    /// Constructs a new malloc-backed box from a pointer allocated by `malloc`.
    ///
    /// # Safety
    ///
    /// The `ptr` must be allocated via `malloc()`, `calloc()` or similar C functions that is
    /// expected to be deallocated using `free()`. It must not be null. The content of the pointer
    /// must be already initialized. The pointer's ownership is passed into the box, and thus should
    /// not be used after this function returns.
    pub unsafe fn from_raw(ptr: *mut T) -> Self {
        // SAFETY: non-nullness is the caller's obligation (see docs above).
        Self::from_non_null_raw(NonNull::new_unchecked(ptr))
    }
    /// Constructs a new malloc-backed box from a non-null pointer allocated by `malloc`.
    ///
    /// # Safety
    ///
    /// The `ptr` must be allocated via `malloc()`, `calloc()` or similar C functions that is
    /// expected to be deallocated using `free()`. The content of the pointer must be already
    /// initialized. The pointer's ownership is passed into the box, and thus should not be used
    /// after this function returns.
    pub unsafe fn from_non_null_raw(ptr: NonNull<T>) -> Self {
        Self(Unique::new(ptr))
    }
    /// Obtains the pointer owned by the box.
    ///
    /// Ownership stays with the box; the caller must not free this pointer.
    pub fn as_ptr(boxed: &Self) -> *const T {
        boxed.0.as_non_null_ptr().as_ptr()
    }
    /// Obtains the mutable pointer owned by the box.
    ///
    /// Ownership stays with the box; the caller must not free this pointer.
    pub fn as_mut_ptr(boxed: &mut Self) -> *mut T {
        boxed.0.as_non_null_ptr().as_ptr()
    }
    /// Consumes the box and returns the original pointer.
    ///
    /// The caller is responsible for `free`ing the pointer after this.
    pub fn into_raw(boxed: Self) -> *mut T {
        Self::into_non_null_raw(boxed).as_ptr()
    }
    /// Consumes the box and returns the original non-null pointer.
    ///
    /// The caller is responsible for `free`ing the pointer after this.
    pub fn into_non_null_raw(boxed: Self) -> NonNull<T> {
        let ptr = boxed.0.as_non_null_ptr();
        // Suppress Drop so the allocation is not freed here; ownership
        // transfers to the returned pointer.
        forget(boxed);
        ptr
    }
}
impl<T: ?Sized + Free> Drop for MBox<T> {
    // Releases the pointee through its `Free` implementation (which both
    // drops the value and returns the memory to the C allocator).
    fn drop(&mut self) {
        // SAFETY: the pointer is assumed to be obtained from `malloc()`.
        unsafe { T::free(self.0.as_non_null_ptr()) };
    }
}
// Standard smart-pointer plumbing: `MBox` dereferences, borrows and
// converts like `Box`, all delegating to the owned raw pointer.
impl<T: ?Sized + Free> Deref for MBox<T> {
    type Target = T;
    fn deref(&self) -> &T {
        // SAFETY: the pointee is initialized and owned by the box.
        unsafe { &*Self::as_ptr(self) }
    }
}
// The heap address is stable for the lifetime of the box.
unsafe impl<T: ?Sized + Free> StableDeref for MBox<T> {}
impl<T: ?Sized + Free> Unpin for MBox<T> {}
impl<T: ?Sized + Free> DerefMut for MBox<T> {
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: exclusive access through `&mut self`.
        unsafe { &mut *Self::as_mut_ptr(self) }
    }
}
impl<T: ?Sized + Free> AsRef<T> for MBox<T> {
    fn as_ref(&self) -> &T {
        self
    }
}
impl<T: ?Sized + Free> AsMut<T> for MBox<T> {
    fn as_mut(&mut self) -> &mut T {
        self
    }
}
impl<T: ?Sized + Free> Borrow<T> for MBox<T> {
    fn borrow(&self) -> &T {
        self
    }
}
impl<T: ?Sized + Free> BorrowMut<T> for MBox<T> {
    fn borrow_mut(&mut self) -> &mut T {
        self
    }
}
// Nightly-only: allows e.g. `MBox<[T; N]>` to coerce to `MBox<[T]>`.
#[cfg(nightly_channel)]
impl<T: ?Sized + Free + Unsize<U>, U: ?Sized + Free> CoerceUnsized<MBox<U>> for MBox<T> {}
// Formatting, hashing and comparison all delegate to the pointee, so an
// `MBox<T>` prints and compares like a plain `T` — except `Pointer`,
// which formats the heap address itself.
impl<T: ?Sized + Free> Pointer for MBox<T> {
    fn fmt(&self, formatter: &mut Formatter) -> FormatResult {
        Pointer::fmt(&Self::as_ptr(self), formatter)
    }
}
impl<T: ?Sized + Free + Debug> Debug for MBox<T> {
    fn fmt(&self, formatter: &mut Formatter) -> FormatResult {
        self.deref().fmt(formatter)
    }
}
impl<T: ?Sized + Free + Display> Display for MBox<T> {
    fn fmt(&self, formatter: &mut Formatter) -> FormatResult {
        self.deref().fmt(formatter)
    }
}
impl<T: ?Sized + Free + Hash> Hash for MBox<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}
impl<U: ?Sized + Free, T: ?Sized + Free + PartialEq<U>> PartialEq<MBox<U>> for MBox<T> {
    fn eq(&self, other: &MBox<U>) -> bool {
        self.deref().eq(other.deref())
    }
}
impl<T: ?Sized + Free + Eq> Eq for MBox<T> {}
impl<U: ?Sized + Free, T: ?Sized + Free + PartialOrd<U>> PartialOrd<MBox<U>> for MBox<T> {
    fn partial_cmp(&self, other: &MBox<U>) -> Option<Ordering> {
        self.deref().partial_cmp(other.deref())
    }
}
impl<T: ?Sized + Free + Ord> Ord for MBox<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.deref().cmp(other.deref())
    }
}
//}}}
//{{{ Single object -------------------------------------------------------------------------------
impl<T> MBox<T> {
    /// Constructs a new malloc-backed box, and move an initialized value into it.
    pub fn new(value: T) -> Self {
        let storage = gen_malloc(1);
        // SAFETY: the `storage` is uninitialized and enough to store T.
        // this pointer is obtained via `malloc` and thus good for `from_raw`.
        unsafe {
            write(storage.as_ptr(), value);
            Self::from_non_null_raw(storage)
        }
    }
    /// Constructs a new malloc-backed box with uninitialized content.
    pub fn new_uninit() -> MBox<MaybeUninit<T>> {
        let storage = gen_malloc(1);
        // SAFETY: The storage is allowed to be uninitialized.
        unsafe { MBox::from_non_null_raw(storage) }
    }
    /// Constructs a new `Pin<MBox<T>>`. If `T` does not implement `Unpin`, then `value` will be
    /// pinned in memory and cannot be moved.
    pub fn pin(value: T) -> Pin<Self> {
        Self::into_pin(Self::new(value))
    }
    /// Converts an `MBox<T>` into a single-item `MBox<[T]>`.
    ///
    /// This conversion does not allocate on the heap and happens in place.
    pub fn into_boxed_slice(boxed: Self) -> MBox<[T]> {
        // SAFETY: free() only cares about the allocated size, and `T` and
        // `[T; 1]` are equivalent in terms of drop() and free().
        unsafe { MBox::from_raw_parts(Self::into_raw(boxed), 1) }
    }
    /// Consumes the `MBox`, returning the wrapped value.
    pub fn into_inner(boxed: Self) -> T {
        let mut dst = MaybeUninit::uninit();
        let src = Self::into_non_null_raw(boxed);
        // SAFETY: after calling `into_non_null_raw` above, we have the entire ownership of the
        // malloc'ed pointer `src`. The content is moved into the destination. After that, we can
        // free `src` without touching the content. So there is a single copy of the content fully
        // initialized into `dst` which is safe to assume_init.
        unsafe {
            copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), 1);
            gen_free(src);
            dst.assume_init()
        }
    }
    /// Converts an `MBox<T>` into a `Pin<MBox<T>>`.
    ///
    /// This conversion does not allocate on the heap and happens in place.
    pub fn into_pin(boxed: Self) -> Pin<Self> {
        // SAFETY: Same reason as why `Box::into_pin` is safe.
        unsafe { Pin::new_unchecked(boxed) }
    }
    /// Consumes and leaks the `MBox`, returning a mutable reference, `&'a mut T`.
    pub fn leak<'a>(boxed: Self) -> &'a mut T
    where
        T: 'a,
    {
        // SAFETY: into_raw takes the ownership of the box, which is then immediately leaked. Thus,
        // no one is able to call `gen_free` on this pointer and thus safe to be used in the rest of
        // its lifetime.
        unsafe { &mut *Self::into_non_null_raw(boxed).as_ptr() }
    }
}
impl<T> MBox<MaybeUninit<T>> {
    /// Converts into an initialized box.
    ///
    /// # Safety
    ///
    /// The caller should guarantee `*self` is indeed initialized.
    pub unsafe fn assume_init(self) -> MBox<T> {
        // `cast()` keeps the same address; `MaybeUninit<T>` and `T` have
        // the same layout, so the allocation is reinterpreted in place.
        MBox::from_non_null_raw(Self::into_non_null_raw(self).cast())
    }
}
impl<T> From<T> for MBox<T> {
    // Moves `value` into a fresh malloc allocation.
    fn from(value: T) -> MBox<T> {
        MBox::new(value)
    }
}
impl<T: Clone> Clone for MBox<T> {
    fn clone(&self) -> MBox<T> {
        Self::new(self.deref().clone())
    }
    // Clones into the existing allocation rather than reallocating.
    fn clone_from(&mut self, source: &Self) {
        self.deref_mut().clone_from(source);
    }
}
impl<T: Default> Default for MBox<T> {
    fn default() -> MBox<T> {
        MBox::new(T::default())
    }
}
// Dropping the box should drop the contained value exactly once.
#[test]
fn test_single_object() {
    let counter = DropCounter::default();
    {
        let mbox = MBox::new(counter.clone());
        counter.assert_eq(0);
        drop(mbox);
    }
    counter.assert_eq(1);
}
// `into_raw` must hand back an intact value and relinquish ownership,
// leaving the caller responsible for freeing.
#[test]
fn test_into_raw() {
    let mbox = MBox::new(66u8);
    let raw = MBox::into_raw(mbox);
    unsafe {
        assert_eq!(*raw, 66u8);
        gen_free(NonNull::new(raw).unwrap());
    }
}
// Each clone owns an independent copy; drops accumulate per copy.
#[test]
fn test_clone() {
    let counter = DropCounter::default();
    {
        let first_mbox = MBox::new(counter.clone());
        {
            let second_mbox = first_mbox.clone();
            counter.assert_eq(0);
            drop(second_mbox);
        }
        counter.assert_eq(1);
    }
    counter.assert_eq(2);
}
// `clone_from` drops the old contents of the destination (hence the extra
// drop count in the middle) before copying in the source value.
#[test]
fn test_clone_from() {
    let counter = DropCounter::default();
    {
        let first_mbox = MBox::new(counter.clone());
        {
            let mut second_mbox = MBox::new(counter.clone());
            counter.assert_eq(0);
            second_mbox.clone_from(&first_mbox);
            counter.assert_eq(1);
        }
        counter.assert_eq(2);
    }
    counter.assert_eq(3);
}
// A conditionally-initialized MBox must not need a hidden drop flag, and
// the box itself must stay pointer-sized.
#[test]
fn test_no_drop_flag() {
    fn do_test_for_drop_flag(branch: bool, expected: usize) {
        let counter = DropCounter::default();
        let inner_counter = counter.deref().clone();
        {
            let mbox;
            if branch {
                mbox = MBox::new(counter.clone());
                let _ = &mbox;
            }
            inner_counter.assert_eq(0);
        }
        inner_counter.assert_eq(expected);
    }
    do_test_for_drop_flag(true, 1);
    do_test_for_drop_flag(false, 0);
    assert_eq!(
        size_of::<MBox<DropCounter>>(),
        size_of::<*mut DropCounter>()
    );
}
// `{:p}`, `{}` and `{:?}` should forward to Pointer/Display/Debug of the
// pointee respectively.
#[cfg(feature = "std")]
#[test]
fn test_format() {
    let a = MBox::new(3u64);
    assert_eq!(format!("{:p}", a), format!("{:p}", MBox::as_ptr(&a)));
    assert_eq!(format!("{}", a), "3");
    assert_eq!(format!("{:?}", a), "3");
}
// Deref/AsRef/AsMut/Borrow/BorrowMut and the comparison impls should all
// behave as if operating on the plain inner value.
#[test]
fn test_standard_traits() {
    let mut a = MBox::new(0u64);
    assert_eq!(*a, 0);
    *a = 3;
    assert_eq!(*a, 3);
    assert_eq!(*a.as_ref(), 3);
    assert_eq!(*a.as_mut(), 3);
    assert_eq!(*(a.borrow() as &u64), 3);
    assert_eq!(*(a.borrow_mut() as &mut u64), 3);
    assert!(a == MBox::new(3u64));
    assert!(a != MBox::new(0u64));
    assert!(a < MBox::new(4u64));
    assert!(a > MBox::new(2u64));
    assert!(a <= MBox::new(4u64));
    assert!(a >= MBox::new(2u64));
    assert_eq!(a.cmp(&MBox::new(7u64)), Ordering::Less);
    assert_eq!(MBox::<u64>::default(), MBox::new(0u64));
}
// Even a zero-sized allocation must yield a non-null pointer.
#[test]
fn test_zero_sized_type() {
    let a = MBox::new(());
    assert!(!MBox::as_ptr(&a).is_null());
}
// `Option<MBox<_>>` should use the null-pointer niche: same size as the box.
#[test]
fn test_non_zero() {
    let b = 0u64;
    assert!(!Some(MBox::new(0u64)).is_none());
    assert!(!Some(MBox::new(())).is_none());
    assert!(!Some(MBox::new(&b)).is_none());
    assert_eq!(size_of::<Option<MBox<u64>>>(), size_of::<MBox<u64>>());
    assert_eq!(size_of::<Option<MBox<()>>>(), size_of::<MBox<()>>());
    assert_eq!(
        size_of::<Option<MBox<&'static u64>>>(),
        size_of::<MBox<&'static u64>>()
    );
}
//}}}
//{{{ Slice helpers -------------------------------------------------------------------------------
mod slice_helper {
    use super::*;
    /// A `Vec`-like structure backed by `malloc()`.
    pub struct MSliceBuilder<T> {
        // Start of the malloc'ed buffer.
        ptr: NonNull<T>,
        // Number of slots allocated.
        cap: usize,
        // Number of slots initialized (always <= cap).
        len: usize,
    }
    impl<T> MSliceBuilder<T> {
        /// Creates a new slice builder with an initial capacity.
        pub fn with_capacity(cap: usize) -> MSliceBuilder<T> {
            MSliceBuilder {
                ptr: gen_malloc(cap),
                cap,
                len: 0,
            }
        }
        /// Appends `obj`, doubling the capacity when full.
        pub fn push(&mut self, obj: T) {
            // SAFETY:
            // - self.ptr is initialized from gen_malloc() so it can be placed into gen_realloc()
            // - we guarantee that `self.ptr `points to an array of nonzero length `self.cap`, and
            //   the `if` condition ensures the invariant `self.len < self.cap`, so
            //   `self.ptr.add(self.len)` is always a valid (but uninitialized) object.
            // - since `self.ptr[self.len]` is not yet initialized, we can `write()` into it safely.
            unsafe {
                if self.len >= self.cap {
                    if self.cap == 0 {
                        self.cap = 1;
                    } else {
                        self.cap *= 2;
                    }
                    self.ptr = gen_realloc(self.ptr, self.cap);
                }
                write(self.ptr.as_ptr().add(self.len), obj);
            }
            self.len += 1;
        }
        /// Finishes building: the box takes ownership of the buffer
        /// (trimmed logically to `len`; the trailing capacity is unused).
        pub fn into_mboxed_slice(self) -> MBox<[T]> {
            // SAFETY: `self.ptr` has been allocated by malloc(), and its length is self.cap
            // (>= self.len).
            let slice = unsafe { MBox::from_raw_parts(self.ptr.as_ptr(), self.len) };
            // Suppress Drop: the buffer now belongs to the returned box.
            forget(self);
            slice
        }
    }
    impl<T> MSliceBuilder<MaybeUninit<T>> {
        /// Sets the length of the builder to the same as the capacity. The elements in the
        /// uninitialized tail remains uninitialized.
        pub fn set_len_to_cap(&mut self) {
            self.len = self.cap;
        }
    }
    impl<T> Drop for MSliceBuilder<T> {
        // Frees the buffer if the builder is abandoned before
        // `into_mboxed_slice`. NOTE(review): this frees the memory but does
        // not drop the `len` initialized elements — confirm that is intended
        // (it matters only for `T` with a non-trivial `Drop`).
        fn drop(&mut self) {
            unsafe {
                gen_free(self.ptr);
            }
        }
    }
}
use self::slice_helper::MSliceBuilder;
/// The iterator returned from `MBox<[T]>::into_iter()`.
pub struct MSliceIntoIter<T> {
    // Start of the malloc'ed buffer (freed in `Drop`).
    ptr: NonNull<T>,
    // Half-open index range [begin, end) of elements not yet yielded.
    begin: usize,
    end: usize,
}
impl<T> Iterator for MSliceIntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.begin == self.end {
            None
        } else {
            // SAFETY: begin < end, so the slot is in-bounds and still owned
            // by the iterator; `read` moves the value out exactly once
            // because `begin` is advanced past it immediately.
            unsafe {
                let ptr = self.ptr.as_ptr().add(self.begin);
                self.begin += 1;
                Some(read(ptr))
            }
        }
    }
    // Exact: the remaining count is always `end - begin`.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end - self.begin;
        (len, Some(len))
    }
}
impl<T> DoubleEndedIterator for MSliceIntoIter<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.begin == self.end {
            None
        } else {
            // SAFETY: mirror of `next` — `end` is decremented first, so the
            // read slot is in-bounds and yielded exactly once.
            unsafe {
                self.end -= 1;
                let ptr = self.ptr.as_ptr().add(self.end);
                Some(read(ptr))
            }
        }
    }
}
// The iterator owns its elements, so it is Send/Sync whenever T is.
unsafe impl<T: Send> Send for MSliceIntoIter<T> {}
unsafe impl<T: Sync> Sync for MSliceIntoIter<T> {}
impl<T> ExactSizeIterator for MSliceIntoIter<T> {}
impl<T> Drop for MSliceIntoIter<T> {
    fn drop(&mut self) {
        // Drop the elements not yet yielded, then free the whole buffer.
        unsafe {
            let base = self.ptr.as_ptr().add(self.begin);
            let len = self.end - self.begin;
            let slice = from_raw_parts_mut(base, len) as *mut [T];
            drop_in_place(slice);
            gen_free(self.ptr);
        }
    }
}
//}}}
//{{{ Slice ---------------------------------------------------------------------------------------
impl<T> MBox<[T]> {
    /// Constructs a new malloc-backed slice from the pointer and the length (number of items).
    ///
    /// # Safety
    ///
    /// `ptr` must be allocated via `malloc()` or similar C functions. It must not be null.
    ///
    /// The `malloc`ed size of the pointer must be at least `len * size_of::<T>()`. The content
    /// must already been initialized.
    pub unsafe fn from_raw_parts(ptr: *mut T, len: usize) -> Self {
        // Build the fat (pointer, length) slice pointer, then take ownership.
        let ptr = from_raw_parts_mut(ptr, len) as *mut [T];
        Self::from_raw(ptr)
    }
    /// Constructs a new boxed slice with uninitialized contents.
    pub fn new_uninit_slice(len: usize) -> MBox<[MaybeUninit<T>]> {
        let mut builder = MSliceBuilder::with_capacity(len);
        // MaybeUninit slots need no initialization, so the whole capacity
        // can be claimed as the length directly.
        builder.set_len_to_cap();
        builder.into_mboxed_slice()
    }
    /// Decomposes the boxed slice into a pointer to the first element and the slice length.
    ///
    /// The caller becomes responsible for dropping the elements and freeing
    /// the buffer.
    pub fn into_raw_parts(mut self) -> (*mut T, usize) {
        let len = self.len();
        let ptr = self.as_mut_ptr();
        // Suppress Drop: ownership moves to the returned raw parts.
        forget(self);
        (ptr, len)
    }
}
impl<T> MBox<[MaybeUninit<T>]> {
    /// Converts into an initialized boxed slice.
    ///
    /// # Safety
    ///
    /// The caller should guarantee `*self` is indeed initialized.
    pub unsafe fn assume_init(self) -> MBox<[T]> {
        // Same address and length; only the element type is reinterpreted.
        MBox::from_raw(Self::into_raw(self) as *mut [T])
    }
}
impl<T> Default for MBox<[T]> {
    // An empty slice still carries a (non-null) zero-length allocation so
    // that `Drop` can unconditionally free it.
    fn default() -> Self {
        unsafe { Self::from_raw_parts(gen_malloc(0).as_ptr(), 0) }
    }
}
impl<T: Clone> Clone for MBox<[T]> {
    fn clone(&self) -> Self {
        Self::from_slice(self)
    }
}
impl<T: Clone> MBox<[T]> {
    /// Creates a new `malloc`-boxed slice by cloning the content of an existing slice.
    pub fn from_slice(slice: &[T]) -> MBox<[T]> {
        // Exact capacity up front: no reallocation during the pushes.
        let mut builder = MSliceBuilder::with_capacity(slice.len());
        for item in slice {
            builder.push(item.clone());
        }
        builder.into_mboxed_slice()
    }
}
impl<T> FromIterator<T> for MBox<[T]> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iter = iter.into_iter();
        // Seed the capacity from the iterator's own estimate; `max(1)`
        // keeps the doubling strategy in `push` well-defined.
        let (lower_size, upper_size) = iter.size_hint();
        let initial_capacity = upper_size.unwrap_or(lower_size).max(1);
        let mut builder = MSliceBuilder::with_capacity(initial_capacity);
        for item in iter {
            builder.push(item);
        }
        builder.into_mboxed_slice()
    }
}
impl<T> IntoIterator for MBox<[T]> {
    type Item = T;
    type IntoIter = MSliceIntoIter<T>;
    // Transfers buffer ownership to the iterator, which yields elements by
    // value and frees the buffer in its Drop.
    fn into_iter(self) -> MSliceIntoIter<T> {
        let (ptr, len) = self.into_raw_parts();
        MSliceIntoIter {
            // SAFETY: the box's pointer is never null (see `from_raw_parts`).
            ptr: unsafe { NonNull::new_unchecked(ptr) },
            begin: 0,
            end: len,
        }
    }
}
// Borrowed iteration delegates to the plain slice iterators.
impl<'a, T> IntoIterator for &'a MBox<[T]> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
impl<'a, T> IntoIterator for &'a mut MBox<[T]> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;
    fn into_iter(self) -> IterMut<'a, T> {
        self.iter_mut()
    }
}
// A slice adopted via `from_raw_parts` must expose the raw contents intact.
#[test]
fn test_slice() {
    unsafe {
        let slice_content = gen_malloc::<u64>(5).as_ptr();
        *slice_content.offset(0) = 16458340076686561191;
        *slice_content.offset(1) = 15635007859502065083;
        *slice_content.offset(2) = 4845947824042606450;
        *slice_content.offset(3) = 8907026173756975745;
        *slice_content.offset(4) = 7378932587879886134;
        let mbox = MBox::from_raw_parts(slice_content, 5);
        assert_eq!(
            &mbox as &[u64],
            &[
                16458340076686561191,
                15635007859502065083,
                4845947824042606450,
                8907026173756975745,
                7378932587879886134
            ]
        );
    }
}
// Dropping the slice must drop every element exactly once.
#[test]
fn test_slice_with_drops() {
    let counter = DropCounter::default();
    unsafe {
        let slice_content = gen_malloc::<DropCounter>(3).as_ptr();
        {
            write(slice_content.offset(0), counter.clone());
            write(slice_content.offset(1), counter.clone());
            write(slice_content.offset(2), counter.clone());
        }
        counter.assert_eq(0);
        let mbox = MBox::from_raw_parts(slice_content, 3);
        mbox[0].assert_eq(0);
        mbox[1].assert_eq(0);
        mbox[2].assert_eq(0);
        assert_eq!(mbox.len(), 3);
    }
    counter.assert_eq(3);
}
// Nightly-only: unsizing `MBox<[T; 2]>` to `MBox<[T]>` keeps contents and
// drop behavior intact.
#[cfg(nightly_channel)]
#[test]
fn test_coerce_unsized() {
    let counter = DropCounter::default();
    {
        let pre_box = MBox::new([counter.clone(), counter.clone()]);
        counter.assert_eq(0);
        pre_box[0].assert_eq(0);
        pre_box[1].assert_eq(0);
        assert_eq!(pre_box.len(), 2);
        let post_box: MBox<[DropCounter]> = pre_box;
        counter.assert_eq(0);
        post_box[0].assert_eq(0);
        post_box[1].assert_eq(0);
        assert_eq!(post_box.len(), 2);
    }
    counter.assert_eq(2);
}
// The default (empty) slice still holds a non-null allocation.
#[test]
fn test_empty_slice() {
    let mbox = MBox::<[DropCounter]>::default();
    let sl: &[DropCounter] = &mbox;
    assert_eq!(sl.len(), 0);
    assert!(!sl.as_ptr().is_null());
}
// Nightly-only: unsizing an empty fixed-size array preserves the non-null,
// zero-length invariants.
#[cfg(nightly_channel)]
#[test]
fn test_coerce_from_empty_slice() {
    let pre_box = MBox::<[DropCounter; 0]>::new([]);
    assert_eq!(pre_box.len(), 0);
    assert!(!pre_box.as_ptr().is_null());
    let post_box: MBox<[DropCounter]> = pre_box;
    let sl: &[DropCounter] = &post_box;
    assert_eq!(sl.len(), 0);
    assert!(!sl.as_ptr().is_null());
}
// Cloning a slice deep-copies each element; both copies drop independently.
#[test]
fn test_clone_slice() {
    let counter = DropCounter::default();
    unsafe {
        let slice_content = gen_malloc::<DropCounter>(3).as_ptr();
        {
            write(slice_content.offset(0), counter.clone());
            write(slice_content.offset(1), counter.clone());
            write(slice_content.offset(2), counter.clone());
        }
        let mbox = MBox::from_raw_parts(slice_content, 3);
        assert_eq!(mbox.len(), 3);
        {
            let cloned_mbox = mbox.clone();
            counter.assert_eq(0);
            assert_eq!(cloned_mbox.len(), 3);
            cloned_mbox[0].assert_eq(0);
            cloned_mbox[1].assert_eq(0);
            cloned_mbox[2].assert_eq(0);
        }
        counter.assert_eq(3);
        mbox[0].assert_eq(3);
        mbox[1].assert_eq(3);
        mbox[2].assert_eq(3);
    }
    counter.assert_eq(6);
}
// Collecting from an iterator: one drop for the `repeat` source clone,
// then one per collected element when the slice goes away.
#[test]
fn test_from_iterator() {
    let counter = DropCounter::default();
    {
        let slice = repeat(counter.clone()).take(18).collect::<MBox<[_]>>();
        counter.assert_eq(1);
        assert_eq!(slice.len(), 18);
        for c in &slice {
            c.assert_eq(1);
        }
    }
    counter.assert_eq(19);
}
// By-value iteration drops each yielded element as the loop advances.
#[test]
fn test_into_iterator() {
    let counter = DropCounter::default();
    {
        let slice = repeat(counter.clone()).take(18).collect::<MBox<[_]>>();
        counter.assert_eq(1);
        assert_eq!(slice.len(), 18);
        for (i, c) in slice.into_iter().enumerate() {
            c.assert_eq(1 + i);
        }
    }
    counter.assert_eq(19);
}
// The by-value iterator reports exact sizes and supports taking elements
// from both ends.
#[cfg(feature = "std")]
#[test]
fn test_iter_properties() {
    let slice = vec![1, 4, 9, 16, 25].into_iter().collect::<MBox<[_]>>();
    let mut iter = slice.into_iter();
    assert_eq!(iter.size_hint(), (5, Some(5)));
    assert_eq!(iter.len(), 5);
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next_back(), Some(25));
    assert_eq!(iter.size_hint(), (3, Some(3)));
    assert_eq!(iter.len(), 3);
    assert_eq!(iter.collect::<Vec<_>>(), vec![4, 9, 16]);
}
// Dropping a partially-consumed iterator must drop the remaining elements.
#[test]
fn test_iter_drop() {
    let counter = DropCounter::default();
    {
        let slice = repeat(counter.clone()).take(18).collect::<MBox<[_]>>();
        counter.assert_eq(1);
        assert_eq!(slice.len(), 18);
        let mut iter = slice.into_iter();
        counter.assert_eq(1);
        {
            iter.next().unwrap().assert_eq(1)
        };
        {
            iter.next().unwrap().assert_eq(2)
        };
        {
            iter.next_back().unwrap().assert_eq(3)
        };
        counter.assert_eq(4);
    }
    counter.assert_eq(19);
}
// Zero-sized element types must survive collect/clone/into_iter.
#[test]
fn test_zst_slice() {
    let slice = repeat(()).take(7).collect::<MBox<[_]>>();
    let _ = slice.clone();
    slice.into_iter();
}
// A panic mid-clone must propagate (and not corrupt memory).
#[test]
#[should_panic(expected = "panic on clone")]
fn test_panic_during_clone() {
    let mbox = MBox::<PanicOnClone>::default();
    let _ = mbox.clone();
}
#[test]
#[should_panic(expected = "panic on clone")]
fn test_panic_during_clone_from() {
    let mut mbox = MBox::<PanicOnClone>::default();
    let other = MBox::default();
    mbox.clone_from(&other);
}
//}}}
//{{{ UTF-8 String --------------------------------------------------------------------------------
impl MBox<str> {
    /// Constructs a new malloc-backed string from the pointer and the length (number of UTF-8 code
    /// units).
    ///
    /// # Safety
    ///
    /// The `malloc`ed size of the pointer must be at least `len`. The content must already been
    /// initialized and be valid UTF-8.
    pub unsafe fn from_raw_utf8_parts_unchecked(value: *mut u8, len: usize) -> MBox<str> {
        // Reinterpret the byte buffer as `str` without validation, then
        // take ownership of the fat pointer.
        let bytes = from_raw_parts(value, len);
        let string = from_utf8_unchecked(bytes) as *const str as *mut str;
        Self::from_raw(string)
    }
    /// Constructs a new malloc-backed string from the pointer and the length (number of UTF-8 code
    /// units). If the content does not contain valid UTF-8, this method returns an `Err`.
    ///
    /// # Safety
    ///
    /// The `malloc`ed size of the pointer must be at least `len`.
    ///
    /// NOTE(review): on a validation `Err` the buffer is NOT freed — the
    /// caller keeps ownership in that case; confirm callers handle this.
    pub unsafe fn from_raw_utf8_parts(value: *mut u8, len: usize) -> Result<MBox<str>, Utf8Error> {
        let bytes = from_raw_parts(value, len);
        let string = from_utf8(bytes)? as *const str as *mut str;
        Ok(Self::from_raw(string))
    }
    /// Converts the string into raw bytes.
    pub fn into_bytes(self) -> MBox<[u8]> {
        // In-place reinterpretation: same allocation, element type `u8`.
        unsafe { MBox::from_raw(Self::into_raw(self) as *mut [u8]) }
    }
    /// Creates a string from raw bytes.
    ///
    /// # Safety
    ///
    /// The raw bytes must be valid UTF-8.
    pub unsafe fn from_utf8_unchecked(bytes: MBox<[u8]>) -> MBox<str> {
        Self::from_raw(MBox::into_raw(bytes) as *mut str)
    }
    /// Creates a string from raw bytes. If the content does not contain valid UTF-8, this method
    /// returns an `Err`.
    pub fn from_utf8(bytes: MBox<[u8]>) -> Result<MBox<str>, Utf8Error> {
        unsafe {
            let (ptr, len) = bytes.into_raw_parts();
            Self::from_raw_utf8_parts(ptr, len)
        }
    }
}
impl Default for MBox<str> {
    // Empty string over a (non-null) zero-length allocation, mirroring
    // `MBox<[T]>::default`.
    fn default() -> Self {
        unsafe { Self::from_raw_utf8_parts_unchecked(gen_malloc(0).as_ptr(), 0) }
    }
}
impl Clone for MBox<str> {
    fn clone(&self) -> Self {
        Self::from(&**self)
    }
}
impl From<&str> for MBox<str> {
    /// Creates a new `malloc`-boxed string by cloning the content of an existing string slice.
    fn from(string: &str) -> Self {
        let len = string.len();
        // SAFETY: the fresh buffer is exactly `len` bytes and is filled
        // with the source's bytes, which are valid UTF-8 by construction.
        unsafe {
            let new_slice = gen_malloc(len).as_ptr();
            copy_nonoverlapping(string.as_ptr(), new_slice, len);
            Self::from_raw_utf8_parts_unchecked(new_slice, len)
        }
    }
}
// Round-trip: bytes -> validated string -> bytes, including multi-byte
// UTF-8 sequences.
#[test]
fn test_string_from_bytes() {
    let bytes = MBox::from_slice(b"abcdef\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89");
    let string = MBox::from_utf8(bytes).unwrap();
    assert_eq!(&*string, "abcdef一二三");
    assert_eq!(string, MBox::<str>::from("abcdef一二三"));
    let bytes = string.into_bytes();
    assert_eq!(&*bytes, b"abcdef\xe4\xb8\x80\xe4\xba\x8c\xe4\xb8\x89");
}
// Invalid UTF-8 must be rejected, not silently accepted.
#[test]
fn test_non_utf8() {
    let bytes = MBox::from_slice(b"\x88\x88\x88\x88");
    let string = MBox::from_utf8(bytes);
    assert!(string.is_err());
}
// The default string equals an explicitly-built empty string.
#[test]
fn test_default_str() {
    assert_eq!(MBox::<str>::default(), MBox::<str>::from(""));
}
// A panic while cloning a slice element must propagate.
#[test]
#[should_panic(expected = "panic on clone")]
fn test_panic_on_clone_slice() {
    let mbox: MBox<[PanicOnClone]> = once(PanicOnClone::default()).collect();
    let _ = mbox.clone();
}
//}}}
|
mod actions;
extern crate telegram_bot;
//use self::telegram_bot::*;
/// Maps an incoming chat message to a bot action and returns its reply.
///
/// Free-text triggers are first normalized to a slash-command; the command
/// (or the raw message, if it already is one) is then dispatched. Unknown
/// input yields an empty string.
pub fn resolve (command: &str) -> String {
    let mut acao = command;
    // Case-insensitive substring triggers, checked in priority order.
    if command.to_lowercase().contains("dara é doida"){
        acao = "/daraedoida";
    }else if command.to_lowercase().contains("alana"){
        acao = "/alanaacha";
    }else if command.to_lowercase().contains("eita"){
        acao = "/valeime";
    }
    // NOTE(review): the command names and action modules look swapped —
    // "/daraedoida" runs `berro` and "/alanaacha" runs `daraedoida`.
    // Confirm this mapping is intentional before changing it.
    match acao {
        "/daraedoida" => actions::berro::run(),
        "/alanaacha" => actions::daraedoida::run(),
        "/valeime" => actions::valeime::run(),
        _ => "".to_string(),
    }
}
|
use bintree_strrepr::Tree;
/// Demo entry point: parses a parenthesized string representation of a
/// binary tree (empty slots allowed, e.g. `c(,f(g,))`) and debug-prints it.
pub fn main() {
    let tree = Tree::from_string("a(b(d,e),c(,f(g,)))");
    println!("{:?}", tree);
}
|
use core::marker::PhantomData;
use {Future, Poll, Async};
/// Future for the `from_err` combinator, changing the error type of a future.
///
/// This is created by the `Future::from_err` method.
#[derive(Debug)]
#[must_use = "futures do nothing unless polled"]
pub struct FromErr<A, E> where A: Future {
    future: A,
    // Zero-sized marker pinning the target error type `E`.
    f: PhantomData<E>
}
/// Wraps `future` so that its error is converted into `E` via `From`
/// when polled (see the `Future` impl below).
pub fn new<A, E>(future: A) -> FromErr<A, E>
    where A: Future
{
    FromErr {
        // Field-init shorthand replaces the redundant `future: future`.
        future,
        f: PhantomData,
    }
}
impl<A:Future, E:From<A::Error>> Future for FromErr<A, E> {
    type Item = A::Item;
    type Error = E;
    fn poll(&mut self) -> Poll<A::Item, E> {
        // Poll the inner future. Only final outcomes (Ready or Err) need
        // their error converted; NotReady is forwarded untouched so the
        // conversion never runs on the hot not-ready path.
        let e = match self.future.poll() {
            Ok(Async::NotReady) => return Ok(Async::NotReady),
            other => other,
        };
        e.map_err(From::from)
    }
}
|
// NOTE(review): this file follows the svd2rust-generated register API
// shape (R/W proxies over raw bits) — presumably machine-generated from
// the device SVD; regenerate rather than hand-edit where possible.
#[doc = "Reader of register OAR1"]
pub type R = crate::R<u32, super::OAR1>;
#[doc = "Writer for register OAR1"]
pub type W = crate::W<u32, super::OAR1>;
#[doc = "Register OAR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::OAR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `OA1_0`"]
pub type OA1_0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1_0`"]
pub struct OA1_0_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 only; all other bits are preserved.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `OA1_7_1`"]
pub type OA1_7_1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `OA1_7_1`"]
pub struct OA1_7_1_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_7_1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 7-bit field at bits 7:1; value is masked before insertion.
        self.w.bits = (self.w.bits & !(0x7f << 1)) | (((value as u32) & 0x7f) << 1);
        self.w
    }
}
#[doc = "Reader of field `OA1_8_9`"]
pub type OA1_8_9_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `OA1_8_9`"]
pub struct OA1_8_9_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1_8_9_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bits 9:8; value is masked before insertion.
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
        self.w
    }
}
#[doc = "Reader of field `OA1MODE`"]
pub type OA1MODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1MODE`"]
pub struct OA1MODE_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1MODE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 10.
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `OA1EN`"]
pub type OA1EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OA1EN`"]
pub struct OA1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> OA1EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit 15.
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
// Field readers: each extracts its field from the raw register value by
// shifting and masking.
impl R {
    #[doc = "Bit 0 - Interface address"]
    #[inline(always)]
    pub fn oa1_0(&self) -> OA1_0_R {
        OA1_0_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 1:7 - Interface address"]
    #[inline(always)]
    pub fn oa1_7_1(&self) -> OA1_7_1_R {
        OA1_7_1_R::new(((self.bits >> 1) & 0x7f) as u8)
    }
    #[doc = "Bits 8:9 - Interface address"]
    #[inline(always)]
    pub fn oa1_8_9(&self) -> OA1_8_9_R {
        OA1_8_9_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bit 10 - Own Address 1 10-bit mode"]
    #[inline(always)]
    pub fn oa1mode(&self) -> OA1MODE_R {
        OA1MODE_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 15 - Own Address 1 enable"]
    #[inline(always)]
    pub fn oa1en(&self) -> OA1EN_R {
        OA1EN_R::new(((self.bits >> 15) & 0x01) != 0)
    }
}
// Write accessors: each method hands out a write proxy borrowing this
// writer, so callers chain e.g. `w.oa1mode().set_bit().oa1en().set_bit()`.
impl W {
    #[doc = "Bit 0 - Interface address"]
    #[inline(always)]
    pub fn oa1_0(&mut self) -> OA1_0_W {
        OA1_0_W { w: self }
    }
    #[doc = "Bits 1:7 - Interface address"]
    #[inline(always)]
    pub fn oa1_7_1(&mut self) -> OA1_7_1_W {
        OA1_7_1_W { w: self }
    }
    #[doc = "Bits 8:9 - Interface address"]
    #[inline(always)]
    pub fn oa1_8_9(&mut self) -> OA1_8_9_W {
        OA1_8_9_W { w: self }
    }
    #[doc = "Bit 10 - Own Address 1 10-bit mode"]
    #[inline(always)]
    pub fn oa1mode(&mut self) -> OA1MODE_W {
        OA1MODE_W { w: self }
    }
    #[doc = "Bit 15 - Own Address 1 enable"]
    #[inline(always)]
    pub fn oa1en(&mut self) -> OA1EN_W {
        OA1EN_W { w: self }
    }
}
|
use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
// State for the bitmask machine: the currently active mask plus a sparse
// memory image.
struct Program {
    // Active mask string of '0'/'1'/'X' characters, set by `mask = ...`
    // instructions. Owned String is the right call here: a borrowed &str
    // would force a lifetime parameter on Program for no real gain, since
    // the mask is replaced wholesale on every `mask` instruction anyway.
    mask: String,
    // Sparse memory: address -> last value stored there.
    memory: HashMap<u64, u64>,
}
impl Program {
    /// Creates a program with an empty mask and empty memory.
    fn new() -> Program {
        Program {
            mask: "".to_string(),
            memory: HashMap::new(),
        }
    }

    /// Executes one `<target> = <value>` instruction.
    ///
    /// `volatile` selects the decoder-v2 semantics: the mask is applied
    /// to the *address* (with 'X' bits floating) instead of to the value.
    fn execute_instr(&mut self, instr: &str, volatile: bool) {
        let mut parts = instr.splitn(2, " = ");
        let arg = parts.next().expect("empty instruction");
        let val = parts.next().expect("instruction is missing ` = `");
        // The first three characters distinguish `mem[...]` from `mask`.
        match &arg[0..3] {
            "mem" => {
                let val = val.parse().expect("memory value must be an integer");
                if volatile {
                    self.mask_and_store_volatile(arg, val)
                } else {
                    self.mask_and_store(arg, val)
                }
            }
            "mas" => self.mask = val.to_owned(),
            _ => panic!("Invalid instruction {}", arg),
        }
    }

    /// Extracts the numeric address from a `mem[<addr>]` target.
    ///
    /// Replaces the per-call `Regex::new` the original used — compiling a
    /// regex on every instruction was pure overhead for this fixed shape.
    fn parse_addr(arg: &str) -> u64 {
        // `arg` looks like `mem[123]`: the digits sit between the fixed
        // 4-character prefix `mem[` and the trailing `]`.
        arg[4..arg.len() - 1]
            .parse()
            .expect("memory target must look like `mem[<addr>]`")
    }

    /// Part-1 store: the mask is applied to the value before storing.
    fn mask_and_store(&mut self, arg: &str, val: u64) {
        let addr = Self::parse_addr(arg);
        // '1' bits in the mask force ones, '0' bits force zeros, and 'X'
        // leaves the value bit untouched.
        let ones_mask = u64::from_str_radix(&self.mask.replace("X", "0"), 2).unwrap();
        let zeros_mask = u64::from_str_radix(&self.mask.replace("X", "1"), 2).unwrap();
        // OR in the forced ones first, then AND with the zeros mask; this
        // ordering prevents a don't-care in the ones mask from overriding
        // a forced zero.
        self.memory.insert(addr, zeros_mask & (ones_mask | val));
    }

    /// Part-2 store: the mask floats address bits, so `val` is written to
    /// every address in the expanded set.
    fn mask_and_store_volatile(&mut self, arg: &str, val: u64) {
        let addr = Self::parse_addr(arg);
        for masked in get_masked_addrs(addr, self.mask.clone()) {
            self.memory.insert(masked, val);
        }
    }
}
/// Applies `mask` to `addr` (decoder-v2 rules) and returns every concrete
/// address the masked form can take.
///
/// Mask rules: '0' keeps the corresponding address bit, '1' forces a one,
/// and 'X' marks a floating bit that `permutations` expands both ways.
fn get_masked_addrs(addr: u64, mask: String) -> HashSet<u64> {
    // Address rendered as a zero-padded 36-bit binary string; this is
    // pure ASCII, so byte indexing below is safe.
    let addr_bits = format!("{:0>36b}", addr);
    let addr_bits = addr_bits.as_bytes();
    let masked: String = mask
        .chars()
        .enumerate()
        .map(|(i, m)| if m == '0' { addr_bits[i] as char } else { m })
        .collect();
    permutations(masked)
}
/// Expands every 'X' in the binary string `addr` to both '0' and '1' and
/// returns the set of all resulting values parsed as base-2 integers.
fn permutations(addr: String) -> HashSet<u64> {
    let mut perms = HashSet::new();
    if !addr.contains('X') {
        // Fully concrete: parse the binary string directly.
        perms.insert(u64::from_str_radix(&addr, 2).unwrap());
        return perms;
    }
    // Branch on the first remaining 'X' and recurse on both settings.
    // Extending with the owned sets directly moves the u64s in; the old
    // `.iter()` round-trip only added a needless layer of references.
    perms.extend(permutations(addr.replacen('X', "1", 1)));
    perms.extend(permutations(addr.replacen('X', "0", 1)));
    perms
}
/// Reads the file at `filename` and returns its lines, newline-stripped.
///
/// Panics with a short message if the file cannot be opened or a line
/// cannot be read (good enough for a puzzle input).
fn read_lines<P>(filename: P) -> Vec<String>
where
    P: AsRef<Path>,
{
    let reader = BufReader::new(File::open(filename).expect("Error opening file!"));
    let mut result = Vec::new();
    for line in reader.lines() {
        result.push(line.expect("Error reading line!"));
    }
    result
}
/// Part 1: runs every instruction with value-masking semantics and prints
/// the sum of all stored memory values.
///
/// Takes `&[String]` rather than `&Vec<String>` — callers passing
/// `&lines` still compile via deref coercion, and the slice form is the
/// idiomatic, more general parameter type.
fn part1(lines: &[String]) {
    let mut program = Program::new();
    for line in lines {
        program.execute_instr(line, false);
    }
    // Summing the values directly beats folding over (key, value) pairs
    // only to ignore the key.
    let result: u64 = program.memory.values().sum();
    println!("{}", result)
}
/// Part 2: runs every instruction with address-masking ("volatile")
/// semantics and prints the sum of all stored memory values.
///
/// Takes `&[String]` rather than `&Vec<String>`; `&lines` at the call
/// site still coerces, and the slice is the more general parameter type.
fn part2(lines: &[String]) {
    let mut program = Program::new();
    for line in lines {
        program.execute_instr(line, true);
    }
    // Sum the stored values directly instead of folding over entries.
    let result: u64 = program.memory.values().sum();
    println!("{}", result)
}
/// Entry point: reads the puzzle input from `input.txt` in the current
/// working directory and prints the part-1 and part-2 answers, one per line.
fn main() {
    let lines = read_lines("input.txt");
    part1(&lines);
    part2(&lines);
}
|
use std::fmt::{Debug, Error, Formatter};
use std::ops::{Deref, DerefMut};
#[cfg(feature = "img")]
use image::*;
use libwebp_sys::{WebPFree, WebPPicture, WebPPictureFree};
/// This struct represents a safe wrapper around memory owned by libwebp.
/// Its data contents can be accessed through the Deref and DerefMut traits.
pub struct WebPMemory(pub(crate) *mut u8, pub(crate) usize);
impl Debug for WebPMemory {
    /// Formats just the type name; the raw pointer and length are
    /// deliberately not exposed in debug output.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        // Fixed: the debug name previously read "WebpMemory", which did
        // not match the actual type name `WebPMemory`.
        f.debug_struct("WebPMemory").finish()
    }
}
impl Drop for WebPMemory {
    /// Releases the buffer back to libwebp.
    fn drop(&mut self) {
        // SAFETY: per this type's contract (see its doc comment), the
        // pointer is memory owned by libwebp, so WebPFree is the matching
        // deallocator. Assumes the crate never constructs a WebPMemory
        // from any other allocation — TODO confirm at the construction
        // sites.
        unsafe { WebPFree(self.0 as _) }
    }
}
impl Deref for WebPMemory {
    type Target = [u8];
    /// Borrows the libwebp-owned buffer as a byte slice.
    fn deref(&self) -> &Self::Target {
        // SAFETY: assumes self.0 points to at least self.1 valid bytes
        // and stays valid while &self is borrowed — upheld by the crate
        // code that constructs WebPMemory (not visible here; verify at
        // the construction sites).
        unsafe { std::slice::from_raw_parts(self.0, self.1) }
    }
}
impl DerefMut for WebPMemory {
    /// Borrows the libwebp-owned buffer as a mutable byte slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same invariant as deref; &mut self guarantees the
        // slice is the only live reference into the buffer.
        unsafe { std::slice::from_raw_parts_mut(self.0, self.1) }
    }
}
// Crate-internal RAII wrapper that frees the wrapped WebPPicture on drop.
// NOTE(review): "Manageed" is a typo for "Managed"; renaming is a
// crate-wide change (the Deref/DerefMut impls and all users reference
// this name), so it is only flagged here rather than fixed.
#[derive(Debug)]
pub(crate) struct ManageedPicture(pub(crate) WebPPicture);
impl Drop for ManageedPicture {
    fn drop(&mut self) {
        // SAFETY: &mut self.0 is a valid WebPPicture; WebPPictureFree
        // releases the picture's internal buffers. Assumes the picture
        // was initialized by libwebp — TODO confirm at construction.
        unsafe { WebPPictureFree(&mut self.0 as _) }
    }
}
// Transparent access to the wrapped WebPPicture.
impl Deref for ManageedPicture {
    type Target = WebPPicture;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for ManageedPicture {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// This struct represents a decoded image.
/// Its data contents can be accessed through the Deref and DerefMut traits.
/// It is also possible to create an image::DynamicImage from this struct.
/// This struct represents a decoded image.
/// Its data contents can be accessed through the Deref and DerefMut traits.
/// It is also possible to create an image::DynamicImage from this struct.
pub struct WebPImage {
    // Raw pixel bytes, owned by libwebp; freed when this image drops.
    data: WebPMemory,
    // Channel order of `data` (RGB or RGBA).
    layout: PixelLayout,
    // Dimensions in pixels.
    width: u32,
    height: u32,
}
impl WebPImage {
    /// Wraps already-decoded pixel data; `layout` must describe the actual
    /// channel order of `data` and `width`/`height` its dimensions.
    pub(crate) fn new(data: WebPMemory, layout: PixelLayout, width: u32, height: u32) -> Self {
        Self {
            data,
            layout,
            width,
            height,
        }
    }
    /// Creates a DynamicImage from this WebPImage.
    ///
    /// Copies the pixel buffer (`to_owned` on the deref'd byte slice), so
    /// the returned image is independent of this wrapper's lifetime.
    /// Panics if the buffer length does not match width*height*channels.
    #[cfg(feature = "img")]
    pub fn to_image(&self) -> DynamicImage {
        if self.layout.is_alpha() {
            // Four channels: the buffer type is inferred as RGBA here.
            let image = ImageBuffer::from_raw(self.width, self.height, self.data.to_owned())
                .expect("ImageBuffer couldn't be created");
            DynamicImage::ImageRgba8(image)
        } else {
            // Three channels: the buffer type is inferred as RGB here.
            let image = ImageBuffer::from_raw(self.width, self.height, self.data.to_owned())
                .expect("ImageBuffer couldn't be created");
            DynamicImage::ImageRgb8(image)
        }
    }
    /// Returns the width of the image in pixels.
    pub fn width(&self) -> u32 {
        self.width
    }
    /// Returns the height of the image in pixels.
    pub fn height(&self) -> u32 {
        self.height
    }
    /// Returns true if the pixel data carries an alpha channel (RGBA).
    pub fn is_alpha(&self) -> bool {
        self.layout.is_alpha()
    }
}
impl Deref for WebPImage {
    type Target = [u8];

    /// Exposes the decoded pixel data as a byte slice.
    fn deref(&self) -> &Self::Target {
        // Reborrow through WebPMemory's own Deref impl.
        &*self.data
    }
}

impl DerefMut for WebPImage {
    /// Exposes the decoded pixel data as a mutable byte slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut *self.data
    }
}
/// Describes the pixel layout (the order of the color channels) of an image.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum PixelLayout {
    Rgb,
    Rgba,
}

impl PixelLayout {
    /// Returns true if the pixel contains an alpha channel.
    pub fn is_alpha(self) -> bool {
        matches!(self, PixelLayout::Rgba)
    }
}
|
#![windows_subsystem = "windows"]
use std::ffi::CString;
use core::ptr;
// Hand-written binding to the Win32 ANSI message-box function from
// user32.dll. NOTE(review): Win32 declares the handle as HWND and the
// strings as LPCSTR; `*const i8` matches LPCSTR on Windows and a null
// pointer is accepted for the window handle — confirm against the
// MessageBoxA reference if this binding is extended.
#[link(name = "user32")]
extern "stdcall" {
    pub fn MessageBoxA(
        hWnd: *const i8,
        lpText: *const i8,
        lpCaption: *const i8,
        uType: u32
    ) -> i32;
}
/// Shows a native Windows message box with a fixed title and message.
fn main() {
    // CString::new only fails on interior NUL bytes, which these literals
    // do not contain — but an empty expect("") message (as before) tells a
    // future maintainer nothing, so state the invariant instead.
    let msg = CString::new("Hello from Rust").expect("message text contains a NUL byte");
    let title = CString::new("Holy smokes!").expect("title text contains a NUL byte");
    // SAFETY: both pointers come from CStrings that outlive the call,
    // a null window handle is permitted, and uType 0 requests the
    // default OK-only box (MB_OK per the Win32 docs).
    unsafe {
        MessageBoxA(
            ptr::null(),
            msg.as_ptr(),
            title.as_ptr(),
            0);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.