text stringlengths 8 4.13M |
|---|
use hyper::{Client, Url};
use std::collections::BTreeSet;
use semver::VersionReq;
/// What to install: nothing specific, a named git ref, an exact commit,
/// or any version matching a semver requirement.
pub enum Target {
    None,
    Ref(String),
    Commit(String),
    Version(VersionReq)
}
/// A pending package installation and its lazily discovered dependency sets.
pub struct Install {
    package: String,
    target: Target,
    // `None` until dependency resolution has run.
    dependencies: Option<BTreeSet<String>>,
    // `None` until dev-dependency resolution has run.
    dev_dependencies: Option<BTreeSet<String>>
}
impl Install {
    /// Creates a new install action for `package` aimed at `target`.
    pub fn new(package: &str, target: Target) -> Install {
        Install {
            package: String::from(package),
            target: target,
            dependencies: None,
            dev_dependencies: None,
        }
    }

    /// Runs the three install phases in order, stopping at the first error.
    ///
    /// Replaces the original nested `try!(try!(try!(..)))` pyramid with an
    /// `and_then` chain — identical control flow, readable left to right.
    pub fn execute(self) -> Result<Install, InstallError> {
        self.preinstall()
            .and_then(|install| install.install())
            .and_then(|install| install.postinstall())
    }

    // Phase stubs below currently always succeed.
    fn preinstall(self) -> Result<Install, InstallError> {
        Ok(self)
    }

    fn install(self) -> Result<Install, InstallError> {
        Ok(self)
    }

    fn postinstall(self) -> Result<Install, InstallError> {
        Ok(self)
    }
}
/// Errors produced while executing an `Install`.
/// Currently uninhabited (no variants), so `execute` cannot actually fail yet.
pub enum InstallError {
}
|
// This is forked from the above actix-web-middleware-redirect-https to also support "www." prefix
use actix_service::{Service, Transform};
use actix_web::{
dev::{ServiceRequest, ServiceResponse},
http, Error, HttpResponse,
};
use futures::{
future::{ok, Either, FutureResult},
Poll,
};
/// Middleware that 301-redirects plain-http requests to https, and — when
/// `www` is set — additionally prefixes bare two-label hosts with "www.".
#[derive(Default, Clone)]
pub struct RedirMiddleware {
    // When true, "example.com" is rewritten to "www.example.com".
    www: bool,
}
impl RedirMiddleware {
pub fn new(www: bool) -> Self {
RedirMiddleware {
www
}
}
}
// Transform factory: wraps the inner service in a `RedirMiddlewareService`,
// forwarding the `www` flag.
impl<S, B> Transform<S> for RedirMiddleware
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = Error;
    type InitError = ();
    type Transform = RedirMiddlewareService<S>;
    type Future = FutureResult<Self::Transform, Self::InitError>;

    fn new_transform(&self, service: S) -> Self::Future {
        // Construction can never fail, so resolve immediately with `ok`.
        ok(RedirMiddlewareService {
            service,
            www: self.www,
        })
    }
}
/// The per-connection service produced by [`RedirMiddleware`].
pub struct RedirMiddlewareService<S> {
    service: S,
    // Mirrors `RedirMiddleware::www`.
    www: bool,
}
impl<S, B> Service for RedirMiddlewareService<S>
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = Error;
    type Future = Either<S::Future, FutureResult<Self::Response, Self::Error>>;

    fn poll_ready(&mut self) -> Poll<(), Self::Error> {
        self.service.poll_ready()
    }

    /// Decides per request whether to 301-redirect (to https and/or the
    /// "www."-prefixed host) or to pass the request on unchanged.
    fn call(&mut self, req: ServiceRequest) -> Self::Future {
        // Host rewritten with a "www." prefix, if enabled and applicable.
        let redir_host = if self.www {
            let host = req.connection_info().host().to_owned();
            let host_parts: Vec<_> = host.split('.').collect();
            // todo: More robust — only bare two-label hosts ("example.com")
            // get the prefix; "localhost" and "www.example.com" fall through.
            if host_parts.len() == 2 {
                Some(format!("www.{}.{}", host_parts[0], host_parts[1]))
            } else {
                None
            }
        } else {
            None
        };
        let redir_url = match redir_host {
            // Host is being rewritten, so force https in the same hop.
            Some(redir_host) => Some(format!("https://{}{}", redir_host, path_and_query(&req))),
            None => {
                if req.connection_info().scheme() == "https" {
                    None
                } else {
                    Some(format!(
                        "https://{}{}",
                        req.connection_info().host(),
                        path_and_query(&req)
                    ))
                }
            }
        };
        match redir_url {
            Some(redir_url) => Either::B(ok(req.into_response(
                HttpResponse::MovedPermanently()
                    .header(http::header::LOCATION, redir_url)
                    .finish()
                    .into_body(),
            ))),
            None => Either::A(self.service.call(req)),
        }
    }
}

/// Path plus query string of the request, or "" when absent.
/// (Extracted: the original duplicated this match in both redirect branches,
/// and left a debug `println!` that fired on every redirect — removed.)
fn path_and_query(req: &ServiceRequest) -> &str {
    req.uri().path_and_query().map(|pq| pq.as_str()).unwrap_or("")
}
|
use std::fmt::Display;
/// Prints a sample reading: hour, quantity label, value.
fn main() {
    let line = tpl(12, "気温", 22.4);
    println!("{}", line);
}
/// Formats an (hour, label, value) triple as "<x>時の<y>は<z>"
/// (e.g. "12時の気温は22.4" — "the <label> at <hour> o'clock is <value>").
fn tpl<TX, TY, TZ>(x: TX, y: TY, z: TZ) -> String
where
    TX: Display,
    TY: Display,
    TZ: Display,
{
    let formatted = format!("{}時の{}は{}", x, y, z);
    formatted
}
|
use super::{BinaryType, CloseEvent, Result, WebSocket, WebSocketMessage};
use crate::app::Orders;
use std::marker::PhantomData;
use std::rc::Rc;
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
use web_sys::MessageEvent;
// ------ Callbacks ------

/// `Callbacks` are used internally by `WebSocket` and `Builder`.
/// Each slot holds the JS closure installed for the matching socket event;
/// `None` means no handler was registered.
#[derive(Default, Debug)]
pub struct Callbacks {
    pub on_open: Option<Closure<dyn Fn(JsValue)>>,
    pub on_close: Option<Closure<dyn Fn(JsValue)>>,
    pub on_error: Option<Closure<dyn Fn(JsValue)>>,
    pub on_message: Option<Closure<dyn Fn(MessageEvent)>>,
}
// ------ Builder ------

/// `Builder` creates a new `WebSocket` instance.
///
/// # Example
///
/// ```rust,no_run
/// enum Msg { MessageReceived(WebSocketMessage) }
/// ...
/// let web_socket = WebSocket::builder("ws://127.0.0.1:9000/ws", orders)
///     .on_message(Msg::MessageReceived)
///     .build_and_open();
/// ```
pub struct Builder<'a, U: AsRef<str>, Ms: 'static, O: Orders<Ms>> {
    url: U,
    orders: &'a O,
    // Handlers accumulated by the `on_*` methods.
    callbacks: Callbacks,
    protocols: &'a [&'a str],
    // `None` keeps the platform default (`Blob`).
    binary_type: Option<BinaryType>,
    // Ties the builder to the app's message type without storing one.
    phantom: PhantomData<Ms>,
}
impl<'a, U: AsRef<str>, Ms: 'static, O: Orders<Ms>> Builder<'a, U, Ms, O> {
    // Note: `WebSocket::builder` is the preferred way to create a new `Builder` instance.
    pub(crate) fn new(url: U, orders: &'a O) -> Self {
        Self {
            url,
            orders,
            callbacks: Callbacks::default(),
            protocols: &[],
            binary_type: None,
            phantom: PhantomData,
        }
    }

    /// Set preferred Web Socket sub-protocols.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/WebSocket)
    pub fn protocols(mut self, protocols: &'a [&'a str]) -> Self {
        self.protocols = protocols;
        self
    }

    /// Set binary data type to `ArrayBuffer`.
    ///
    /// _Notes:_:
    /// - Default binary type is `Blob`.
    /// - For small binary messages, like CBOR, `ArrayBuffer` is more efficient than Blob handling.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/binaryType)
    pub fn use_array_buffers(mut self) -> Self {
        self.binary_type = Some(BinaryType::Arraybuffer);
        self
    }

    /// Set `on_open` Web Socket handler. The handler is called when connection's state changes
    /// to `State::Open`; this indicates that the connection is ready to send and receive data.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/onopen)
    #[allow(clippy::missing_panics_doc)]
    pub fn on_open<MsU: 'static>(
        mut self,
        handler: impl FnOnce() -> MsU + Clone + 'static,
    ) -> Self {
        // The macro normalizes handlers returning `Msg`, `Option<Msg>` or `()`
        // into `Fn(JsValue) -> Option<Ms>`; it panics (with the message below)
        // for any other return type.
        let handler = map_callback_return_to_option_ms!(
            dyn Fn(JsValue) -> Option<Ms>,
            // The event is generic - doesn't contain any useful information.
            |_| handler.clone()(),
            "WebSocket handler on_open can return only Msg, Option<Msg> or ()!",
            Rc
        );
        let callback = create_js_handler(handler, self.orders);
        self.callbacks.on_open = Some(callback);
        self
    }

    /// Set `on_close` Web Socket handler. The handler is called when connection's state changes
    /// to `State::Closed`.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/onclose)
    #[allow(clippy::missing_panics_doc)]
    pub fn on_close<MsU: 'static>(
        mut self,
        handler: impl FnOnce(CloseEvent) -> MsU + Clone + 'static,
    ) -> Self {
        let handler = map_callback_return_to_option_ms!(
            dyn Fn(JsValue) -> Option<Ms>,
            // The generic event is downcast to the concrete `CloseEvent`.
            |event: JsValue| { handler.clone()(event.unchecked_into()) },
            "WebSocket handler on_close can return only Msg, Option<Msg> or ()!",
            Rc
        );
        let callback = create_js_handler(handler, self.orders);
        self.callbacks.on_close = Some(callback);
        self
    }

    /// Set `on_error` Web Socket handler.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/onerror)
    #[allow(clippy::missing_panics_doc)]
    pub fn on_error<MsU: 'static>(
        mut self,
        handler: impl FnOnce() -> MsU + Clone + 'static,
    ) -> Self {
        let handler = map_callback_return_to_option_ms!(
            dyn Fn(JsValue) -> Option<Ms>,
            // The event is generic - doesn't contain any useful information.
            |_| handler.clone()(),
            "WebSocket handler on_error can return only Msg, Option<Msg> or ()!",
            Rc
        );
        let callback = create_js_handler(handler, self.orders);
        self.callbacks.on_error = Some(callback);
        self
    }

    /// Set `on_message` Web Socket handler. The handler is called when a message is received
    /// from the server.
    ///
    /// [MDN reference](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/onmessage)
    #[allow(clippy::missing_panics_doc)]
    pub fn on_message<MsU: 'static>(
        mut self,
        handler: impl FnOnce(WebSocketMessage) -> MsU + Clone + 'static,
    ) -> Self {
        let handler = map_callback_return_to_option_ms!(
            dyn Fn(MessageEvent) -> Option<Ms>,
            |message_event: MessageEvent| {
                // Wrap the raw event so the user handler gets the payload and
                // the original event together.
                let message = WebSocketMessage {
                    data: message_event.data(),
                    message_event,
                };
                handler.clone()(message)
            },
            "WebSocket handler on_message can return only Msg, Option<Msg> or ()!",
            Rc
        );
        let callback = create_js_handler(handler, self.orders);
        self.callbacks.on_message = Some(callback);
        self
    }

    /// Create a new `WebSocket` instance from the `Builder` and try to open it.
    ///
    /// # Errors
    ///
    /// Returns `WebSocketError::OpenError` when Web Socket opening fails.
    /// E.g. when the chosen port is blocked.
    ///
    /// _Note:_: It doesn't return error when the socket is open on the client side,
    /// but fails to connect to the server - use `on_error` handler to resolve such cases.
    pub fn build_and_open(self) -> Result<WebSocket> {
        WebSocket::new(
            self.url.as_ref(),
            self.callbacks,
            self.protocols,
            self.binary_type,
        )
    }
}
// ------ HELPERS ------

/// Wraps a message-producing `handler` in a JS closure that forwards the
/// resulting message (if any) to the app's mailbox, mapped into the root
/// message type via the orders' `msg_mapper`.
fn create_js_handler<T: wasm_bindgen::convert::FromWasmAbi + 'static, Ms: 'static>(
    handler: Rc<dyn Fn(T) -> Option<Ms>>,
    orders: &impl Orders<Ms>,
) -> Closure<dyn Fn(T)> {
    let (app, msg_mapper) = (orders.clone_app(), orders.msg_mapper());
    let mailbox = app.mailbox();
    // @TODO replace with `Closure::new` once stable.
    Closure::wrap(Box::new(move |data| {
        // `None` messages are still sent: presumably the mailbox treats them
        // as a no-op/rerender trigger — confirm in `Mailbox::send`.
        mailbox.send(handler(data).map(|msg| msg_mapper(msg)));
    }) as Box<dyn Fn(T)>)
}
|
use super::super::super::rand::prelude::SliceRandom;
use crate::construction::heuristics::InsertionContext;
use crate::construction::heuristics::*;
use crate::models::problem::Job;
use crate::solver::mutation::recreate::Recreate;
use crate::solver::mutation::{ConfigurableRecreate, PhasedRecreate};
use crate::solver::population::SelectionPhase;
use crate::solver::RefinementContext;
use std::sync::Arc;
/// A recreate method which skips random jobs and routes.
pub struct RecreateWithSkipRandom {
    // Delegate that combines the skip-random job/route selectors below.
    recreate: ConfigurableRecreate,
}
impl Default for RecreateWithSkipRandom {
    /// Wires the skip-random job and route selectors into a
    /// `ConfigurableRecreate` with the best-result selector and default config.
    fn default() -> Self {
        Self {
            recreate: ConfigurableRecreate::new(
                Box::new(SkipRandomJobSelector::default()),
                Box::new(SkipRandomRouteSelector::default()),
                Box::new(BestResultSelector::default()),
                Default::default(),
            ),
        }
    }
}
impl Recreate for RecreateWithSkipRandom {
    // Pure delegation to the configured recreate pipeline.
    fn run(&self, refinement_ctx: &RefinementContext, insertion_ctx: InsertionContext) -> InsertionContext {
        self.recreate.run(refinement_ctx, insertion_ctx)
    }
}
impl RecreateWithSkipRandom {
    /// Creates `RecreateWithSkipRandom` as PhasedRecreate which runs only in exploration phase.
    /// Initial and exploitation phases fall back to `default_recreate`.
    pub fn default_explorative_phased(default_recreate: Arc<dyn Recreate + Send + Sync>) -> PhasedRecreate {
        let recreates = vec![
            (SelectionPhase::Initial, default_recreate.clone()),
            (SelectionPhase::Exploration, Arc::new(RecreateWithSkipRandom::default())),
            (SelectionPhase::Exploitation, default_recreate),
        ];
        PhasedRecreate { recreates: recreates.into_iter().collect() }
    }
}
/// Job selector that leaves a random prefix of the shuffled job list out.
/// The hand-written `impl Default` was redundant for a field-less struct;
/// derive it instead.
#[derive(Default)]
struct SkipRandomJobSelector {}
impl JobSelector for SkipRandomJobSelector {
    /// Shuffles the required jobs, then skips a random count of them
    /// (from `uniform_int(2, 8)` — presumably inclusive bounds; confirm in
    /// the `Random` trait) so some jobs stay unassigned this pass.
    fn select<'a>(&'a self, ctx: &'a mut InsertionContext) -> Box<dyn Iterator<Item = Job> + 'a> {
        ctx.solution.required.shuffle(&mut ctx.environment.random.get_rng());
        let skip = ctx.environment.random.uniform_int(2, 8) as usize;
        Box::new(ctx.solution.required.iter().skip(skip).cloned())
    }
}
/// Route selector that leaves a random prefix of the shuffled routes out.
/// The hand-written `impl Default` was redundant for a field-less struct;
/// derive it instead.
#[derive(Default)]
struct SkipRandomRouteSelector {}
impl RouteSelector for SkipRandomRouteSelector {
    /// Shuffles existing routes and skips a small random prefix, then chains
    /// a fresh route from the registry so insertion always has a target.
    fn select<'a>(
        &'a self,
        ctx: &'a mut InsertionContext,
        _jobs: &[Job],
    ) -> Box<dyn Iterator<Item = RouteContext> + 'a> {
        ctx.solution.routes.shuffle(&mut ctx.environment.random.get_rng());
        let skip = ctx.environment.random.uniform_int(0, 4);
        // NOTE(review): asymmetric clamping — when the draw exceeds the route
        // count (and there is more than one route) it is reduced by one,
        // otherwise exactly one route is skipped. Looks intentional but
        // confirm against the solver tests.
        let skip = match (skip > ctx.solution.routes.len() as i32, ctx.solution.routes.len() > 1) {
            (true, true) => (skip - 1) as usize,
            (false, true) => 1,
            _ => 0,
        };
        Box::new(ctx.solution.routes.iter().skip(skip).cloned().chain(ctx.solution.registry.next()))
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
use myrias::{router, Config};
use rocket::{catchers, routes};
/// Boots the Rocket server: loads `Config.toml`, registers error catchers and
/// the API routes, then blocks on `launch()`.
fn main() {
    // Disable ANSI colors in Rocket's CLI output (e.g. for plain log files).
    std::env::set_var("ROCKET_CLI_COLORS", "off");
    let config = Config::from_file("Config.toml");
    rocket::ignite()
        // Make the config available to handlers via managed state.
        .manage(config)
        .register(catchers![
            router::not_found::index,
            router::gateway_timeout::index,
            router::internal_server_error::index
        ])
        .mount(
            "/",
            routes![
                router::languages::index,
                router::create_container::index,
                router::eval::index,
                router::containers::index,
                router::cleanup::index,
            ],
        )
        .launch();
}
|
/// Advent of Code 2017 day 3: runs both parts for the puzzle input.
fn main() {
    manhanttanDistance(361527);
    fillSquares(361527);
}
/// Prints and returns the Manhattan distance from cell `input` back to the
/// center of the number spiral (AoC 2017 day 3, part 1).
///
/// Fixes: the original panicked with a division by zero (`offset % (level - 1)`
/// with `level == 1`) for `input == 1`; it also discarded the computed value,
/// which is now returned as well (callers ignoring it are unaffected).
#[allow(non_snake_case)]
fn manhanttanDistance(input: usize) -> usize {
    // Cell 1 is the center itself; the general formula below would divide by
    // zero (level - 1 == 0) for this case.
    if input <= 1 {
        println!("{:?}", 0);
        return 0;
    }
    // Find the smallest odd ring (level x level square) containing `input`.
    let mut total = 1;
    let mut level = 1;
    while total < input {
        level += 2;
        total = level * level;
    }
    // Distance along the ring edge from `input` to the nearest corner, folded
    // with the ring's radius to get the Manhattan distance to the center.
    let offset = total - input;
    let distance_from_input_to_corner = offset % (level - 1);
    let distance_from_corner_to_center = (level - 1) / 2;
    let distance_from_center_to_input = if level / 2 > distance_from_input_to_corner {
        level / 2 - distance_from_input_to_corner
    } else {
        distance_from_input_to_corner - level / 2
    };
    let distance = distance_from_corner_to_center + distance_from_center_to_input;
    println!("{:?}", distance);
    distance
}
/// Part 2: fills the spiral where each cell is the sum of all already-filled
/// neighbours, until a value reaches `input`. Prints and returns the filled
/// cells as `[x, y, value]` triples.
///
/// Fixes: the source was corrupted by HTML-entity mangling — every call site
/// read `sumNeighbours(&[x, y, 0], ¤t_values)`, i.e. `&curren` of
/// `&current_values` had been collapsed into `¤`. Restored the intended
/// `&current_values` argument. The result vector is now also returned
/// (callers ignoring it are unaffected).
#[allow(non_snake_case)]
fn fillSquares(input: usize) -> Vec<[i32; 3]> {
    let mut step = 1;
    let mut current_number: i32 = 1;
    let mut x: i32 = 0;
    let mut y: i32 = 0;
    let mut current_values = vec![[x, y, current_number]];
    while current_number < input as i32 {
        // One full ring: right `step`, up `step`, left `step + 1`, down `step + 1`.
        for _ in 0..step {
            x += 1;
            current_number = sumNeighbours(&[x, y, 0], &current_values);
            current_values.push([x, y, current_number]);
        }
        for _ in 0..step {
            y += 1;
            current_number = sumNeighbours(&[x, y, 0], &current_values);
            current_values.push([x, y, current_number]);
        }
        for _ in 0..(step + 1) {
            x -= 1;
            current_number = sumNeighbours(&[x, y, 0], &current_values);
            current_values.push([x, y, current_number]);
        }
        for _ in 0..(step + 1) {
            y -= 1;
            current_number = sumNeighbours(&[x, y, 0], &current_values);
            current_values.push([x, y, current_number]);
        }
        step += 2;
    }
    println!("{:?}", current_values);
    current_values
}

/// Sums the values of every filled cell within Chebyshev distance 1 of
/// `input`'s `[x, y, _]` coordinates (the cell itself included, if filled).
/// Takes a slice instead of `&Vec` — existing `&Vec` callers still coerce.
#[allow(non_snake_case)]
fn sumNeighbours(input: &[i32; 3], current_values: &[[i32; 3]]) -> i32 {
    let (input_x, input_y) = (input[0], input[1]);
    current_values
        .iter()
        .filter(|v| (input_x - v[0]).abs() <= 1 && (input_y - v[1]).abs() <= 1)
        .map(|v| v[2])
        .sum()
}
header! {
    /// `Origin` request header,
    /// part of [CORS](http://www.w3.org/TR/cors/#origin-request-header)
    ///
    /// The `Origin` header indicates where the cross-origin request
    /// or preflight request originates from.
    ///
    /// # ABNF
    /// ```plain
    /// Origin = url
    /// ```
    ///
    /// # Example value
    /// * `http://google.com`
    ///
    /// # Example
    /// ```
    /// use hyper::header::{Headers, Origin};
    ///
    /// let mut headers = Headers::new();
    /// headers.set(Origin("http://www.example.com".to_owned()));
    /// ```
    (Origin, "Origin") => [String]

    // Fixed typo: the generated test module was previously named `test_orgin`.
    test_origin {
        test_header!(test1, vec![b"http://google.com/"]);
    }
}
|
use actix_web::{HttpResponse, web, post};
use serde::Serialize;
use crate::appdata::AppData;
use rand::Rng;
use crate::common::Statistics;
/// JSON body returned by `report_stat`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Response {
    // Application-level status (200 = accepted, 409 = new UUID issued).
    status: u16,
    // Present only when the client must adopt a freshly generated UUID.
    #[serde(skip_serializing_if = "Option::is_none")]
    new_uuid: Option<String>
}
#[post("/report")]
pub async fn report_stat(data: web::Data<AppData>, body: web::Json<Statistics>) -> HttpResponse {
if body.uuid.is_empty() {
let new_uuid: String = rand::thread_rng().sample_iter(rand::distributions::Alphanumeric).take(64).map(char::from).collect();
return HttpResponse::Ok().body(serde_json::to_string(&Response { status: 409, new_uuid: Some(new_uuid) }).unwrap());
}
match &data.tx.send(body.clone()) {
Ok(_) => {},
Err(e) => {
eprintln!("Failed to send Statistics over Channel: {:?}", e);
}
}
HttpResponse::Ok().body(serde_json::to_string(&Response { status: 200, new_uuid: None }).unwrap())
} |
use super::*;
use std::path::Path;
use variant_reader::VariantType;
#[test]
fn insertion_test() {
    // Single insertion record inside the queried window of chromosome 11.
    let mut variants = read_indexed_vcf(
        Path::new("tests/resources/insertion.vcf.gz"),
        String::from("11"),
        887340,
        887350,
    );
    let observed = variants.pop().unwrap();
    let expected = Variant {
        marker_type: String::from("Variant"),
        reference: String::from("T"),
        alternatives: Some(String::from("TAAAAC")),
        var_type: VariantType::Insertion,
        // Positions are 0-based; start shifted by -0.5 and end by +0.5 so the
        // marker brackets the affected base.
        start_position: 887343.0,
        end_position: 887344.0,
    };
    assert_eq!(observed, expected);
}
#[test]
fn duplicate_test() {
    // Single duplication record inside the queried window of chromosome 11.
    let mut variants = read_indexed_vcf(
        Path::new("tests/resources/duplicate.vcf.gz"),
        String::from("11"),
        150000,
        151000,
    );
    let observed = variants.pop().unwrap();
    let expected = Variant {
        marker_type: String::from("Variant"),
        reference: String::from("ATC"),
        alternatives: Some(String::from("ATCATC")),
        var_type: VariantType::Duplicate,
        // Positions are 0-based; start shifted by -0.5 and end by +0.5.
        start_position: 150188.5,
        end_position: 150191.5,
    };
    assert_eq!(observed, expected);
}
#[test]
fn inversion_test() {
    // Single inversion record inside the queried window of chromosome 11.
    let mut variants = read_indexed_vcf(
        Path::new("tests/resources/inversion.vcf.gz"),
        String::from("11"),
        150000,
        151000,
    );
    let observed = variants.pop().unwrap();
    let expected = Variant {
        marker_type: String::from("Variant"),
        reference: String::from("GATTC"),
        alternatives: Some(String::from("CTTAG")),
        var_type: VariantType::Inversion,
        // Positions are 0-based; start shifted by -0.5 and end by +0.5.
        start_position: 150186.5,
        end_position: 150191.5,
    };
    assert_eq!(observed, expected);
}
#[test]
fn deletion_test() {
    // Single deletion record; deletions carry no alternative allele.
    let mut variants = read_indexed_vcf(
        Path::new("tests/resources/deletion.vcf.gz"),
        String::from("11"),
        150000,
        151000,
    );
    let observed = variants.pop().unwrap();
    let expected = Variant {
        marker_type: String::from("Variant"),
        reference: String::from("G"),
        alternatives: None,
        var_type: VariantType::Deletion,
        // Positions are 0-based; start shifted by -0.5 and end by +0.5.
        start_position: 150186.5,
        end_position: 150773.5,
    };
    assert_eq!(observed, expected);
}
|
use core::mem::size_of;
use core::ptr;
use core::slice;
/// ACPI System Description Table header; `repr(packed)` matches the exact
/// 36-byte on-firmware layout (ACPI spec §5.2.6).
#[repr(packed)]
#[derive(Clone, Copy, Debug, Default)]
pub struct SDTHeader {
    pub signature: [u8; 4],
    pub length: u32,
    pub revision: u8,
    pub checksum: u8,
    pub oemid: [u8; 6],
    pub oemtableid: [u8; 8],
    pub oemrevision: u32,
    pub creatorid: u32,
    pub creatorrevision: u32,
}

impl SDTHeader {
    /// Checks that the table has the expected `signature` and that all
    /// `length` bytes of the table sum to 0 modulo 256 (the ACPI checksum).
    ///
    /// Fixes: the byte sum previously used a plain `+` on `u8`, which panics
    /// on overflow in debug builds as soon as the running sum exceeds 255 —
    /// i.e. for virtually every real table. The checksum is defined modulo
    /// 256, so wrapping addition is the correct operation.
    pub fn valid(&self, signature: &str) -> bool {
        // Copy packed fields to locals first: taking references into a
        // packed struct is unaligned and rejected by newer compilers.
        let sig = self.signature;
        let length = self.length;
        if &sig[..] == signature.as_bytes() {
            let ptr = (self as *const Self) as *const u8;
            let sum: u8 = (0..length as isize).fold(0u8, |sum, i| {
                // SAFETY: reads `length` bytes starting at `self`; assumes
                // the whole table (header + payload) is mapped — guaranteed
                // by the firmware mapping this header was taken from.
                sum.wrapping_add(unsafe { ptr::read(ptr.offset(i)) })
            });
            sum == 0
        } else {
            false
        }
    }

    /// Returns the table payload (the bytes after the header) as a slice of `T`.
    ///
    /// NOTE(review): assumes the table stays mapped for the program's lifetime
    /// (`'static`) and that the payload divides evenly into `T`-sized items —
    /// confirm at call sites.
    pub fn data<T>(&self) -> &'static [T] {
        let ptr = ((self as *const Self) as usize + size_of::<Self>()) as *const T;
        let len = (self.length as usize - size_of::<Self>()) / size_of::<T>();
        // SAFETY: relies on the firmware-provided `length` being accurate and
        // the mapping outliving the returned slice (see note above).
        unsafe { slice::from_raw_parts(ptr, len) }
    }
}
|
use actix_web::web::ServiceConfig;
use actix_web::{delete, get, patch, post, web, Error, HttpResponse};
use crate::models::user::{RegisterUser, User};
use crate::db::{auth as db, PgPool};
/// Registers the auth endpoints on the actix service config.
pub fn endpoints(config: &mut ServiceConfig) {
    config.service(signup);
}
/// POST /api/auth/signup — registers a new user.
/// Only the duplicate-username check is implemented so far; the actual
/// registration path is still a stub.
#[post("/api/auth/signup")]
pub async fn signup(web::Json(user): web::Json<RegisterUser>, pool: web::Data<PgPool>) -> Result<HttpResponse, Error> {
    // NOTE(review): `unwrap` panics if the pool is exhausted or the DB is
    // unreachable — consider mapping this to a 503 instead.
    let conn = pool.get().unwrap();
    match db::get_by_username(&user.username, &conn) {
        Ok(_) => {
            // A row came back, so the name is already taken.
            Ok(HttpResponse::Conflict().json("Username already exists"))
        }
        Err(_) => {
            // Registration itself is not implemented yet.
            unimplemented!()
        }
    }
}
// svd2rust-generated reader/writer plumbing for the BRR register's BR0 field.
// Kept byte-identical to the generator's output; only comments added.
#[doc = "Register `BRR` reader"]
pub type R = crate::R<BRR_SPEC>;
#[doc = "Register `BRR` writer"]
pub type W = crate::W<BRR_SPEC>;
#[doc = "Field `BR0` reader - Port Reset bit"]
pub type BR0_R = crate::BitReader<BR0W_A>;
#[doc = "Port Reset bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BR0W_A {
    #[doc = "0: No action on the corresponding ODx bit"]
    NoAction = 0,
    #[doc = "1: Reset the ODx bit"]
    Reset = 1,
}
impl From<BR0W_A> for bool {
    #[inline(always)]
    fn from(variant: BR0W_A) -> Self {
        // Discriminants are 0/1, so the cast yields the bit value directly.
        variant as u8 != 0
    }
}
impl BR0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BR0W_A {
        match self.bits {
            false => BR0W_A::NoAction,
            true => BR0W_A::Reset,
        }
    }
    #[doc = "No action on the corresponding ODx bit"]
    #[inline(always)]
    pub fn is_no_action(&self) -> bool {
        *self == BR0W_A::NoAction
    }
    #[doc = "Reset the ODx bit"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == BR0W_A::Reset
    }
}
#[doc = "Field `BR0` writer - Port Reset bit"]
pub type BR0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, BR0W_A>;
impl<'a, REG, const O: u8> BR0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No action on the corresponding ODx bit"]
    #[inline(always)]
    pub fn no_action(self) -> &'a mut crate::W<REG> {
        self.variant(BR0W_A::NoAction)
    }
    #[doc = "Reset the ODx bit"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(BR0W_A::Reset)
    }
}
#[doc = "Field `BR1` reader - Port Reset bit"]
pub use BR0_R as BR1_R;
#[doc = "Field `BR2` reader - Port Reset bit"]
pub use BR0_R as BR2_R;
#[doc = "Field `BR3` reader - Port Reset bit"]
pub use BR0_R as BR3_R;
#[doc = "Field `BR4` reader - Port Reset bit"]
pub use BR0_R as BR4_R;
#[doc = "Field `BR5` reader - Port Reset bit"]
pub use BR0_R as BR5_R;
#[doc = "Field `BR6` reader - Port Reset bit"]
pub use BR0_R as BR6_R;
#[doc = "Field `BR13` reader - Port Reset bit"]
pub use BR0_R as BR13_R;
#[doc = "Field `BR14` reader - Port Reset bit"]
pub use BR0_R as BR14_R;
#[doc = "Field `BR15` reader - Port Reset bit"]
pub use BR0_R as BR15_R;
#[doc = "Field `BR1` writer - Port Reset bit"]
pub use BR0_W as BR1_W;
#[doc = "Field `BR2` writer - Port Reset bit"]
pub use BR0_W as BR2_W;
#[doc = "Field `BR3` writer - Port Reset bit"]
pub use BR0_W as BR3_W;
#[doc = "Field `BR4` writer - Port Reset bit"]
pub use BR0_W as BR4_W;
#[doc = "Field `BR5` writer - Port Reset bit"]
pub use BR0_W as BR5_W;
#[doc = "Field `BR6` writer - Port Reset bit"]
pub use BR0_W as BR6_W;
#[doc = "Field `BR13` writer - Port Reset bit"]
pub use BR0_W as BR13_W;
#[doc = "Field `BR14` writer - Port Reset bit"]
pub use BR0_W as BR14_W;
#[doc = "Field `BR15` writer - Port Reset bit"]
pub use BR0_W as BR15_W;
// svd2rust-generated per-bit read accessors (bits 7..=12 are absent in the
// hardware register, hence the gap between br6 and br13).
impl R {
    #[doc = "Bit 0 - Port Reset bit"]
    #[inline(always)]
    pub fn br0(&self) -> BR0_R {
        BR0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Port Reset bit"]
    #[inline(always)]
    pub fn br1(&self) -> BR1_R {
        BR1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Port Reset bit"]
    #[inline(always)]
    pub fn br2(&self) -> BR2_R {
        BR2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Port Reset bit"]
    #[inline(always)]
    pub fn br3(&self) -> BR3_R {
        BR3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Port Reset bit"]
    #[inline(always)]
    pub fn br4(&self) -> BR4_R {
        BR4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Port Reset bit"]
    #[inline(always)]
    pub fn br5(&self) -> BR5_R {
        BR5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Port Reset bit"]
    #[inline(always)]
    pub fn br6(&self) -> BR6_R {
        BR6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 13 - Port Reset bit"]
    #[inline(always)]
    pub fn br13(&self) -> BR13_R {
        BR13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Port Reset bit"]
    #[inline(always)]
    pub fn br14(&self) -> BR14_R {
        BR14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Port Reset bit"]
    #[inline(always)]
    pub fn br15(&self) -> BR15_R {
        BR15_R::new(((self.bits >> 15) & 1) != 0)
    }
}
// svd2rust-generated per-bit write proxies; each `brN` returns a typed
// single-bit writer positioned at offset N.
impl W {
    #[doc = "Bit 0 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br0(&mut self) -> BR0_W<BRR_SPEC, 0> {
        BR0_W::new(self)
    }
    #[doc = "Bit 1 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br1(&mut self) -> BR1_W<BRR_SPEC, 1> {
        BR1_W::new(self)
    }
    #[doc = "Bit 2 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br2(&mut self) -> BR2_W<BRR_SPEC, 2> {
        BR2_W::new(self)
    }
    #[doc = "Bit 3 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br3(&mut self) -> BR3_W<BRR_SPEC, 3> {
        BR3_W::new(self)
    }
    #[doc = "Bit 4 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br4(&mut self) -> BR4_W<BRR_SPEC, 4> {
        BR4_W::new(self)
    }
    #[doc = "Bit 5 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br5(&mut self) -> BR5_W<BRR_SPEC, 5> {
        BR5_W::new(self)
    }
    #[doc = "Bit 6 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br6(&mut self) -> BR6_W<BRR_SPEC, 6> {
        BR6_W::new(self)
    }
    #[doc = "Bit 13 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br13(&mut self) -> BR13_W<BRR_SPEC, 13> {
        BR13_W::new(self)
    }
    #[doc = "Bit 14 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br14(&mut self) -> BR14_W<BRR_SPEC, 14> {
        BR14_W::new(self)
    }
    #[doc = "Bit 15 - Port Reset bit"]
    #[inline(always)]
    #[must_use]
    pub fn br15(&mut self) -> BR15_W<BRR_SPEC, 15> {
        BR15_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "GPIO port bit reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`brr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`brr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct BRR_SPEC;
// 32-bit register backing type.
impl crate::RegisterSpec for BRR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`brr::R`](R) reader structure"]
impl crate::Readable for BRR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`brr::W`](W) writer structure"]
impl crate::Writable for BRR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets BRR to value 0"]
impl crate::Resettable for BRR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::fs::File;
use std::io::{self, BufRead};
/// AoC 2020 day 2 part 1: a "low-high char: password" line is valid when the
/// character occurs between `low` and `high` times (inclusive) in the password.
fn valid_part1(line: &str) -> bool {
    let mut halves = line.split(":");
    let policy = halves.next().unwrap();
    let passwd = halves.next().unwrap();
    let mut fields = policy.split(" ");
    let range_spec = fields.next().unwrap();
    let check = fields.next().unwrap().chars().next().unwrap();
    let bounds: Vec<&str> = range_spec.split("-").collect();
    let low: usize = bounds[0].parse().unwrap();
    let high: usize = bounds[1].parse().unwrap();
    let occurrences = passwd.chars().filter(|&c| c == check).count();
    (low..=high).contains(&occurrences)
}
/// AoC 2020 day 2 part 2: the line is valid when exactly one of the two
/// 1-based positions holds the required character.
fn valid_part2(line: &str) -> bool {
    let mut halves = line.split(":");
    let policy = halves.next().unwrap();
    let passwd = halves.next().unwrap().trim();
    let mut fields = policy.split(" ");
    let positions: Vec<&str> = fields.next().unwrap().split("-").collect();
    let check = fields.next().unwrap().chars().next().unwrap();
    // Convert the 1-based policy positions to 0-based indices.
    let first = positions[0].parse::<usize>().unwrap() - 1;
    let second = positions[1].parse::<usize>().unwrap() - 1;
    let chars: Vec<char> = passwd.chars().collect();
    // Exactly-one-of-two is XOR.
    (chars[first] == check) != (chars[second] == check)
}
/// Counts the lines of the day-2 input that satisfy the given validator.
fn part_n(f: &dyn Fn(&str) -> bool) -> usize {
    let handle = File::open("data/d02.txt").unwrap();
    io::BufReader::new(handle)
        .lines()
        .map(|line| line.unwrap())
        .filter(|line| f(line))
        .count()
}
/// Part 1 answer: lines valid under the count-based policy.
pub fn part1() -> usize {
    part_n(&valid_part1)
}
/// Part 2 answer: lines valid under the position-based policy.
pub fn part2() -> usize {
    part_n(&valid_part2)
}
|
pub mod sub; // 2. This part lives in a separate file and is exposed with `pub mod`
/// Inline submodule demonstrating a nested module path (`hdl::say2`).
pub mod hdl {
    /// Prints the argument prefixed with "say2:".
    pub fn say2(ss: &str) {
        println!("say2: {:?}", ss)
    }
}
/// Prints the argument prefixed with "say:".
pub fn say(ss: &str) {
    println!("say: {:?}", ss)
}
|
mod common;
use std::collections::HashSet;
/// AoC 2020 day 1 part 1: product of the two entries summing to 2020.
///
/// Fixes a self-pairing bug: the original built the lookup set from ALL
/// entries up front, so a single entry of 1010 would "pair" with itself
/// (set.contains(2020 - 1010) is true for the very element being visited).
/// Inserting entries only after they have been checked guarantees the pair
/// uses two distinct positions.
///
/// # Panics
/// Panics when no valid pair exists.
fn part1(entries: &[i32]) -> i32 {
    let mut seen = HashSet::new();
    for &e1 in entries {
        let e2 = 2020 - e1;
        if seen.contains(&e2) {
            return e1 * e2;
        }
        seen.insert(e1);
    }
    panic!("no matching entries found")
}
/// AoC 2020 day 1 part 2: product of the three entries summing to 2020.
///
/// Fixes the same self-use bug as `part1`: the set-based lookup could pick
/// `e3` at the same position as `e1` or `e2` (e.g. for `[1000, 20]` it
/// returned 1000 * 20 * 1000). The i < j < k triple loop guarantees three
/// distinct positions; it is O(n^3) instead of O(n^2), which is fine for the
/// puzzle's ~200-entry input and is correct for duplicates.
///
/// # Panics
/// Panics when no valid triple exists.
fn part2(entries: &[i32]) -> i32 {
    for (i, &e1) in entries.iter().enumerate() {
        for (j, &e2) in entries.iter().enumerate().skip(i + 1) {
            for &e3 in &entries[j + 1..] {
                if e1 + e2 + e3 == 2020 {
                    return e1 * e2 * e3;
                }
            }
        }
    }
    panic!("no matching entries found")
}
/// Reads the puzzle input (one integer per line) and prints both answers.
fn main() {
    let input: Vec<i32> = common::get_input();
    println!("Part1: {}", part1(&input));
    println!("Part2: {}", part2(&input));
}
// Unit tests using the worked example from the puzzle statement.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_part1() {
        assert_eq!(part1(&[1721, 979, 366, 299, 675, 1456]), 514579);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(&[1721, 979, 366, 299, 675, 1456]), 241861950);
    }
}
|
// svd2rust-generated plumbing for the NEXT_CONN register.
// Kept byte-identical to the generator's output; only comments added.
#[doc = "Reader of register NEXT_CONN"]
pub type R = crate::R<u32, super::NEXT_CONN>;
#[doc = "Writer for register NEXT_CONN"]
pub type W = crate::W<u32, super::NEXT_CONN>;
#[doc = "Register NEXT_CONN `reset()`'s with value 0"]
impl crate::ResetValue for super::NEXT_CONN {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `NEXT_CONN_INDEX`"]
pub type NEXT_CONN_INDEX_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `NEXT_CONN_INDEX`"]
pub struct NEXT_CONN_INDEX_W<'a> {
    w: &'a mut W,
}
impl<'a> NEXT_CONN_INDEX_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:4 (mask 0x1f).
        self.w.bits = (self.w.bits & !0x1f) | ((value as u32) & 0x1f);
        self.w
    }
}
#[doc = "Reader of field `NEXT_CONN_TYPE`"]
pub type NEXT_CONN_TYPE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NEXT_CONN_TYPE`"]
pub struct NEXT_CONN_TYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> NEXT_CONN_TYPE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 5.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `NI_VALID`"]
pub type NI_VALID_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NI_VALID`"]
pub struct NI_VALID_W<'a> {
    w: &'a mut W,
}
impl<'a> NI_VALID_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 6.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
// Field read accessors.
impl R {
    #[doc = "Bits 0:4 - Connection Index to be serviced. Allowed values are 0,1,2,3."]
    #[inline(always)]
    pub fn next_conn_index(&self) -> NEXT_CONN_INDEX_R {
        NEXT_CONN_INDEX_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bit 5 - Connection type 1 - Master Connection 0 - Slave Connection"]
    #[inline(always)]
    pub fn next_conn_type(&self) -> NEXT_CONN_TYPE_R {
        NEXT_CONN_TYPE_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Flag indication if programmed NI_TIMER is valid. FW sets this bit to indicate that the NI_TIMER is programmed. HW clears this bit on servicing the connection of if NI_TIMER is pointing to past value"]
    #[inline(always)]
    pub fn ni_valid(&self) -> NI_VALID_R {
        NI_VALID_R::new(((self.bits >> 6) & 0x01) != 0)
    }
}
// Field write proxies.
impl W {
    #[doc = "Bits 0:4 - Connection Index to be serviced. Allowed values are 0,1,2,3."]
    #[inline(always)]
    pub fn next_conn_index(&mut self) -> NEXT_CONN_INDEX_W {
        NEXT_CONN_INDEX_W { w: self }
    }
    #[doc = "Bit 5 - Connection type 1 - Master Connection 0 - Slave Connection"]
    #[inline(always)]
    pub fn next_conn_type(&mut self) -> NEXT_CONN_TYPE_W {
        NEXT_CONN_TYPE_W { w: self }
    }
    #[doc = "Bit 6 - Flag indication if programmed NI_TIMER is valid. FW sets this bit to indicate that the NI_TIMER is programmed. HW clears this bit on servicing the connection of if NI_TIMER is pointing to past value"]
    #[inline(always)]
    pub fn ni_valid(&mut self) -> NI_VALID_W {
        NI_VALID_W { w: self }
    }
}
|
use crate::{Config, FormatOptions};
use crate::{Result, TaskpaperFile};
use path_absolutize::Absolutize;
use std::cmp;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
/// Direction for a single sort key: descending or ascending.
#[derive(Debug)]
enum SortDir {
    Desc,
    Asc,
}
/// One sort criterion: the tag name whose value to sort by, and the direction.
#[derive(Debug)]
struct SortBy {
    key: String,
    dir: SortDir,
}
/// Computes the sequence of values one match is sorted by: one entry per
/// requested sort key (the matching tag's value, if the tag exists and has a
/// value), followed by the file path and a zero-padded line number so that two
/// matches always have a deterministic relative order.
fn get_sort_values(
    tpf: &TaskpaperFile,
    sorting_set: &[SortBy],
    node_id: &crate::NodeId,
    path: &Path,
    line_no: usize,
) -> Vec<Option<String>> {
    let tags = tpf[node_id].item().tags();
    let mut values: Vec<Option<String>> = sorting_set
        .iter()
        .map(|s| {
            tags.get(&s.key)
                .and_then(|tag| tag.value.as_ref())
                .map(|v| v.to_string())
        })
        .collect();
    // As tiebreaker, we use (path, string)
    values.push(Some(path.to_string_lossy().into_owned()));
    values.push(Some(format!("{:05}", line_no)));
    values
}
/// A single search hit: the file it came from, where in that file it is, and
/// the matched node.
pub struct Match<'a> {
    pub tpf: &'a TaskpaperFile,
    pub path: &'a Path,
    // 1-based line number of the matched item within `path`.
    pub line_no: usize,
    pub node_id: crate::NodeId,
}
// TODO(hrapp): This seems messy - on the one site, this should be part of the Database, on the
// other site this is used in the App too. It is also questionable if all files should be searched
// or only one.
/// Searches all files in `files_map` for items matching `query`.
///
/// Aliases from `config` are expanded in the query first. Files listed in
/// `config.search.excluded_files` are skipped. If `sort_by` is given (a
/// comma-separated list of tag names, each optionally prefixed with '-' for
/// descending), matches are sorted by those tag values; (path, line number)
/// act as final tiebreakers so the result order is deterministic.
pub fn search<'a>(
    mut query: String,
    sort_by: Option<&str>,
    config: &Config,
    files_map: &'a HashMap<PathBuf, impl AsRef<TaskpaperFile>>,
) -> Result<Vec<Match<'a>>> {
    // Expand configured aliases, re-scanning after every substitution. Capped
    // at 50 rounds so (mutually) recursive alias definitions cannot loop forever.
    'outer: for _ in 0..50 {
        for (key, value) in &config.aliases {
            let new_query = query.replace(key, value);
            if new_query != query {
                query = new_query;
                continue 'outer;
            }
        }
    }
    // Parse the sort specification, e.g. "-due,priority".
    // (`as_ref()` on the `Option<&str>` was redundant; `&str` is Copy.)
    let sort_order = sort_by.map(|s| {
        let mut res = Vec::new();
        for entry in s.split(',') {
            let entry = entry.trim();
            if entry.starts_with('-') {
                res.push(SortBy {
                    key: entry.trim_start_matches('-').to_string(),
                    dir: SortDir::Desc,
                });
            } else {
                res.push(SortBy {
                    key: entry.to_string(),
                    dir: SortDir::Asc,
                });
            }
        }
        res
    });
    // Run the query over every non-excluded file.
    let mut searches: HashMap<&Path, _> = HashMap::new();
    for (path, tpf) in files_map {
        if let Some(name) = path.file_name() {
            if config
                .search
                .excluded_files
                .contains(name.to_string_lossy().as_ref())
            {
                continue;
            }
        }
        searches.insert(path as &Path, (tpf.as_ref().search(&query)?, tpf.as_ref()));
    }
    // Collect matches. Iterate entries directly instead of keys() + indexing,
    // which looked every key up a second time. An empty `node_ids` simply
    // yields no iterations, so no explicit is_empty() check is needed.
    let mut matches = Vec::new();
    for (path, (node_ids, tpf)) in &searches {
        for node_id in node_ids.iter() {
            let item = tpf[node_id].item();
            matches.push(Match {
                tpf: *tpf,
                path: *path,
                line_no: item.line_index().unwrap() + 1,
                node_id: node_id.clone(),
            });
        }
    }
    if let Some(ref s) = sort_order {
        matches.sort_by(|a, b| {
            let val_a = get_sort_values(a.tpf, s, &a.node_id, a.path, a.line_no);
            let val_b = get_sort_values(b.tpf, s, &b.node_id, b.path, b.line_no);
            // BUGFIX: compare *all* values, including the (path, line number)
            // tiebreakers appended by get_sort_values. The previous code only
            // compared the first s.len() values, so tied matches were left in
            // HashMap iteration order, i.e. nondeterministically ordered.
            for idx in 0..val_a.len() {
                let res = match s.get(idx).map(|sort| &sort.dir) {
                    Some(SortDir::Desc) => val_b[idx].cmp(&val_a[idx]),
                    // User-specified ascending keys and the trailing
                    // tiebreakers (beyond the sort spec) both sort ascending.
                    _ => val_a[idx].cmp(&val_b[idx]),
                };
                if res != cmp::Ordering::Equal {
                    return res;
                }
            }
            cmp::Ordering::Equal
        });
    }
    Ok(matches)
}
/// A folder containing many Taskpaper files. Some of which are special, like inbox, timeline.
#[derive(Debug)]
pub struct Database {
    // Absolute root directory; all relative paths in the database are resolved against it.
    pub root: PathBuf,
}
impl Database {
    /// Opens the database rooted at `dir`. The path is made absolute; the
    /// directory itself is not read until one of the parse methods is called.
    pub fn from_dir(dir: impl AsRef<Path>) -> Result<Self> {
        let root = dir.as_ref().absolutize()?.to_path_buf();
        Ok(Self { root })
    }
    /// Reads and parses `.config.toml` at the database root.
    pub fn config(&self) -> Result<Config> {
        let data = std::fs::read_to_string(self.root.join(".config.toml"))?;
        Ok(toml::from_str(&data).map_err(|e| crate::Error::InvalidConfig(e.to_string()))?)
    }
    /// Walks the root directory and parses every `*.taskpaper` file.
    ///
    /// Unreadable directory entries are skipped silently; files that fail to
    /// parse are reported on stdout and skipped. Keys of the returned map are
    /// paths relative to the database root.
    pub fn parse_all_files(&self) -> Result<HashMap<PathBuf, TaskpaperFile>> {
        let mut files = HashMap::new();
        for entry in WalkDir::new(&self.root) {
            // Skip entries the walker could not read instead of aborting the
            // whole scan. (Replaces the is_err()/unwrap() pattern.)
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };
            let path = entry.path();
            if path.extension() != Some(OsStr::new("taskpaper")) {
                continue;
            }
            let file = match TaskpaperFile::parse_file(path) {
                Ok(f) => f,
                Err(_) => {
                    println!("Skipping {:?} due to parsing errors.", path);
                    continue;
                }
            };
            let relative_path = path.strip_prefix(&self.root).unwrap().to_path_buf();
            files.insert(relative_path, file);
        }
        Ok(files)
    }
    /// Returns the name (i.e. relative path) of 'path' inside of the database.
    /// Returns `None` if the path cannot be absolutized or lies outside the root.
    pub fn relative(&self, path: impl AsRef<Path>) -> Option<PathBuf> {
        let canon = path.as_ref().absolutize().ok()?;
        let rel = canon.strip_prefix(&self.root).ok()?;
        // strip_prefix returning the full path means the root prefix was empty,
        // i.e. 'path' is not really inside the database.
        if rel == canon {
            None
        } else {
            Some(rel.to_path_buf())
        }
    }
    /// Parses one of the well-known files (inbox, todo, ...).
    ///
    /// Panics if the file does not exist under the root.
    pub fn parse_common_file(&self, kind: CommonFileKind) -> Result<TaskpaperFile> {
        TaskpaperFile::parse_file(kind.find(&self.root).expect("Common file not found!"))
    }
    /// Looks up the format options for a file: first under the file's stem in
    /// the config, then under "default", finally falling back to the built-in
    /// defaults.
    pub fn get_format_for_filename(&self, path: impl AsRef<Path>) -> Result<FormatOptions> {
        let stem = path
            .as_ref()
            .file_stem()
            .expect("Always a filestem")
            .to_string_lossy();
        let config = self.config()?;
        for name in [stem.as_ref(), "default"] {
            if let Some(f) = config.formats.get(name) {
                return Ok(f.clone());
            }
        }
        Ok(FormatOptions::default())
    }
    /// Serializes `tpf` over the on-disk common file of the given kind, using
    /// that file's configured format. Panics if the file does not exist.
    pub fn overwrite_common_file(&self, tpf: &TaskpaperFile, kind: CommonFileKind) -> Result<()> {
        let format = self.get_format_for_filename(&kind.to_path_buf())?;
        tpf.write(
            kind.find(&self.root).expect("Common file not found!"),
            format,
        )
    }
    /// Absolute path of the given common file, or `None` if it does not exist.
    pub fn path_of_common_file(&self, kind: CommonFileKind) -> Option<PathBuf> {
        kind.find(&self.root)
    }
}
/// The well-known files of a database that tools address by role rather than
/// by file name.
#[derive(Debug)]
pub enum CommonFileKind {
    Inbox,
    Todo,
    Tickle,
    Logbook,
    Timeline,
}
impl CommonFileKind {
    /// Absolute path of this common file under `root`, or `None` when it does
    /// not exist on disk.
    fn find(&self, root: &Path) -> Option<PathBuf> {
        let candidate = root.join(self.to_path_buf());
        if candidate.exists() {
            return Some(candidate);
        }
        None
    }
    /// The well-known (root-relative) file name for this kind.
    fn to_path_buf(&self) -> PathBuf {
        let name = match *self {
            CommonFileKind::Inbox => "01_inbox.taskpaper",
            CommonFileKind::Todo => "02_todo.taskpaper",
            CommonFileKind::Tickle => "03_tickle.taskpaper",
            CommonFileKind::Logbook => "40_logbook.taskpaper",
            CommonFileKind::Timeline => "10_timeline.taskpaper",
        };
        PathBuf::from(name)
    }
}
#[cfg(test)]
mod tests {
    use crate::testing::DatabaseTest;
    use crate::CommonFileKind;
    // TODO(sirver): Actually add a few tests for tickling, timeline and so on?
    #[test]
    fn test_tickle_file() {
        // Set up a database with a tickle date in the inbox and two
        // surrounding tickle entries, then read it back.
        let mut t = DatabaseTest::new();
        t.write_file(
            CommonFileKind::Inbox.to_path_buf(),
            "- to tickle @tickle(2018-10-01)\n",
        );
        t.write_file(
            CommonFileKind::Tickle.to_path_buf(),
            "- before item @tickle(2018-09-01)\n \
             - after item @tickle(2018-10-02)\n",
        );
        let _db = t.read_database();
        // TODO(sirver): This test does nothing currently.
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Top-level client error: one transparent variant per operation, so callers
/// can use `?` across operations and still match on the originating module's
/// error when needed.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    AvailabilityGroupListeners_Get(#[from] availability_group_listeners::get::Error),
    #[error(transparent)]
    AvailabilityGroupListeners_CreateOrUpdate(#[from] availability_group_listeners::create_or_update::Error),
    #[error(transparent)]
    AvailabilityGroupListeners_Delete(#[from] availability_group_listeners::delete::Error),
    #[error(transparent)]
    AvailabilityGroupListeners_ListByGroup(#[from] availability_group_listeners::list_by_group::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_Get(#[from] sql_virtual_machine_groups::get::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_CreateOrUpdate(#[from] sql_virtual_machine_groups::create_or_update::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_Update(#[from] sql_virtual_machine_groups::update::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_Delete(#[from] sql_virtual_machine_groups::delete::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_ListByResourceGroup(#[from] sql_virtual_machine_groups::list_by_resource_group::Error),
    #[error(transparent)]
    SqlVirtualMachineGroups_List(#[from] sql_virtual_machine_groups::list::Error),
    #[error(transparent)]
    SqlVirtualMachines_ListBySqlVmGroup(#[from] sql_virtual_machines::list_by_sql_vm_group::Error),
    #[error(transparent)]
    SqlVirtualMachines_List(#[from] sql_virtual_machines::list::Error),
    #[error(transparent)]
    SqlVirtualMachines_Get(#[from] sql_virtual_machines::get::Error),
    #[error(transparent)]
    SqlVirtualMachines_CreateOrUpdate(#[from] sql_virtual_machines::create_or_update::Error),
    #[error(transparent)]
    SqlVirtualMachines_Update(#[from] sql_virtual_machines::update::Error),
    #[error(transparent)]
    SqlVirtualMachines_Delete(#[from] sql_virtual_machines::delete::Error),
    #[error(transparent)]
    SqlVirtualMachines_ListByResourceGroup(#[from] sql_virtual_machines::list_by_resource_group::Error),
}
// Operations on availability group listeners of a SQL virtual machine group.
// Generated client code: each REST operation is a free async function plus a
// sibling module holding its response and error types.
pub mod availability_group_listeners {
    use super::{models, API_VERSION};
    /// GETs a single availability group listener by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        sql_virtual_machine_group_name: &str,
        availability_group_listener_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::AvailabilityGroupListener, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}/availabilityGroupListeners/{}" , operation_config . base_path () , subscription_id , resource_group_name , sql_virtual_machine_group_name , availability_group_listener_name) ;
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::AvailabilityGroupListener =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Any other status surfaces as DefaultResponse with the code attached.
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    /// Response/error types for [`get`].
    pub mod get {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// PUTs (creates or replaces) an availability group listener; 200 means
    /// updated, 201 means created.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        sql_virtual_machine_group_name: &str,
        availability_group_listener_name: &str,
        parameters: &models::AvailabilityGroupListener,
        subscription_id: &str,
    ) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}/availabilityGroupListeners/{}" , operation_config . base_path () , subscription_id , resource_group_name , sql_virtual_machine_group_name , availability_group_listener_name) ;
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        req_builder = req_builder.header("content-type", "application/json");
        let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::AvailabilityGroupListener = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Ok200(rsp_value))
            }
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: models::AvailabilityGroupListener = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(create_or_update::Response::Created201(rsp_value))
            }
            status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
        }
    }
    /// Response/error types for [`create_or_update`].
    pub mod create_or_update {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::AvailabilityGroupListener),
            Created201(models::AvailabilityGroupListener),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// DELETEs an availability group listener; 202 means the deletion was
    /// accepted asynchronously, 204 means it was already gone.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        sql_virtual_machine_group_name: &str,
        availability_group_listener_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}/availabilityGroupListeners/{}" , operation_config . base_path () , subscription_id , resource_group_name , sql_virtual_machine_group_name , availability_group_listener_name) ;
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => Err(delete::Error::DefaultResponse { status_code }),
        }
    }
    /// Response/error types for [`delete`].
    pub mod delete {
        use super::{models, API_VERSION};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all availability group listeners of a SQL virtual machine group.
    pub async fn list_by_group(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        sql_virtual_machine_group_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<models::AvailabilityGroupListenerListResult, list_by_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}/availabilityGroupListeners" , operation_config . base_path () , subscription_id , resource_group_name , sql_virtual_machine_group_name) ;
        let mut url = url::Url::parse(url_str).map_err(list_by_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::AvailabilityGroupListenerListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list_by_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_by_group::Error::DefaultResponse { status_code }),
        }
    }
    /// Response/error types for [`list_by_group`].
    pub mod list_by_group {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
// Provider-level operation metadata (the Microsoft.SqlVirtualMachine
// "operations" listing). Generated client code.
pub mod operations {
    use super::{models, API_VERSION};
    /// Lists all operations the resource provider supports.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<models::OperationListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.SqlVirtualMachine/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the configuration carries a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::OperationListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    /// Response/error types for [`list`].
    pub mod list {
        use super::{models, API_VERSION};
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod sql_virtual_machine_groups {
use super::{models, API_VERSION};
/// GETs a single SQL virtual machine group by name.
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineGroup, get::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when the configuration carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachineGroup =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        // Any other status surfaces as DefaultResponse with the code attached.
        status_code => Err(get::Error::DefaultResponse { status_code }),
    }
}
/// Response/error types for [`get`].
pub mod get {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PUTs (creates or replaces) a SQL virtual machine group; 200 means updated,
/// 201 means created.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_group_name: &str,
    parameters: &models::SqlVirtualMachineGroup,
    subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when the configuration carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachineGroup = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachineGroup = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
    }
}
/// Response/error types for [`create_or_update`].
pub mod create_or_update {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::SqlVirtualMachineGroup),
        Created201(models::SqlVirtualMachineGroup),
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// PATCHes (partially updates) a SQL virtual machine group.
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_group_name: &str,
    parameters: &models::SqlVirtualMachineGroupUpdate,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineGroup, update::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when the configuration carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachineGroup =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(update::Error::DefaultResponse { status_code }),
    }
}
/// Response/error types for [`update`].
pub mod update {
    use super::{models, API_VERSION};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// DELETEs a SQL virtual machine group; 202 means the deletion was accepted
/// asynchronously, 204 means it was already gone.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token only when the configuration carries a credential.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => Err(delete::Error::DefaultResponse { status_code }),
    }
}
/// Response/error types for [`delete`].
pub mod delete {
    use super::{models, API_VERSION};
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all SQL virtual machine groups in a resource group (HTTP GET).
/// A 200 body is deserialized into `SqlVirtualMachineGroupListResult`.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineGroupListResult, list_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    // Collection URL: all groups under the resource group.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Keep the raw body in the error for diagnostics if decoding fails.
            let rsp_value: models::SqlVirtualMachineGroupListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_by_resource_group::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`list_by_resource_group`].
pub mod list_by_resource_group {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all SQL virtual machine groups in the subscription (HTTP GET).
pub async fn list(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineGroupListResult, list::Error> {
    let http_client = operation_config.http_client();
    // Subscription-wide collection URL (no resource group segment).
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachineGroupListResult =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`list`].
pub mod list {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
pub mod sql_virtual_machines {
use super::{models, API_VERSION};
/// Lists the SQL virtual machines belonging to a SQL VM group (HTTP GET).
pub async fn list_by_sql_vm_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineListResult, list_by_sql_vm_group::Error> {
    let http_client = operation_config.http_client();
    // Sub-collection URL: VMs nested under the named group.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachineGroups/{}/sqlVirtualMachines",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_sql_vm_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_sql_vm_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list_by_sql_vm_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_sql_vm_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachineListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_sql_vm_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_by_sql_vm_group::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`list_by_sql_vm_group`].
pub mod list_by_sql_vm_group {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all SQL virtual machines in the subscription (HTTP GET).
pub async fn list(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineListResult, list::Error> {
    let http_client = operation_config.http_client();
    // Subscription-wide collection URL (no resource group segment).
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachineListResult =
                serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`list`].
pub mod list {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Fetches a single SQL virtual machine (HTTP GET).
/// `expand` is forwarded as the optional `$expand` query parameter.
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_name: &str,
    expand: Option<&str>,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachine, get::Error> {
    let http_client = operation_config.http_client();
    // Resource URL for the named SQL VM.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_name
    );
    let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // `$expand` is only added when the caller provided a value.
    if let Some(expand) = expand {
        url.query_pairs_mut().append_pair("$expand", expand);
    }
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachine =
                serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(get::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`get`].
pub mod get {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Creates or updates a SQL virtual machine (HTTP PUT with a JSON body).
/// 200 means updated, 201 means created; both carry the resulting resource.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_name: &str,
    parameters: &models::SqlVirtualMachine,
    subscription_id: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = operation_config.http_client();
    // Resource URL for the named SQL VM.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_name
    );
    let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PUT);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(create_or_update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // JSON request body serialized from `parameters`.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(create_or_update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(create_or_update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachine = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: models::SqlVirtualMachine = serde_json::from_slice(rsp_body)
                .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
    }
}
/// Response and error types for [`create_or_update`].
pub mod create_or_update {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Success statuses, each carrying the stored resource.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::SqlVirtualMachine),
        Created201(models::SqlVirtualMachine),
    }
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Partially updates a SQL virtual machine (HTTP PATCH with a JSON body)
/// and returns the updated resource on a 200.
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_name: &str,
    parameters: &models::SqlVirtualMachineUpdate,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachine, update::Error> {
    let http_client = operation_config.http_client();
    // Resource URL for the named SQL VM.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_name
    );
    let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::PATCH);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // JSON request body serialized from the PATCH parameters.
    req_builder = req_builder.header("content-type", "application/json");
    let req_body = azure_core::to_json(parameters).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachine =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(update::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`update`].
pub mod update {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Deletes the named SQL virtual machine (HTTP DELETE).
/// Maps 200/202/204 to the matching [`delete::Response`] variant.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    sql_virtual_machine_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    // Resource URL for the named SQL VM.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/{}",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        sql_virtual_machine_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // DELETE sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => Err(delete::Error::DefaultResponse { status_code }),
    }
}
/// Response and error types for [`delete`].
pub mod delete {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Success statuses the service may return for a delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    /// Errors from building, sending, or decoding the request.
    /// NOTE(review): generated code — some variants (e.g. `SerializeError`)
    /// are never constructed by this bodyless operation.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists all SQL virtual machines in a resource group (HTTP GET).
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    subscription_id: &str,
) -> std::result::Result<models::SqlVirtualMachineListResult, list_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    // Collection URL: all SQL VMs under the resource group.
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    // Attach a bearer token only when a credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
    // GET sends no request body.
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            // Raw body travels with the error when JSON decoding fails.
            let rsp_value: models::SqlVirtualMachineListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_by_resource_group::Error::DefaultResponse { status_code }),
    }
}
/// Error types for [`list_by_resource_group`].
pub mod list_by_resource_group {
    // Generated import; not every item is necessarily used by this module.
    use super::{models, API_VERSION};
    /// Errors from building, sending, or decoding the request.
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
}
|
use super::{Block, BlockId, Field};
use crate::resource::ResourceId;
use crate::Promise;
use std::collections::HashSet;
use wasm_bindgen::prelude::*;
/// A free-form note block: a named piece of text plus the set of tag
/// blocks attached to it.
#[derive(Clone)]
pub struct Memo {
    /// Display name of the memo.
    name: String,
    /// Body text of the memo.
    text: String,
    /// IDs of tag blocks attached to this memo.
    tags: HashSet<BlockId>,
}
impl Memo {
    /// Creates an empty memo: no name, no text, no tags.
    pub fn new() -> Self {
        Self {
            name: String::new(),
            text: String::new(),
            tags: HashSet::new(),
        }
    }

    /// Returns the memo's display name.
    pub fn name(&self) -> &String {
        &self.name
    }

    /// Replaces the memo's display name.
    pub fn set_name(&mut self, name: String) {
        self.name = name;
    }

    /// Returns the memo's body text.
    pub fn text(&self) -> &String {
        &self.text
    }

    /// Replaces the memo's body text.
    pub fn set_text(&mut self, text: String) {
        self.text = text;
    }

    /// Returns the subset of `tag_index` that is attached to this memo,
    /// preserving the order of `tag_index`.
    pub fn tags<'a>(
        &self,
        tag_index: impl Iterator<Item = &'a BlockId>,
    ) -> impl Iterator<Item = BlockId> {
        // Collect eagerly so the returned iterator does not borrow `self`.
        let tags: Vec<BlockId> = tag_index.filter(|tag_id| self.has(tag_id)).cloned().collect();
        tags.into_iter()
    }

    /// Returns true if `tag_id` is attached to this memo.
    pub fn has(&self, tag_id: &BlockId) -> bool {
        // `contains` is the idiomatic membership test (was `get(..).is_some()`).
        self.tags.contains(tag_id)
    }

    /// Attaches a tag; inserting an already-present tag is a no-op.
    pub fn add_tag(&mut self, tag_id: BlockId) {
        self.tags.insert(tag_id);
    }

    /// Detaches a tag; removing an absent tag is a no-op.
    pub fn remove_tag(&mut self, tag_id: &BlockId) {
        self.tags.remove(tag_id);
    }
}
impl Block for Memo {
    // NOTE(review): every trait method below is still a stub; calling any
    // of them panics via `unimplemented!`. Implement before shipping.
    fn pack(&self) -> Promise<JsValue> {
        unimplemented!();
    }
    fn unpack(_: &mut Field, val: JsValue) -> Promise<Box<Self>> {
        unimplemented!();
    }
    fn dependents(&self, field: &Field) -> HashSet<BlockId> {
        unimplemented!();
    }
    fn resources(&self, field: &Field) -> HashSet<ResourceId> {
        unimplemented!();
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Substrate core types and inherents for timestamps.
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "std")]
use codec::Decode;
use codec::Encode;
#[cfg(feature = "std")]
use sp_inherents::ProvideInherentData;
use sp_inherents::{InherentData, InherentIdentifier, IsFatalError};
use sp_runtime::RuntimeString;
/// The identifier for the `timestamp` inherent.
/// NOTE: must be exactly 8 bytes; existing inherent data is keyed on this
/// exact value, so do not change it.
pub const INHERENT_IDENTIFIER: InherentIdentifier = *b"timstap0";
/// The type of the inherent.
pub type InherentType = u64;
/// Errors that can occur while checking the timestamp inherent.
#[derive(Encode, sp_runtime::RuntimeDebug)]
#[cfg_attr(feature = "std", derive(Decode))]
pub enum InherentError {
    /// The timestamp is valid in the future.
    /// This is a non-fatal-error and will not stop checking the inherents.
    ValidAtTimestamp(InherentType),
    /// Some other error; treated as fatal by `IsFatalError`.
    Other(RuntimeString),
}
impl IsFatalError for InherentError {
    /// Only `Other` errors abort inherent checking; a timestamp that is
    /// merely valid at a future time is reported as non-fatal.
    fn is_fatal_error(&self) -> bool {
        !matches!(self, InherentError::ValidAtTimestamp(_))
    }
}
impl InherentError {
    /// Try to create an instance out of the given identifier and data.
    /// Returns `None` when the identifier is not the timestamp identifier
    /// or the payload fails to decode.
    #[cfg(feature = "std")]
    pub fn try_from(id: &InherentIdentifier, data: &[u8]) -> Option<Self> {
        if id == &INHERENT_IDENTIFIER {
            <InherentError as codec::Decode>::decode(&mut &data[..]).ok()
        } else {
            None
        }
    }
}
/// Auxiliary trait to extract timestamp inherent data.
pub trait TimestampInherentData {
    /// Get timestamp inherent data.
    fn timestamp_inherent_data(&self) -> Result<InherentType, sp_inherents::Error>;
}
impl TimestampInherentData for InherentData {
    fn timestamp_inherent_data(&self) -> Result<InherentType, sp_inherents::Error> {
        // `get_data` yields Ok(None) when no entry exists under the
        // identifier; surface that as a descriptive error here.
        self.get_data(&INHERENT_IDENTIFIER)
            .and_then(|r| r.ok_or_else(|| "Timestamp inherent data not found".into()))
    }
}
/// Provide duration since unix epoch in millisecond for timestamp inherent.
#[cfg(feature = "std")]
pub struct InherentDataProvider;
#[cfg(feature = "std")]
impl ProvideInherentData for InherentDataProvider {
    fn inherent_identifier(&self) -> &'static InherentIdentifier {
        &INHERENT_IDENTIFIER
    }
    /// Puts the current wall-clock time (ms since the unix epoch) into
    /// `inherent_data` under [`INHERENT_IDENTIFIER`].
    fn provide_inherent_data(
        &self,
        inherent_data: &mut InherentData,
    ) -> Result<(), sp_inherents::Error> {
        // wasm_timer works in both native and wasm environments.
        use wasm_timer::SystemTime;
        let now = SystemTime::now();
        now.duration_since(SystemTime::UNIX_EPOCH)
            .map_err(|_| "Current time is before unix epoch".into())
            .and_then(|d| {
                // as_millis() returns u128; truncated to u64 (safe for any
                // realistic wall-clock value).
                let duration: InherentType = d.as_millis() as u64;
                inherent_data.put_data(INHERENT_IDENTIFIER, &duration)
            })
    }
    /// Decodes an encoded `InherentError` into a human-readable string.
    fn error_to_string(&self, error: &[u8]) -> Option<String> {
        InherentError::try_from(&INHERENT_IDENTIFIER, error).map(|e| format!("{:?}", e))
    }
}
/// A trait which is called when the timestamp is set.
/// Implemented for tuples of up to 30 implementors, which invokes each
/// element in turn (via `impl_trait_for_tuples`).
#[impl_trait_for_tuples::impl_for_tuples(30)]
pub trait OnTimestampSet<Moment> {
    fn on_timestamp_set(moment: Moment);
}
|
/// A structure which represents 4 box sides.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Sides<T> {
    /// Top side.
    pub top: T,
    /// Bottom side.
    pub bottom: T,
    /// Left side.
    pub left: T,
    /// Right side.
    pub right: T,
}
impl<T> Sides<T> {
    /// Creates a `Sides` from the four individual sides.
    ///
    /// Note the argument order: left, right, top, bottom.
    pub const fn new(left: T, right: T, top: T, bottom: T) -> Self {
        Sides { left, right, top, bottom }
    }
    /// Creates a `Sides` with every side set to the same `value`.
    pub const fn filled(value: T) -> Self
    where
        T: Copy,
    {
        Self::new(value, value, value, value)
    }
}
|
use std::ops::Deref;
use std::fmt::{self, Display};
use itertools::Itertools;
use super::super::{LinedString, Environment, Elaborator, TermID, ThmID, SortID,
Sort, Term, Thm};
use super::{AtomID, LispKind, LispVal, Uncons, InferTarget, Proc, ProcPos};
/// The context needed to render environment-dependent values: the source
/// text (for positions) and the elaboration environment (for names).
#[derive(Copy, Clone)]
pub struct FormatEnv<'a> {
    /// The source file, used to turn spans into line/column positions.
    pub source: &'a LinedString,
    /// The environment, used to resolve atoms, sorts, terms and theorems.
    pub env: &'a Environment,
}
/// A value paired with a [`FormatEnv`]; its `Display` impl delegates to
/// the value's [`EnvDisplay`].
pub struct Print<'a, D: ?Sized> {
    /// The formatting context.
    pub fe: FormatEnv<'a>,
    /// The value to display.
    pub e: &'a D,
}
impl<'a> FormatEnv<'a> {
    /// Pairs this environment with a value, producing a [`Print`] whose
    /// `Display` impl renders the value.
    pub fn to<D: ?Sized>(self, e: &'a D) -> Print<'a, D> {
        Print { fe: self, e }
    }
}
impl<'a> Deref for FormatEnv<'a> {
    type Target = Environment;
    // Lets `fe.sorts`, `fe.terms`, `fe.data`, etc. resolve through `env`.
    fn deref(&self) -> &Environment {
        self.env
    }
}
/// A `Display`-like trait for values whose rendering needs a [`FormatEnv`]
/// (e.g. to look up interned names).
pub trait EnvDisplay {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result;
}
impl Elaborator {
    /// Builds a [`FormatEnv`] from this elaborator's source and environment.
    /// NOTE(review): `env: self` relies on a coercion from `&Elaborator` to
    /// `&Environment` — confirm against `Elaborator`'s `Deref` impl.
    pub fn format_env(&self) -> FormatEnv {
        FormatEnv { source: &self.ast.source, env: self }
    }
    /// Convenience: wrap `e` for display with this elaborator's environment.
    pub fn print<'a, D: ?Sized>(&'a self, e: &'a D) -> Print<'a, D> {
        self.format_env().to(e)
    }
}
impl<'a, D: EnvDisplay + ?Sized> fmt::Display for Print<'a, D> {
    // Delegate to EnvDisplay with the stored environment.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.e.fmt(self.fe, f) }
}
/// Prints a (possibly dotted) lisp list.
///
/// `init` holds the leading elements, `e` the optional tail expression.
/// `start` is true while the opening `(` has not yet been written; the
/// first element printed flips it to false. Dotted tails that are
/// themselves lists are flattened by recursing with the same `start`.
fn list(init: &[LispVal], e: Option<&LispKind>, mut start: bool, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    for e in init {
        if start {
            write!(f, "({}", fe.to(e))?;
            start = false
        } else {
            write!(f, " {}", fe.to(e))?
        }
    }
    match e {
        // No tail: "()" for an empty list, otherwise close the paren.
        None => if start {write!(f, "()")} else {write!(f, ")")},
        // A proper-list tail is flattened into the same parens.
        Some(LispKind::List(es)) => list(es, None, start, fe, f),
        Some(LispKind::DottedList(es, r)) => list(es, Some(&r), start, fe, f),
        // A tail equivalent to nil behaves like no tail at all.
        Some(e) if e.exactly(0) => if start {write!(f, "()")} else {write!(f, ")")},
        // A genuine improper tail prints with dot notation.
        Some(e) => if start {write!(f, "{}", fe.to(e))} else {write!(f, " . {})", fe.to(e))}
    }
}
/// Converts an index into a bijective base-26 name:
/// 0 => "a", 25 => "z", 26 => "aa", 27 => "ab", ...
/// Used for printing short, human-readable metavariable names.
fn alphanumber(n: usize) -> String {
    let mut out = Vec::with_capacity(2);
    let mut n = n + 1;
    while n != 0 {
        // Bijective base 26: each "digit" runs 1..=26, written 'a'..='z'.
        out.push(b'a' + ((n - 1) % 26) as u8);
        n = (n - 1) / 26;
    }
    // Digits are produced least-significant first.
    out.reverse();
    // Every byte is ASCII 'a'..='z', so the safe conversion cannot fail;
    // prefer it over the previous `from_utf8_unchecked` to drop the unsafe.
    String::from_utf8(out).expect("alphanumber produces only ASCII letters")
}
impl EnvDisplay for AtomID {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        // Atoms print as their interned name.
        fe.data[*self].name.fmt(f)
    }
}
impl EnvDisplay for Option<AtomID> {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        // A missing atom prints as the anonymous placeholder "_".
        match self {
            None => "_".fmt(f),
            Some(a) => a.fmt(fe, f)
        }
    }
}
impl EnvDisplay for SortID {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        // Print via the sort's defining atom.
        fe.sorts[*self].atom.fmt(fe, f)
    }
}
impl EnvDisplay for TermID {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        // Print via the term's defining atom.
        fe.terms[*self].atom.fmt(fe, f)
    }
}
impl EnvDisplay for ThmID {
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        // Print via the theorem's defining atom.
        fe.thms[*self].atom.fmt(fe, f)
    }
}
impl EnvDisplay for LispVal {
    // Delegate to the inner LispKind.
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(fe, f) }
}
impl EnvDisplay for LispKind {
    /// Renders a lisp value in s-expression syntax. Arm order matters:
    /// the empty-list special cases must precede the general list arms.
    fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LispKind::Atom(a) => a.fmt(fe, f),
            // Empty proper list prints as "()".
            LispKind::List(es) if es.is_empty() => "()".fmt(f),
            // A dotted list with no leading elements is just its tail.
            LispKind::DottedList(es, r) if es.is_empty() => r.fmt(fe, f),
            LispKind::DottedList(es, r) => list(es, Some(&r), true, fe, f),
            LispKind::List(es) => list(es, None, true, fe, f),
            // Annotations are transparent for printing.
            LispKind::Annot(_, e) => e.fmt(fe, f),
            LispKind::Number(n) => n.fmt(f),
            // Strings print with quotes/escapes via Debug.
            LispKind::String(s) => write!(f, "{:?}", s),
            LispKind::Bool(true) => "#t".fmt(f),
            LispKind::Bool(false) => "#f".fmt(f),
            LispKind::Syntax(s) => s.fmt(f),
            LispKind::Undef => write!(f, "#undef"),
            LispKind::Proc(Proc::Builtin(p)) => p.fmt(f),
            // Unnamed lambdas print their definition site (1-based line:col).
            LispKind::Proc(Proc::Lambda {pos: ProcPos::Unnamed(pos), ..}) => {
                let r = fe.source.to_pos(pos.span.start);
                let fname = pos.file.path().file_name().unwrap().to_str().unwrap();
                write!(f, "#[fn at {} {}:{}]", fname, r.line + 1, r.character + 1)
            }
            // Named lambdas additionally print their name.
            &LispKind::Proc(Proc::Lambda {pos: ProcPos::Named(ref pos, _, a), ..}) => {
                let r = fe.source.to_pos(pos.span.start);
                let fname = pos.file.path().file_name().unwrap().to_str().unwrap();
                let x = &fe.data[a].name;
                write!(f, "#[fn {} at {} {}:{}]", x, fname, r.line + 1, r.character + 1)
            }
            LispKind::Proc(Proc::MatchCont(_)) => write!(f, "#[match cont]"),
            LispKind::Proc(Proc::RefineCallback) => write!(f, "#[refine]"),
            LispKind::Proc(Proc::ProofThunk(x, _)) => write!(f, "#[proof of {}]", fe.to(x)),
            LispKind::AtomMap(m) => {
                write!(f, "(atom-map!")?;
                for (a, v) in m {write!(f, " [{} {}]", fe.data[*a].name, fe.to(v))?}
                write!(f, ")")
            }
            // References print their current contents.
            LispKind::Ref(m) => m.get().fmt(fe, f),
            // Metavariables print as "?a", "?b", ... via alphanumber.
            &LispKind::MVar(n, _) => write!(f, "?{}", alphanumber(n)),
            LispKind::Goal(e) => write!(f, "(goal {})", fe.to(e)),
        }
    }
}
impl EnvDisplay for Uncons {
  /// Print the not-yet-consumed remainder exactly like the list it denotes.
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      Uncons::New(e) => e.fmt(fe, f),
      Uncons::List(es) => list(es, None, true, fe, f),
      Uncons::DottedList(es, r) => list(es, Some(&r), true, fe, f),
    }
  }
}
impl<T: EnvDisplay> EnvDisplay for [T] {
  /// Slices print as `[a, b, c]` (itertools' `format` joins with ", ").
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    write!(f, "[{}]", self.iter().map(|e| fe.to(e)).format(", "))
  }
}
impl EnvDisplay for crate::util::Span {
  /// A span prints as the literal source text it covers.
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    fe.source[*self].fmt(f)
  }
}
// Container/smart-pointer wrappers forward through `Deref`, so they print
// exactly like their contents.
impl<T: EnvDisplay> EnvDisplay for Vec<T> {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { self.deref().fmt(fe, f) }
}
impl<T: EnvDisplay> EnvDisplay for Box<T> {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { self.deref().fmt(fe, f) }
}
impl<T: EnvDisplay> EnvDisplay for std::sync::Arc<T> {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { self.deref().fmt(fe, f) }
}
impl<T: EnvDisplay> EnvDisplay for std::rc::Rc<T> {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { self.deref().fmt(fe, f) }
}
impl EnvDisplay for InferTarget {
  /// Expected-sort annotations: `?` for unknown, `provable`, `{s}` for a
  /// bound variable of sort `s`, or just the sort name for a regular target.
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      InferTarget::Unknown => "?".fmt(f),
      InferTarget::Provable => "provable".fmt(f),
      InferTarget::Bound(a) => write!(f, "{{{}}}", fe.to(a)),
      InferTarget::Reg(a) => a.fmt(fe, f),
    }
  }
}
impl Display for Sort {
  /// Render a sort declaration, e.g. `sort foo;` with modifiers prefixed.
  /// NOTE(review): `{}sort` assumes `self.mods` renders with its own trailing
  /// space when nonempty — confirm against `Modifiers`' Display impl.
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    write!(f, "{}sort {};", self.mods, self.name)
  }
}
// Terms and theorems go through the pretty-printer, rendered at a fixed
// 80-column width, rather than being printed by naive recursion.
impl EnvDisplay for Term {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    fe.pretty(|p| p.term(self).render_fmt(80, f))
  }
}
impl EnvDisplay for Thm {
  fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {
    fe.pretty(|p| p.thm(self).render_fmt(80, f))
  }
}
|
use byteorder::{ByteOrder, NativeEndian};
use crate::{
traits::{Emitable, Parseable},
DecodeError, Field,
};
/// Byte/Packet throughput statistics
///
/// Serialized with native endianness via [`TcStatsBasicBuffer`]
/// (`bytes` at offsets 0..8, `packets` at 8..12).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct TcStatsBasic {
    /// number of seen bytes
    pub bytes: u64,
    /// number of seen packets
    pub packets: u32,
}
// Byte ranges of each field within the serialized struct.
const BYTES: Field = 0..8;
const PACKETS: Field = 8..12;
/// Total serialized length: ends where the last field ends.
pub const TC_STATS_BASIC_LEN: usize = PACKETS.end;
/// Zero-copy view over a byte buffer holding a serialized [`TcStatsBasic`].
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TcStatsBasicBuffer<T> {
    buffer: T,
}
impl<T: AsRef<[u8]>> TcStatsBasicBuffer<T> {
    /// Wrap `buffer` without validating its length.
    pub fn new(buffer: T) -> TcStatsBasicBuffer<T> {
        TcStatsBasicBuffer { buffer }
    }
    /// Wrap `buffer`, failing if it is too short to hold every field.
    pub fn new_checked(buffer: T) -> Result<TcStatsBasicBuffer<T>, DecodeError> {
        let wrapped = Self::new(buffer);
        wrapped.check_buffer_length()?;
        Ok(wrapped)
    }
    /// Ensure the buffer can hold all fields, so the slicing in the
    /// accessors below cannot panic.
    fn check_buffer_length(&self) -> Result<(), DecodeError> {
        let actual = self.buffer.as_ref().len();
        if actual >= TC_STATS_BASIC_LEN {
            Ok(())
        } else {
            Err(format!(
                "invalid TcStatsBasicBuffer buffer: length is {} instead of {}",
                actual, TC_STATS_BASIC_LEN
            )
            .into())
        }
    }
    /// Consume the wrapper and hand back the underlying buffer.
    pub fn into_inner(self) -> T {
        self.buffer
    }
    /// Read the byte counter (native endianness).
    pub fn bytes(&self) -> u64 {
        NativeEndian::read_u64(&self.buffer.as_ref()[BYTES])
    }
    /// Read the packet counter (native endianness).
    pub fn packets(&self) -> u32 {
        NativeEndian::read_u32(&self.buffer.as_ref()[PACKETS])
    }
}
impl<T: AsRef<[u8]> + AsMut<[u8]>> TcStatsBasicBuffer<T> {
    /// Write the byte counter (native endianness). The slicing panics if the
    /// buffer is shorter than `TC_STATS_BASIC_LEN` — callers must size it.
    pub fn set_bytes(&mut self, value: u64) {
        NativeEndian::write_u64(&mut self.buffer.as_mut()[BYTES], value)
    }
    /// Write the packet counter (native endianness).
    pub fn set_packets(&mut self, value: u32) {
        NativeEndian::write_u32(&mut self.buffer.as_mut()[PACKETS], value)
    }
}
impl<T: AsRef<[u8]>> Parseable<TcStatsBasic> for TcStatsBasicBuffer<T> {
    /// Decode both counters, validating the length first so the unchecked
    /// slicing in `bytes()`/`packets()` cannot panic.
    fn parse(&self) -> Result<TcStatsBasic, DecodeError> {
        self.check_buffer_length()?;
        Ok(TcStatsBasic {
            bytes: self.bytes(),
            packets: self.packets(),
        })
    }
}
impl Emitable for TcStatsBasic {
    fn buffer_len(&self) -> usize {
        TC_STATS_BASIC_LEN
    }
    /// Serialize into `buffer`; the caller must provide at least
    /// `buffer_len()` bytes or the setters panic when slicing.
    fn emit(&self, buffer: &mut [u8]) {
        let mut buffer = TcStatsBasicBuffer::new(buffer);
        buffer.set_bytes(self.bytes);
        buffer.set_packets(self.packets);
    }
}
|
use crate::domain::entities::{User, Task};
use rusqlite::{Connection, NO_PARAMS, types::ToSql};
/// Everything a domain entity needs in order to be stored in / loaded from
/// SQLite through the generic [`Rusqlite`] repository.
pub trait SQLable {
    /// `SELECT` statement returning all rows of the entity's table.
    fn select() -> &'static str;
    /// Parameterized `INSERT` statement matching the bindings fed by `bind`.
    fn insert() -> &'static str;
    /// DDL that creates the entity's table.
    fn create_table() -> &'static str;
    /// Feed (sql, params) pairs for this entity — and any child entities —
    /// to `consumer`, returning the consumer's result for the entity itself.
    #[inline(always)]
    fn bind<F, T>(data: &Self, consumer: F) -> T where F: FnMut(&'static str, &[&ToSql]) -> T;
    /// Rebuild the entity from a result row; `repo` allows follow-up queries
    /// for related rows.
    fn from_row<'row, 'stmt>(repo: &Rusqlite, row: &rusqlite::Row<'row, 'stmt>) -> Self;
}
/// A (column name, bound value) pair used to build `WHERE column = (?)` clauses.
pub struct QueryValue<'query>(&'static str, &'query ToSql);
/// A `SQLable` entity that can translate its domain search credentials into
/// SQL column filters.
pub trait SQLSearchable : Searchable + SQLable {
    fn build_query(creds: &[<Self as Searchable>::Credentials]) -> Vec<QueryValue>;
}
// Some kind of generalization so I can extract the things that differ.
// The major drawback I found with this is handling relations between objects:
// a User has Tasks, but this scheme doesn't fetch them together, and a LEFT JOIN
// doesn't really help, because then we would need to post-process the joined rows.
impl SQLable for User {
    fn select() -> &'static str { "SELECT * FROM users" }
    fn insert() -> &'static str { "INSERT INTO users (name) VALUES (?)" }
    fn create_table() -> &'static str {
        "CREATE TABLE users (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL
        )"
    }
    /// Insert the user, then insert each of their tasks.
    /// NOTE(review): the returned value is the *user's* insert result; task
    /// results are discarded, and no user_id foreign key links tasks back.
    fn bind<F, T>(data: &Self, mut consumer: F) -> T where F: FnMut(&'static str, &[&ToSql]) -> T {
        let bindings: [&ToSql; 1] = [&data.name];
        let my_id = consumer(Self::insert(), &bindings);
        // TODO: optimize with bulk insert
        for task in data.tasks() {
            Task::bind(task, &mut consumer);
        }
        my_id
    }
    /// Rebuild a user from a row, eagerly loading tasks via a follow-up query.
    /// NOTE(review): this filters the *tasks* table by "id" using the user's
    /// id — verify the intended join column against the schema above.
    fn from_row<'row, 'stmt>(repo: &Rusqlite, row: &rusqlite::Row<'row, 'stmt>) -> Self {
        let id: <Rusqlite as Repository<User>>::Id = row.get(0);
        let name: String = row.get(1);
        let tasks = repo.query(&[QueryValue("id", &id)], None);
        User::with_tasks(name, tasks)
    }
}
impl SQLSearchable for User {
    /// Translate each search credential into a (column, value) filter.
    fn build_query(creds: &[<Self as Searchable>::Credentials]) -> Vec<QueryValue> {
        let mut result = Vec::with_capacity(creds.len());
        for pred in creds {
            match pred {
                UserSearchTerms::Name(ref name) => {
                    result.push(QueryValue("name", name));
                }
            }
        }
        result
    }
}
impl SQLable for Task {
    fn select() -> &'static str { "SELECT * from tasks" }
    fn insert() -> &'static str { "INSERT INTO tasks (desc, done, tags) VALUES (?, ?, ?)" }
    fn create_table() -> &'static str {
        "CREATE TABLE tasks (
            id INTEGER PRIMARY KEY,
            desc TEXT NOT NULL,
            done BOOL NOT NULL,
            tags TEXT
        )"
    }
    /// Insert one task. Tags are flattened into a comma-separated string;
    /// an empty tag list is stored as SQL NULL instead of "".
    fn bind<F, T>(data: &Self, mut consumer: F) -> T where F: FnMut(&'static str, &[&ToSql]) -> T {
        let joined = data.tags.join(",");
        // Pick either the joined string or the NULL sentinel as a trait object.
        let tags: &ToSql = if joined.len() > 0 { &joined } else { &rusqlite::types::Null };
        let bindings: [&ToSql; 3] = [&data.desc, &data.done, &tags];
        let id = consumer(Self::insert(), &bindings);
        id
    }
    /// Rebuild a task from a row; a NULL tags column becomes an empty Vec.
    fn from_row<'row, 'stmt>(_repo: &Rusqlite, row: &rusqlite::Row<'row, 'stmt>) -> Self {
        let desc: String = row.get("desc");
        let done: bool = row.get("done");
        let tags: Option<String> = row.get("tags");
        let tag_vec = tags.map_or(vec![], |s| s.split(",").map(Into::into).collect());
        Task {
            desc: desc,
            done: done,
            tags: tag_vec,
            due: None, // due dates are not persisted in this schema yet
        }
    }
}
// The thing that ties this implementation to rusqlite
pub struct Rusqlite {
    // Owned database handle; every query in the impl below goes through it.
    conn: rusqlite::Connection,
}
use crate::domain::entities::UserSearchTerms;
impl Rusqlite {
    /// Open a fresh in-memory database (panics if SQLite can't initialize).
    pub fn in_memory() -> Self {
        Rusqlite {
            conn: Connection::open_in_memory().expect("Could not open Database"),
        }
    }
    /// Create the table for `T`, returning the number of affected rows.
    pub fn setup<T: SQLable>(&self) -> Result<usize, rusqlite::Error> {
        self.conn.execute(T::create_table(), NO_PARAMS)
    }
    /// All rows of `T`, unfiltered.
    fn get_all<T: SQLable>(&self) -> Vec<T> {
        self.query(&[], None)
    }
    /// Single row of `T` filtered by the "id" column.
    fn get<T: SQLable>(&self, id: &i64) -> Option<T> {
        let result = self.query(&[QueryValue("id", id)], Some(1));
        result.into_iter().next()
    }
    /// Insert `data` (and, via `bind`, any child rows), returning the rowid
    /// of the entity's own insert.
    fn save<T: SQLable>(&mut self, data: &T) -> i64 {
        T::bind(data, |sql, bindings| {
            let mut stmnt = self.conn.prepare(sql).expect("Could not prepare save");
            stmnt.insert(bindings).expect("Could not insert")
        })
    }
    /// Run `T::select()` extended with `WHERE`/`LIMIT` clauses.
    /// Only the *values* are caller-supplied and they are bound as `?`
    /// placeholders; column names come from `&'static str`s chosen by the
    /// entity impls, so no untrusted text is spliced into the SQL.
    fn query<T: SQLable>(&self, parameters: &[QueryValue], limit: Option<u32>) -> Vec<T> {
        let mut sql = T::select().to_string();
        if parameters.len() > 0 {
            sql += " WHERE";
        }
        let mut params: Vec<&ToSql> = Vec::with_capacity(parameters.len());
        let mut iter = parameters.iter().peekable();
        while let Some(QueryValue(field, value)) = iter.next() {
            sql += &format!(" {} = (?)", field);
            params.push(value);
            // Join successive predicates with AND.
            if iter.peek().is_some() {
                sql += " AND";
            }
        }
        if let Some(limit) = limit {
            sql += &format!(" LIMIT {}", limit);
        }
        let mut stmnt = self.conn
            .prepare(&sql)
            .expect("Could not prepare statement");
        let iter = stmnt
            .query_map(&params, |row| T::from_row(&self, row))
            .expect("Could not bind params");
        iter.map(|elem| elem.expect("Could not construct")).collect()
    }
}
use crate::domain::Repository;
// Blanket implementation for all things SQLable
impl<T: SQLable> Repository<T> for Rusqlite {
    // SQLite rowids are 64-bit integers.
    type Id = i64;
    fn all(&self) -> Vec<T> {
        self.get_all::<T>()
    }
    fn get(&self, id: &i64) -> Option<T> {
        self.get::<T>(id)
    }
    /// Insert and return the new row's id.
    fn save(&mut self, data: &T) -> Self::Id {
        self.save::<T>(data)
    }
}
use crate::domain::{Searchable, SearchableRepository};
impl<T: SQLSearchable> SearchableRepository<T> for Rusqlite {
    /// Find entities matching all credentials (ANDed), optionally limited.
    fn find(&self, credentials: &[<T as Searchable>::Credentials], limit: Option<u32>) -> Vec<T> {
        let query_data = T::build_query(credentials);
        self.query(&query_data, limit)
    }
}
// Two different In Memory Repository implementations, just to prove the concept.
/// Vec-backed repository; an element's id is its insertion index.
/// The `T: Clone` bound was dropped from the type itself — only the impl
/// blocks that actually clone values need it.
pub struct TrivialRepository<T>(Vec<T>);
impl<T: Clone> TrivialRepository<T> {
    /// Create an empty repository.
    pub fn new() -> Self {
        TrivialRepository(Vec::new())
    }
}
// A zero-argument `new` should be mirrored by `Default`
// (clippy::new_without_default), so the type works with `..Default::default()`.
impl<T: Clone> Default for TrivialRepository<T> {
    fn default() -> Self {
        Self::new()
    }
}
/// Vec-backed `Repository`: ids are positional indices assigned at save time.
impl<T: Clone> Repository<T> for TrivialRepository<T> {
    type Id = usize;
    /// Return a copy of every stored element.
    fn all(&self) -> Vec<T> {
        self.0.to_vec()
    }
    /// Checked positional lookup; `None` when the index is out of range.
    fn get(&self, id: &Self::Id) -> Option<T> {
        self.0.get(*id).map(T::clone)
    }
    /// Append a copy of `data`; its index becomes its id.
    fn save(&mut self, data: &T) -> Self::Id {
        self.0.push(data.clone());
        self.0.len() - 1
    }
}
use std::collections::HashMap;
use uuid::Uuid;
pub struct HashRepository<T: Clone>(HashMap<Uuid, T>);
impl<T: Clone> HashRepository<T> {
    /// Create an empty repository.
    pub fn new() -> Self {
        HashRepository(HashMap::new())
    }
}
// Mirror the zero-argument constructor with `Default`
// (clippy::new_without_default), matching `TrivialRepository`'s convention.
impl<T: Clone> Default for HashRepository<T> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Clone> Repository<T> for HashRepository<T> {
    type Id = Uuid;
    /// All stored values, in the map's (unspecified) iteration order.
    fn all(&self) -> Vec<T> {
        self.0.values().cloned().collect()
    }
    fn get(&self, id: &Self::Id) -> Option<T> {
        self.0.get(id).cloned()
    }
    /// Store a copy of `data` under a freshly generated random v4 UUID.
    /// NOTE(review): ids are random, so saves are nondeterministic across runs.
    fn save(&mut self, data: &T) -> Self::Id {
        let id = Uuid::new_v4();
        self.0.insert(id, data.clone());
        id
    }
}
#[cfg(test)]
mod test {
    use crate::domain::Repository;
    /// Generic save/get round-trip shared by both in-memory repositories.
    fn test_save_and_get<R: Repository<String>>(mut repo: R) {
        let input = "This is a cool string".to_string();
        let id = repo.save(&input);
        let output = repo.get(&id).expect("Could not find what I just put in!");
        assert_eq!(input, output);
    }
    #[test]
    fn hash_repo_works() {
        let hash_repo = super::HashRepository::new();
        test_save_and_get(hash_repo);
    }
    #[test]
    fn vec_repo_works() {
        let vec_repo = super::TrivialRepository::new();
        test_save_and_get(vec_repo);
    }
    #[test]
    fn test_get_all_done_via_id() {
        use crate::domain::{entities::{User, Task}, Repository};
        // Two finished tasks and one open one: the use case must return only
        // the finished ones, in insertion order.
        let mut one_done = Task::new("One");
        one_done.finish();
        let mut two_done = Task::new("Two");
        two_done.finish();
        let not_done = Task::new("Tre");
        let mut user: User = User::new("Someone");
        user.add_task(one_done.clone());
        user.add_task(not_done.clone());
        user.add_task(two_done.clone());
        let mut repo = crate::data::HashRepository::new();
        let user_id = repo.save(&user);
        let found_done = crate::domain::usecases::find_all_done_via_id(&repo, &user_id);
        assert_eq!(found_done, vec![one_done, two_done]);
    }
    #[test]
    fn test_searchable() {
        use super::Rusqlite;
        use super::{User, Task, UserSearchTerms};
        use crate::domain::SearchableRepository;
        // End-to-end: create schema, insert four users, then find one by name.
        let mut repo = Rusqlite::in_memory();
        repo.setup::<User>().expect("Could not setup tables");
        repo.setup::<Task>().expect("Could not setup tables");
        let dummy_task = Task::new("Buy milk");
        let a = User::with_tasks("A", vec![dummy_task.clone()]);
        let b = User::with_tasks("B", vec![dummy_task.clone()]);
        let c = User::with_tasks("C", vec![dummy_task.clone()]);
        let d = User::with_tasks("D", vec![dummy_task.clone()]);
        repo.save(&a);
        repo.save(&b);
        repo.save(&c);
        repo.save(&d);
        let query_result: Vec<User> = repo.find(
            &[UserSearchTerms::Name("C".to_string())],
            Some(1));
        assert_eq!(query_result, vec![c]);
    }
}
#![allow(dead_code)]
mod assembler;
#[cfg(test)]
mod tests {
    use super::assembler::assemble::SemanticError;
    use super::assembler::ast::*;
    use std::collections::HashMap;
    use combine::stream::position;
    use combine::EasyParser;
    use super::assembler::assemble::Encode;
    #[test]
    fn test_jump_assemble() {
        // Table-driven cases: (source line, expected encoding, expected error).
        // A `Some(..)` error means encoding is expected to fail (and the u16
        // column is then irrelevant).
        let test_vec = vec![
            ("JMI $128", 0x5080, None),
            (
                "JEQ $4096",
                0,
                // $4096 needs 13 bits, overflowing the immediate field.
                Some(SemanticError::IMMEDIATEOVERSIZE((1, 10))),
            ),
            ("JMP $4095", 0x4FFF, None),
            ("STP", 0x7000, None),
            ("ADD R0, $12 ROR $8", 0x838C, None),
            ("SUB R3, R2 ASR $15", 0x9DFA, None),
            ("TST R3, R3 ROR $15", 0xFCFF, None),
            ("LDR R2, [R3]", 0x0AC3, None),
            ("LDR R3, [R3, $15]!", 0x0FFF, None),
            ("STR R0, [R0]", 0x00C0, None),
            ("STR R3, [R3], -$10", 0x0D2B, None),
        ];
        test_vec
            .iter()
            .for_each(|(instr, res, err)| test_assemble(instr, *res, err));
    }
    /// Parse a single-line program, encode its first statement (with an empty
    /// symbol table), and compare against the expected encoding or error.
    fn test_assemble(instruction: &str, encoding: u16, err: &Option<SemanticError>) {
        println!("testing: {}", instruction);
        let mut prog = super::assembler::parse::program()
            .easy_parse(position::Stream::new(instruction))
            .unwrap()
            .0;
        // Non-instruction statements (labels, directives) are silently skipped.
        if let Statement::Instruction(instr) = &mut prog.statements[0] {
            match Encode::instruction(instr, &HashMap::new()) {
                Ok(instr) => assert_eq!(instr, encoding),
                Err(e) => assert_eq!(&Some(e), err),
            }
        }
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;
mod expenses;
mod server;
/// Entry point: launch the HTTP server; on failure report the error on
/// stderr and exit with a non-zero status.
fn main() {
    match server::run() {
        Ok(_) => {}
        Err(err) => {
            eprintln!("Error: {}", err);
            std::process::exit(1);
        }
    }
}
|
use std::collections::HashMap;
/// Parse a `k1=v1&k2=v2` query string into a key -> value map.
///
/// Fix: the previous version indexed `pair[1]` and panicked on any entry
/// without an `=`; such malformed entries are now skipped. It also collected
/// into an intermediate `Vec` before re-iterating, which is unnecessary.
/// Preserved behavior: for entries with several `=` signs only the first two
/// segments are used (`"a=b=c"` maps `"a"` to `"b"`), and duplicate keys
/// keep the last occurrence.
pub fn parse(input: &str) -> HashMap<&str, &str> {
    input
        .split('&')
        .filter_map(|entry| {
            let mut pieces = entry.split('=');
            match (pieces.next(), pieces.next()) {
                (Some(key), Some(value)) => Some((key, value)),
                _ => None,
            }
        })
        .collect()
}
#[cfg(test)]
mod test {
    use crate::set2::kv::parse;
    #[test]
    fn parse_key_values() {
        // Two well-formed pairs: each key maps to its own value.
        let input = "foo=bar&baz=qux";
        let output = parse(input);
        assert_eq!(output.get("foo").unwrap(), &"bar");
        assert_eq!(output.get("baz").unwrap(), &"qux");
    }
}
|
use std::collections::BTreeSet;
use pickpocket::batch::BatchApp;
use pickpocket::Status;
/// Batch-mark URLs from the input file as read in Pocket: collect the ids of
/// matching, still-unread items, then send them in one `mark_as_read` call.
#[tokio::main]
async fn main() {
    let app = BatchApp::default();
    // Ids accumulated for the single batched call at the end.
    let mut ids: BTreeSet<&str> = BTreeSet::new();
    let cache_reading_list = app.cache_client.list_all();
    for line in app.file_lines() {
        let url = line.expect("Could not read line");
        match app.get(&url as &str) {
            Some(id) => {
                // The id came from the app, so it must exist in the cache.
                let item = cache_reading_list.get(id).expect("cant locate id");
                if item.status() == Status::Unread {
                    ids.insert(id);
                } else {
                    println!("Url {} already marked as read", url);
                }
            }
            None => println!("Url {} did not match", &url),
        }
    }
    app.client.mark_as_read(ids).await;
}
|
use crate::common::{
self,
factories::prelude::*,
snitches::{CoreSnitch, TransportSnitch},
};
use ::common::ipnetwork::IpNetwork;
use ::common::rsip::{self, prelude::*};
use models::transport::RequestMsg;
use sip_server::{
core::impls::{UserAgent, Registrar},
ReqProcessor, SipBuilder, SipManager, Transaction, CoreLayer,
};
use std::sync::Arc;
/// Build a fresh `SipManager` (with snitch test doubles for core and
/// transport) plus a `Registrar` wired to it through a weak reference.
async fn setup() -> (Registrar, Arc<SipManager>) {
    let sip_manager = SipBuilder::new::<CoreSnitch, Transaction, TransportSnitch>()
        .expect("sip manager failed")
        .manager;
    let registrar = Registrar::new(Arc::downgrade(&sip_manager));
    (registrar, sip_manager)
}
// A REGISTER query with no stored registrations should get a 200 response
// that carries no Contact headers.
#[tokio::test]
#[serial_test::serial]
async fn with_no_records_returns_empty_list() {
    let _ = common::setup();
    let (registrar, sip_manager) = setup().await;
    let transport = sip_manager.transport.clone();
    let transport = as_any!(transport, TransportSnitch);
    let res = registrar
        .process_incoming_request(RequestMsg {
            sip_request: requests::register_query_request(),
            ..Randomized::default()
        })
        .await;
    assert!(res.is_ok(), "returns: {:?}", res);
    // Exactly one outgoing message: the 200 OK.
    assert_eq!(transport.messages.len().await, 1);
    assert_eq!(
        transport.messages.first_response().await.status_code,
        200.into()
    );
    assert!(transport
        .messages
        .first_response()
        .await
        .headers
        .iter()
        .find(|h| matches!(h, rsip::Header::Contact(_)))
        .is_none());
}
// With two stored registrations, a REGISTER query should get a 200 response
// listing both as Contact headers.
#[tokio::test]
#[serial_test::serial]
async fn with_records_returns_a_list_of_contacts() {
    let _ = common::setup();
    create_registration();
    create_registration();
    let (registrar, sip_manager) = setup().await;
    let transport = sip_manager.transport.clone();
    let transport = as_any!(transport, TransportSnitch);
    let res = registrar
        .process_incoming_request(RequestMsg {
            sip_request: requests::register_query_request(),
            ..Randomized::default()
        })
        .await;
    assert!(res.is_ok(), "returns: {:?}", res);
    assert_eq!(transport.messages.len().await, 1);
    assert_eq!(
        transport.messages.first_response().await.status_code,
        200.into()
    );
    // One Contact header per stored registration.
    assert_eq!(
        transport
            .messages
            .first_response()
            .await
            .headers
            .iter()
            .filter(|h| matches!(h, rsip::Header::Contact(_)))
            .count(),
        2
    );
}
// A real (non-query) REGISTER should persist a second registration and echo
// both contacts back in the 200 response.
#[tokio::test]
#[serial_test::serial]
async fn with_new_register_request_saves_the_contact() {
    let _ = common::setup();
    create_registration();
    let (registrar, sip_manager) = setup().await;
    let transport = sip_manager.transport.clone();
    let transport = as_any!(transport, TransportSnitch);
    let res = registrar
        .process_incoming_request(RequestMsg {
            sip_request: requests::register_request(),
            ..Randomized::default()
        })
        .await;
    assert!(res.is_ok(), "returns: {:?}", res);
    assert_eq!(transport.messages.len().await, 1);
    assert_eq!(
        transport.messages.first_response().await.status_code,
        200.into()
    );
    // Pre-existing registration + the newly saved one.
    assert_eq!(
        transport
            .messages
            .first_response()
            .await
            .headers
            .iter()
            .filter(|h| matches!(h, rsip::Header::Contact(_)))
            .count(),
        2
    );
    assert_eq!(
        store::Registration::count(Default::default()).expect("registrations count"),
        2
    )
}
// A REGISTER whose To header names a different user than From must be
// rejected, and nothing should be sent over the transport.
#[tokio::test]
#[serial_test::serial]
async fn with_wrong_from_to_register() {
    use rsip::Uri;
    let _ = common::setup();
    let (registrar, sip_manager) = setup().await;
    let transport = sip_manager.transport.clone();
    let transport = as_any!(transport, TransportSnitch);
    let mut request = requests::register_request();
    request
        .headers
        .unique_push(rsip::typed::To::from(Uri::default().with_user("another")).into());
    let res = registrar
        .process_incoming_request(RequestMsg {
            sip_request: request,
            ..Randomized::default()
        })
        .await;
    assert!(res.is_err(), "returns: {:?}", res);
    assert_eq!(transport.messages.len().await, 0);
}
// A REGISTER with expires=0 (delete) for an existing binding should remove
// it: 200 response, no Contact headers, zero stored registrations.
#[tokio::test]
#[serial_test::serial]
async fn delete_registration() {
    let _ = common::setup();
    let (registration, uri) = create_registration();
    let (registrar, sip_manager) = setup().await;
    let transport = sip_manager.transport.clone();
    let transport = as_any!(transport, TransportSnitch);
    let res = registrar
        .process_incoming_request(RequestMsg {
            sip_request: requests::register_delete_request_with_uri(uri),
            ..Randomized::default()
        })
        .await;
    assert!(res.is_ok(), "returns: {:?}", res);
    assert_eq!(transport.messages.len().await, 1);
    assert_eq!(
        transport.messages.first_response().await.status_code,
        200.into()
    );
    assert_eq!(
        transport
            .messages
            .first_response()
            .await
            .headers
            .iter()
            .filter(|h| matches!(h, rsip::Header::Contact(_)))
            .count(),
        0
    );
    assert_eq!(
        store::Registration::count(Default::default()).expect("registrations count"),
        0
    )
}
/// Seed the store with one registration for user "filippos" at a localhost
/// URI, returning the stored record and the URI used.
fn create_registration() -> (store::Registration, rsip::Uri) {
    use ::common::chrono::{Duration, Utc};
    use std::convert::TryInto;
    use std::net::{IpAddr, Ipv4Addr, SocketAddr};
    let ip_address: IpNetwork = IpAddrBuilder::localhost().into();
    let user: String = "filippos".into();
    let uri = rsip::Uri {
        scheme: Some(rsip::Scheme::default()),
        host_with_port: rsip::HostWithPort::from(ip_address.clone().ip()),
        auth: Some(rsip::Auth {
            user: user.clone(),
            password: None,
        }),
        params: vec![],
        headers: vec![].into(),
    };
    //TODO: should impl Randomized default
    let mut new_registration = store::DirtyRegistration {
        username: Some(user),
        domain: Some("localhost".into()),
        // Far-future expiry so the record is live for the whole test.
        expires: Some(Utc::now() + Duration::minutes(100)),
        call_id: Some(rsip::headers::CallId::default().value().into()),
        cseq: Some(1),
        user_agent: Some(rsip::headers::UserAgent::default().value().into()),
        instance: None,
        ip_address: Some(ip_address),
        port: Some(5060),
        transport: Some(rsip::Transport::default().into()),
        contact: None,
        contact_uri: Some(uri.to_string()),
    };
    let contact_header: rsip::Header = rsip::typed::Contact {
        display_name: None,
        uri: uri.clone(),
        params: Default::default(),
    })
    .into();
|
/// Return a greeting and a number together, demonstrating multiple return
/// values via a tuple.
fn multi_hello() -> (&'static str, i32) {
    let greeting = "Hello";
    let answer = 42;
    (greeting, answer)
}
/// Destructure and print the two values returned by `multi_hello`.
/// The locals were renamed: the old binding `str` shadowed the primitive
/// string type's name, which is legal but needlessly confusing.
fn main() {
    let (greeting, number) = multi_hello();
    println!("{},{}", greeting, number);
}
|
// svd2rust-generated accessor types for the GCR register.
#[doc = "Register `GCR` reader"]
pub type R = crate::R<GCR_SPEC>;
#[doc = "Register `GCR` writer"]
pub type W = crate::W<GCR_SPEC>;
#[doc = "Field `WW1RSC` reader - WWDG1 reset scope control"]
pub type WW1RSC_R = crate::BitReader<WW1RSC_A>;
#[doc = "WWDG1 reset scope control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum WW1RSC_A {
    #[doc = "0: Clear WWDG1 scope control"]
    Clear = 0,
    #[doc = "1: Set WWDG1 scope control"]
    Set = 1,
}
impl From<WW1RSC_A> for bool {
    #[inline(always)]
    fn from(variant: WW1RSC_A) -> Self {
        // Exhaustive match instead of the discriminant cast; same result,
        // and adding a variant would now be a compile error here.
        match variant {
            WW1RSC_A::Clear => false,
            WW1RSC_A::Set => true,
        }
    }
}
// Enumerated-value helpers on the field reader.
impl WW1RSC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> WW1RSC_A {
        match self.bits {
            false => WW1RSC_A::Clear,
            true => WW1RSC_A::Set,
        }
    }
    #[doc = "Clear WWDG1 scope control"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == WW1RSC_A::Clear
    }
    #[doc = "Set WWDG1 scope control"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == WW1RSC_A::Set
    }
}
#[doc = "Field `WW1RSC` writer - WWDG1 reset scope control"]
pub type WW1RSC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WW1RSC_A>;
// Named-variant convenience writers; both delegate to the generic `variant`.
impl<'a, REG, const O: u8> WW1RSC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear WWDG1 scope control"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(WW1RSC_A::Clear)
    }
    #[doc = "Set WWDG1 scope control"]
    #[inline(always)]
    pub fn set(self) -> &'a mut crate::W<REG> {
        self.variant(WW1RSC_A::Set)
    }
}
impl R {
    #[doc = "Bit 0 - WWDG1 reset scope control"]
    #[inline(always)]
    pub fn ww1rsc(&self) -> WW1RSC_R {
        // Extract bit 0 of the raw register value.
        WW1RSC_R::new((self.bits & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - WWDG1 reset scope control"]
    #[inline(always)]
    #[must_use]
    pub fn ww1rsc(&mut self) -> WW1RSC_W<GCR_SPEC, 0> {
        WW1RSC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure `bits` is a valid value for this register.
        self.bits = bits;
        self
    }
}
#[doc = "RCC Global Control Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GCR_SPEC;
impl crate::RegisterSpec for GCR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`gcr::R`](R) reader structure"]
impl crate::Readable for GCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gcr::W`](W) writer structure"]
impl crate::Writable for GCR_SPEC {
    // No fields are write-1/write-0-to-modify in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GCR to value 0"]
impl crate::Resettable for GCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Older-style svd2rust output (pre-typestate API) for DDFT_CONFIG.
#[doc = "Reader of register DDFT_CONFIG"]
pub type R = crate::R<u32, super::DDFT_CONFIG>;
#[doc = "Writer for register DDFT_CONFIG"]
pub type W = crate::W<u32, super::DDFT_CONFIG>;
#[doc = "Register DDFT_CONFIG `reset()`'s with value 0"]
impl crate::ResetValue for super::DDFT_CONFIG {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DDFT_ENABLE`"]
pub type DDFT_ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DDFT_ENABLE`"]
pub struct DDFT_ENABLE_W<'a> {
    w: &'a mut W,
}
// Bit 0 of the register.
impl<'a> DDFT_ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Mask out bit 0 then or in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `BLERD_DDFT_EN`"]
pub type BLERD_DDFT_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BLERD_DDFT_EN`"]
pub struct BLERD_DDFT_EN_W<'a> {
    w: &'a mut W,
}
// Bit 1 of the register.
impl<'a> BLERD_DDFT_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Mask out bit 1 then or in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `DDFT_MUX_CFG1`"]
pub type DDFT_MUX_CFG1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DDFT_MUX_CFG1`"]
pub struct DDFT_MUX_CFG1_W<'a> {
    w: &'a mut W,
}
// 5-bit field at bits 8..=12.
impl<'a> DDFT_MUX_CFG1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SAFETY (caller contract): `value` should fit in 5 bits; excess
        // bits are masked off here anyway.
        self.w.bits = (self.w.bits & !(0x1f << 8)) | (((value as u32) & 0x1f) << 8);
        self.w
    }
}
#[doc = "Reader of field `DDFT_MUX_CFG2`"]
pub type DDFT_MUX_CFG2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DDFT_MUX_CFG2`"]
pub struct DDFT_MUX_CFG2_W<'a> {
    w: &'a mut W,
}
// 5-bit field at bits 16..=20.
impl<'a> DDFT_MUX_CFG2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // SAFETY (caller contract): `value` should fit in 5 bits; excess
        // bits are masked off here anyway.
        self.w.bits = (self.w.bits & !(0x1f << 16)) | (((value as u32) & 0x1f) << 16);
        self.w
    }
}
// Read accessors: each extracts `(bits >> offset) & mask` for its field.
impl R {
    #[doc = "Bit 0 - Enables the DDFT output from BLESS 1: DDFT is enabled 0: DDFT is disabled"]
    #[inline(always)]
    pub fn ddft_enable(&self) -> DDFT_ENABLE_R {
        DDFT_ENABLE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Enables the DDFT inputs from CYBLERD55 chip 1: DDFT inputs are enabled 0: DDFT inputs are disabled"]
    #[inline(always)]
    pub fn blerd_ddft_en(&self) -> BLERD_DDFT_EN_R {
        BLERD_DDFT_EN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bits 8:12 - dbg_mux_pin1 selection, combine with BLERD and BLESS 5'h00 blerd_ddft_out\\[0\\] 5'h01 rcb_tx_fifo_empty 5'h02 hv_ldo_lv_detect_raw 5'h03 dbus_rx_en 5'h04 1'b0 5'h05 clk_switch_to_sysclk 5'h06 ll_clk_en_sync 5'h07 dsm_entry_stat 5'h08 proc_tx_en 5'h09 rssi_read_start 5'h0A tx_2mbps 5'h0B rcb_bus_busy 5'h0C hv_ldo_en_mt (act_stdbyb) 5'h0D ll_eco_clk_en 5'h0E blerd_reset_assert 5'h0F hv_ldo_byp_n 5'h10 hv_ldo_lv_detect_mt 5'h11 enable_ldo 5'h12 enable_ldo_dly 5'h13 bless_rcb_le_out 5'h14 bless_rcb_clk_out 5'h15 bless_dig_ldo_on_out 5'h16 bless_act_ldo_en_out 5'h17 bless_clk_en_out 5'h18 bless_buck_en_out 5'h19 bless_ret_switch_hv_out 5'h1A efuse_rw_out 5'h1B efuse_avdd_out 5'h1C efuse_config_efuse_mode 5'h1D bless_dbus_tx_en_pad 5'h1E bless_bpktctl_rd 5'h1F 1'b0"]
    #[inline(always)]
    pub fn ddft_mux_cfg1(&self) -> DDFT_MUX_CFG1_R {
        DDFT_MUX_CFG1_R::new(((self.bits >> 8) & 0x1f) as u8)
    }
    #[doc = "Bits 16:20 - dbg_mux_pin2 selection, combine with BLERD and BLESS 5'h00 blerd_ddft_out\\[1\\] 5'h01 rcb_rx_fifo_empty 5'h02 ll_decode_rxdata 5'h03 dbus_tx_en 5'h04 fw_clk_en 5'h05 interrupt_ll_n 5'h06 llh_st_sm 5'h07 llh_st_dsm 5'h08 proc_rx_en 5'h09 rssi_rx_done 5'h0A rx_2mbps 5'h0B rcb_ll_ctrl 5'h0C hv_ldo_byp_n 5'h0D reset_deassert 5'h0E rcb_intr 5'h0F rcb_ll_intr 5'h10 hv_ldo_en_mt (act_stdbyb) 5'h11 hv_ldo_lv_detect_raw 5'h12 bless_rcb_data_in 5'h13 bless_xtal_en_out 5'h14 bless_isolate_n_out 5'h15 bless_reset_n_out 5'h16 bless_ret_ldo_ol_hv_out 5'h17 bless_txd_rxd_out 5'h18 tx_rx_ctrl_sel 5'h19 bless_bpktctl_cy 5'h1A efuse_cs_out 5'h1B efuse_pgm_out 5'h1C efuse_sclk_out 5'h1D hv_ldo_lv_detect_mt 5'h1E enable_ldo 5'h1F enable_ldo_dly"]
    #[inline(always)]
    pub fn ddft_mux_cfg2(&self) -> DDFT_MUX_CFG2_R {
        DDFT_MUX_CFG2_R::new(((self.bits >> 16) & 0x1f) as u8)
    }
}
// Write accessors: each returns the field's write proxy, which masks and
// shifts the value into place.
impl W {
    #[doc = "Bit 0 - Enables the DDFT output from BLESS 1: DDFT is enabled 0: DDFT is disabled"]
    #[inline(always)]
    pub fn ddft_enable(&mut self) -> DDFT_ENABLE_W {
        DDFT_ENABLE_W { w: self }
    }
    #[doc = "Bit 1 - Enables the DDFT inputs from CYBLERD55 chip 1: DDFT inputs are enabled 0: DDFT inputs are disabled"]
    #[inline(always)]
    pub fn blerd_ddft_en(&mut self) -> BLERD_DDFT_EN_W {
        BLERD_DDFT_EN_W { w: self }
    }
    #[doc = "Bits 8:12 - dbg_mux_pin1 selection, combine with BLERD and BLESS 5'h00 blerd_ddft_out\\[0\\] 5'h01 rcb_tx_fifo_empty 5'h02 hv_ldo_lv_detect_raw 5'h03 dbus_rx_en 5'h04 1'b0 5'h05 clk_switch_to_sysclk 5'h06 ll_clk_en_sync 5'h07 dsm_entry_stat 5'h08 proc_tx_en 5'h09 rssi_read_start 5'h0A tx_2mbps 5'h0B rcb_bus_busy 5'h0C hv_ldo_en_mt (act_stdbyb) 5'h0D ll_eco_clk_en 5'h0E blerd_reset_assert 5'h0F hv_ldo_byp_n 5'h10 hv_ldo_lv_detect_mt 5'h11 enable_ldo 5'h12 enable_ldo_dly 5'h13 bless_rcb_le_out 5'h14 bless_rcb_clk_out 5'h15 bless_dig_ldo_on_out 5'h16 bless_act_ldo_en_out 5'h17 bless_clk_en_out 5'h18 bless_buck_en_out 5'h19 bless_ret_switch_hv_out 5'h1A efuse_rw_out 5'h1B efuse_avdd_out 5'h1C efuse_config_efuse_mode 5'h1D bless_dbus_tx_en_pad 5'h1E bless_bpktctl_rd 5'h1F 1'b0"]
    #[inline(always)]
    pub fn ddft_mux_cfg1(&mut self) -> DDFT_MUX_CFG1_W {
        DDFT_MUX_CFG1_W { w: self }
    }
    #[doc = "Bits 16:20 - dbg_mux_pin2 selection, combine with BLERD and BLESS 5'h00 blerd_ddft_out\\[1\\] 5'h01 rcb_rx_fifo_empty 5'h02 ll_decode_rxdata 5'h03 dbus_tx_en 5'h04 fw_clk_en 5'h05 interrupt_ll_n 5'h06 llh_st_sm 5'h07 llh_st_dsm 5'h08 proc_rx_en 5'h09 rssi_rx_done 5'h0A rx_2mbps 5'h0B rcb_ll_ctrl 5'h0C hv_ldo_byp_n 5'h0D reset_deassert 5'h0E rcb_intr 5'h0F rcb_ll_intr 5'h10 hv_ldo_en_mt (act_stdbyb) 5'h11 hv_ldo_lv_detect_raw 5'h12 bless_rcb_data_in 5'h13 bless_xtal_en_out 5'h14 bless_isolate_n_out 5'h15 bless_reset_n_out 5'h16 bless_ret_ldo_ol_hv_out 5'h17 bless_txd_rxd_out 5'h18 tx_rx_ctrl_sel 5'h19 bless_bpktctl_cy 5'h1A efuse_cs_out 5'h1B efuse_pgm_out 5'h1C efuse_sclk_out 5'h1D hv_ldo_lv_detect_mt 5'h1E enable_ldo 5'h1F enable_ldo_dly"]
    #[inline(always)]
    pub fn ddft_mux_cfg2(&mut self) -> DDFT_MUX_CFG2_W {
        DDFT_MUX_CFG2_W { w: self }
    }
}
|
// This file is based on https://github.com/alce/tonic/blob/86bbb1d5a4844882dec81bef7c1a554bd9464adf/tonic-web/tonic-web/src/call.rs
use std::mem;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::{convert::TryInto, error::Error};
use byteorder::{BigEndian, ByteOrder};
use bytes::{Buf, BufMut, Bytes, BytesMut};
use futures::{ready, Stream};
use http::{
header::{self, HeaderName},
HeaderMap, HeaderValue,
};
use http_body::{Body, SizeHint};
use tonic::Status;
use self::content_types::*;
pub(crate) mod content_types {
    use http::{header::CONTENT_TYPE, HeaderMap};

    pub(crate) const GRPC_WEB: &str = "application/grpc-web";
    pub(crate) const GRPC_WEB_PROTO: &str = "application/grpc-web+proto";
    pub(crate) const GRPC_WEB_TEXT: &str = "application/grpc-web-text";
    pub(crate) const GRPC_WEB_TEXT_PROTO: &str = "application/grpc-web-text+proto";

    /// True when the `content-type` header is one of the four grpc-web
    /// media types this module understands.
    pub(crate) fn is_grpc_web(headers: &HeaderMap) -> bool {
        content_type(headers).map_or(false, |ct| {
            ct == GRPC_WEB || ct == GRPC_WEB_PROTO || ct == GRPC_WEB_TEXT || ct == GRPC_WEB_TEXT_PROTO
        })
    }

    /// The `content-type` header as a `&str`, if present and valid UTF-8.
    fn content_type(headers: &HeaderMap) -> Option<&str> {
        headers.get(CONTENT_TYPE).and_then(|val| val.to_str().ok())
    }
}
// Initial capacity of the base64 scratch buffer used when encoding output.
const BUFFER_SIZE: usize = 2 * 1024;
// 8th (MSB) bit of the 1st gRPC frame byte
// denotes an uncompressed trailer (as part of the body)
const GRPC_WEB_TRAILERS_BIT: u8 = 0b10000000;
// Length of a grpc-web frame header: 1 flag byte + 4-byte big-endian length.
const HEADER_SIZE: usize = 5;
// Direction of this wrapper: decoding incoming grpc-web bytes, or encoding
// outgoing gRPC bytes into grpc-web.
#[derive(Copy, Clone, PartialEq, Debug)]
enum Mode {
    Decode,
    Encode,
}
// Wire encoding of the grpc-web payload: raw bytes (`application/grpc-web`)
// or base64 text (`application/grpc-web-text`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Encoding {
    None,
    Base64,
}
// Progress of the in-body frame scanner (`handle_frames`); each variant
// carries the number of bytes still expected for that phase.
#[derive(Copy, Clone, PartialEq, Debug)]
enum State {
    ReadHeader(usize),
    ReadData(usize),
    ReadTrailers(usize),
    Done,
}
// Adapter that translates between a gRPC body and its grpc-web encoding.
pub(crate) struct GrpcWebCall<B> {
    // Wrapped HTTP body.
    inner: B,
    // Buffered not-yet-decodable base64 input (or encode scratch space).
    buf: BytesMut,
    // Accumulates a partial frame header / trailers block across chunks.
    header_buf: BytesMut,
    // Whether this instance decodes or encodes (fixed at construction).
    mode: Mode,
    // Raw bytes vs base64 text on the wire.
    encoding: Encoding,
    // Client-side: parse the in-body trailers frame back into HTTP trailers.
    decode_trailers: bool,
    // Encode-side: still need to emit the trailers frame when data ends.
    poll_trailers: bool,
    // Frame-scanner progress (see `State`).
    state: State,
    // Trailers recovered from the in-body trailers frame.
    trailers: HeaderMap<HeaderValue>,
}
impl<B> GrpcWebCall<B>
where
    B: Body<Data = Bytes> + Unpin,
    B::Error: Error,
{
    /// Server side, incoming request: decode grpc-web into plain gRPC frames.
    pub(crate) fn server_request(inner: B, encoding: Encoding) -> Self {
        Self::new(inner, Mode::Decode, encoding, true, false)
    }

    /// Server side, outgoing response: encode, folding trailers into the body.
    pub(crate) fn server_response(inner: B, encoding: Encoding) -> Self {
        Self::new(inner, Mode::Encode, encoding, true, false)
    }

    /// Client side, outgoing request: encode; requests carry no trailers.
    pub(crate) fn client_request(inner: B, encoding: Encoding) -> Self {
        Self::new(inner, Mode::Encode, encoding, false, false)
    }

    /// Client side, incoming response: decode and reconstruct HTTP trailers
    /// from the in-body trailers frame.
    pub(crate) fn client_response(inner: B, encoding: Encoding) -> Self {
        Self::new(inner, Mode::Decode, encoding, true, true)
    }

    fn new(
        inner: B,
        mode: Mode,
        encoding: Encoding,
        poll_trailers: bool,
        decode_trailers: bool,
    ) -> Self {
        GrpcWebCall {
            inner,
            // Scratch space is only needed when base64-encoding output.
            buf: BytesMut::with_capacity(match (mode, encoding) {
                (Mode::Encode, Encoding::Base64) => BUFFER_SIZE,
                _ => 0,
            }),
            header_buf: BytesMut::with_capacity(if decode_trailers { HEADER_SIZE } else { 0 }),
            mode,
            encoding,
            poll_trailers,
            decode_trailers,
            // Use the named constant instead of a magic `5`.
            state: State::ReadHeader(HEADER_SIZE),
            trailers: HeaderMap::new(),
        }
    }

    /// Largest prefix length of `buf` that is a whole number of base64
    /// quanta (a multiple of 4 bytes).
    #[inline]
    fn max_decodable(&self) -> usize {
        (self.buf.len() / 4) * 4
    }

    fn decode_chunk(&mut self) -> Result<Option<Bytes>, Status> {
        // Not enough bytes buffered to form a complete base64 quantum.
        // (The previous `is_empty() ||` test was subsumed by this check.)
        if self.buf.len() < 4 {
            return Ok(None);
        }
        // Split `buf` at the largest index that is multiple of 4. Decode the
        // returned `Bytes`, keeping the rest for the next attempt to decode.
        base64::decode(self.buf.split_to(self.max_decodable()).freeze())
            .map(|decoded| Some(Bytes::from(decoded)))
            .map_err(internal_error)
    }

    // Key-value pairs encoded as a HTTP/1 headers block (without the terminating newline)
    fn encode_trailers(&self, trailers: HeaderMap) -> Vec<u8> {
        trailers.iter().fold(Vec::new(), |mut acc, (key, value)| {
            acc.put_slice(key.as_ref());
            acc.push(b':');
            acc.put_slice(value.as_bytes());
            acc.put_slice(b"\r\n");
            acc
        })
    }

    /// Builds a full grpc-web trailers frame: flag byte with the trailers
    /// bit set, big-endian u32 payload length, then the headers block.
    fn make_trailers_frame(&self, trailers: HeaderMap) -> Vec<u8> {
        let trailers = self.encode_trailers(trailers);
        let len = trailers.len();
        assert!(len <= u32::MAX as usize);
        let mut frame = Vec::with_capacity(len + HEADER_SIZE);
        frame.push(GRPC_WEB_TRAILERS_BIT);
        frame.put_u32(len as u32);
        frame.extend(trailers);
        frame
    }

    /// Scans one decoded chunk for an in-body trailers frame (client-side
    /// response decoding only). Data frames pass through untouched; the
    /// trailers frame is stripped from the returned bytes and parsed into
    /// `self.trailers`. Frames and frame headers may span chunk boundaries,
    /// so progress is carried in `self.state` / `self.header_buf`.
    fn handle_frames(&mut self, mut bytes: Bytes) -> Result<Bytes, <B as Body>::Error> {
        if !self.decode_trailers {
            return Ok(bytes);
        }
        let mut curr_idx: usize = 0;
        let mut return_len = bytes.len();
        loop {
            if self.state == State::Done || curr_idx == bytes.len() {
                bytes.truncate(return_len);
                return Ok(bytes);
            }
            match self.state {
                State::ReadHeader(mut remaining) => {
                    // Copy as much of the 5-byte frame header as this chunk holds.
                    let copy_len = if bytes.len() - curr_idx < remaining {
                        bytes.len() - curr_idx
                    } else {
                        remaining
                    };
                    self.header_buf
                        .extend_from_slice(&bytes[curr_idx..curr_idx + copy_len]);
                    curr_idx += copy_len;
                    remaining -= copy_len;
                    let is_trailer = self.header_buf[0] & GRPC_WEB_TRAILERS_BIT != 0;
                    if remaining > 0 {
                        // Header continues in the next chunk.
                        self.state = State::ReadHeader(remaining);
                        if is_trailer {
                            // don't return trailers frame
                            return_len = bytes.len() - copy_len;
                        }
                        continue;
                    }
                    let frame_len: usize = BigEndian::read_u32(&self.header_buf[1..])
                        .try_into()
                        .unwrap();
                    self.header_buf.clear();
                    if is_trailer {
                        self.header_buf.reserve(frame_len);
                        self.state = State::ReadTrailers(frame_len);
                        return_len = curr_idx - copy_len;
                    } else {
                        self.state = State::ReadData(frame_len);
                    }
                }
                State::ReadData(remaining) => {
                    let buf_remaining = bytes.len() - curr_idx;
                    if buf_remaining < remaining {
                        // FIX: subtract only the bytes actually consumed from this
                        // frame (`buf_remaining`), not the whole chunk length. The
                        // previous `remaining - bytes.len()` undercounted (and could
                        // underflow) whenever the frame did not start at index 0,
                        // corrupting the parse of every subsequent frame.
                        self.state = State::ReadData(remaining - buf_remaining);
                        return Ok(bytes);
                    } else {
                        self.state = State::ReadHeader(HEADER_SIZE);
                        curr_idx += remaining;
                    }
                }
                State::ReadTrailers(remaining) => {
                    if curr_idx == 0 {
                        // if we just read a header, then the return_len is already correct, otherwise zero it out.
                        return_len = 0;
                    }
                    let buf_remaining = bytes.len() - curr_idx;
                    if buf_remaining < remaining {
                        // Trailers block continues in the next chunk.
                        self.header_buf.extend_from_slice(&bytes[curr_idx..]);
                        self.state = State::ReadTrailers(remaining - buf_remaining);
                        curr_idx += buf_remaining;
                        continue;
                    }
                    self.header_buf
                        .extend_from_slice(&bytes[curr_idx..curr_idx + remaining]);
                    let mut header_bytes = mem::replace(&mut self.header_buf, BytesMut::new());
                    let mut trailers = [httparse::EMPTY_HEADER; 64];
                    header_bytes.extend_from_slice(b"\n"); // parse_headers returns Status::Partial without this
                    let (_, trailers) =
                        httparse::parse_headers(header_bytes.as_ref(), &mut trailers)
                            .unwrap()
                            .unwrap();
                    self.trailers.reserve(trailers.len());
                    for h in trailers {
                        self.trailers.append(
                            HeaderName::from_bytes(h.name.as_bytes()).unwrap(),
                            HeaderValue::from_bytes(h.value).unwrap(),
                        );
                    }
                    self.state = State::Done;
                }
                State::Done => {}
            }
        }
    }
}
impl<B> GrpcWebCall<B>
where
    B: Body<Data = Bytes> + Unpin,
    B::Error: Error,
{
    // Polls the inner body and decodes it (base64 if configured), passing
    // each chunk through `handle_frames` so an in-body trailers frame can be
    // stripped and collected when client-side trailer decoding is enabled.
    fn poll_decode(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<B::Data, Status>>> {
        match self.encoding {
            Encoding::Base64 => loop {
                // Emit whatever is already decodable from the buffer first.
                if let Some(bytes) = self.decode_chunk()? {
                    return Poll::Ready(Some(self.handle_frames(bytes).map_err(internal_error)));
                }
                match ready!(Pin::new(&mut self.inner).poll_data(cx)) {
                    Some(Ok(data)) => self.buf.put(data),
                    Some(Err(e)) => return Poll::Ready(Some(Err(internal_error(e)))),
                    None => {
                        // Stream ended: leftover bytes mean the base64 payload
                        // was not a whole number of 4-byte groups.
                        return if self.buf.has_remaining() {
                            Poll::Ready(Some(Err(internal_error("malformed base64 request"))))
                        } else {
                            Poll::Ready(None)
                        }
                    }
                }
            },
            // No text encoding: pass chunks straight to the frame scanner.
            Encoding::None => match ready!(Pin::new(&mut self.inner).poll_data(cx)) {
                Some(res) => Poll::Ready(Some(
                    res.and_then(|b| self.handle_frames(b))
                        .map_err(internal_error),
                )),
                None => Poll::Ready(None),
            },
        }
    }
    // Polls the inner body and encodes it for grpc-web; once the data stream
    // is exhausted, the inner trailers are emitted as a trailers frame.
    fn poll_encode(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<B::Data, Status>>> {
        if let Some(mut res) = ready!(Pin::new(&mut self.inner).poll_data(cx)) {
            if self.encoding == Encoding::Base64 {
                res = res.map(|b| base64::encode(b).into())
            }
            return Poll::Ready(Some(res.map_err(internal_error)));
        }
        // this flag is needed because the inner stream never
        // returns Poll::Ready(None) when polled for trailers
        if self.poll_trailers {
            return match ready!(Pin::new(&mut self.inner).poll_trailers(cx)) {
                Ok(Some(map)) => {
                    let mut frame = self.make_trailers_frame(map);
                    if self.encoding == Encoding::Base64 {
                        frame = base64::encode(frame).into_bytes();
                    }
                    // Emit the trailers frame exactly once.
                    self.poll_trailers = false;
                    Poll::Ready(Some(Ok(frame.into())))
                }
                Ok(None) => Poll::Ready(None),
                Err(e) => Poll::Ready(Some(Err(internal_error(e)))),
            };
        }
        Poll::Ready(None)
    }
}
impl<B> Body for GrpcWebCall<B>
where
    B: Body<Data = Bytes> + Unpin,
    B::Error: Error,
{
    type Data = Bytes;
    type Error = Status;
    fn poll_data(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Self::Data, Self::Error>>> {
        // Direction was fixed at construction (see the named constructors).
        match self.mode {
            Mode::Decode => self.poll_decode(cx),
            Mode::Encode => self.poll_encode(cx),
        }
    }
    fn poll_trailers(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<Option<HeaderMap<HeaderValue>>, Self::Error>> {
        // Trailers only exist here when we are reconstructing them from an
        // in-body trailers frame (client-side response decoding).
        if !self.decode_trailers {
            return Poll::Ready(Ok(None));
        }
        // Drive the data stream until the trailers frame has been fully
        // parsed (state == Done), then hand out the collected map, leaving
        // an empty map behind so this cannot yield the trailers twice.
        loop {
            if self.state == State::Done {
                return Poll::Ready(Ok(Some(mem::replace(&mut self.trailers, HeaderMap::new()))));
            }
            match ready!(self.as_mut().poll_decode(cx)) {
                Some(Err(e)) => return Poll::Ready(Err(e)),
                _ => {}
            };
        }
    }
    fn is_end_stream(&self) -> bool {
        self.inner.is_end_stream()
    }
    fn size_hint(&self) -> SizeHint {
        self.inner.size_hint()
    }
}
/// Stream adapter: yields exactly the chunks produced by `Body::poll_data`.
impl<B> Stream for GrpcWebCall<B>
where
    B: Body<Data = Bytes> + Unpin,
    B::Error: Error,
{
    type Item = Result<Bytes, Status>;

    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Fully-qualified call to disambiguate from the inherent poll_* helpers.
        <Self as Body>::poll_data(self, cx)
    }
}
impl Encoding {
pub(crate) fn from_content_type(headers: &HeaderMap) -> Encoding {
Self::from_header(headers.get(header::CONTENT_TYPE))
}
pub(crate) fn from_accept(headers: &HeaderMap) -> Encoding {
Self::from_header(headers.get(header::ACCEPT))
}
pub(crate) fn to_content_type(&self) -> &'static str {
match self {
Encoding::Base64 => GRPC_WEB_TEXT_PROTO,
Encoding::None => GRPC_WEB_PROTO,
}
}
fn from_header(value: Option<&HeaderValue>) -> Encoding {
match value.and_then(|val| val.to_str().ok()) {
Some(GRPC_WEB_TEXT_PROTO) | Some(GRPC_WEB_TEXT) => Encoding::Base64,
_ => Encoding::None,
}
}
}
/// Maps any displayable error into a gRPC `internal` status.
fn internal_error(e: impl std::fmt::Display) -> Status {
    Status::internal(e.to_string())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Each media type must map to the expected `Encoding`, whether it
    /// arrives via `content-type` or via `accept`.
    #[test]
    fn encoding_constructors() {
        let cases = &[
            (GRPC_WEB, Encoding::None),
            (GRPC_WEB_PROTO, Encoding::None),
            (GRPC_WEB_TEXT, Encoding::Base64),
            (GRPC_WEB_TEXT_PROTO, Encoding::Base64),
            ("foo", Encoding::None),
        ];
        let mut headers = HeaderMap::new();
        for (content, expected) in cases {
            headers.insert(header::CONTENT_TYPE, content.parse().unwrap());
            headers.insert(header::ACCEPT, content.parse().unwrap());
            assert_eq!(Encoding::from_content_type(&headers), *expected, "{}", content);
            assert_eq!(Encoding::from_accept(&headers), *expected, "{}", content);
        }
    }
}
|
// svd2rust-generated reader/writer proxies for the I2SCFGR register fields.
#[doc = "Register `I2SCFGR` reader"]
pub type R = crate::R<I2SCFGR_SPEC>;
#[doc = "Register `I2SCFGR` writer"]
pub type W = crate::W<I2SCFGR_SPEC>;
#[doc = "Field `CHLEN` reader - Channel length (number of bits per audio channel)"]
pub type CHLEN_R = crate::BitReader;
#[doc = "Field `CHLEN` writer - Channel length (number of bits per audio channel)"]
pub type CHLEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DATLEN` reader - Data length to be transferred"]
pub type DATLEN_R = crate::FieldReader;
#[doc = "Field `DATLEN` writer - Data length to be transferred"]
pub type DATLEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `CKPOL` reader - Steady state clock polarity"]
pub type CKPOL_R = crate::BitReader;
#[doc = "Field `CKPOL` writer - Steady state clock polarity"]
pub type CKPOL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2SSTD` reader - I2S standard selection"]
pub type I2SSTD_R = crate::FieldReader;
#[doc = "Field `I2SSTD` writer - I2S standard selection"]
pub type I2SSTD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `PCMSYNC` reader - PCM frame synchronization"]
pub type PCMSYNC_R = crate::BitReader;
#[doc = "Field `PCMSYNC` writer - PCM frame synchronization"]
pub type PCMSYNC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2SCFG` reader - I2S configuration mode"]
pub type I2SCFG_R = crate::FieldReader;
#[doc = "Field `I2SCFG` writer - I2S configuration mode"]
pub type I2SCFG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `I2SE` reader - I2S Enable"]
pub type I2SE_R = crate::BitReader;
#[doc = "Field `I2SE` writer - I2S Enable"]
pub type I2SE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `I2SMOD` reader - I2S mode selection"]
pub type I2SMOD_R = crate::BitReader;
#[doc = "Field `I2SMOD` writer - I2S mode selection"]
pub type I2SMOD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors decode the raw 32-bit register value by shifting the
// field down to bit 0 and masking off its width.
impl R {
    #[doc = "Bit 0 - Channel length (number of bits per audio channel)"]
    #[inline(always)]
    pub fn chlen(&self) -> CHLEN_R {
        CHLEN_R::new(self.bits & 0b1 == 0b1)
    }
    #[doc = "Bits 1:2 - Data length to be transferred"]
    #[inline(always)]
    pub fn datlen(&self) -> DATLEN_R {
        DATLEN_R::new((self.bits >> 1) as u8 & 0x03)
    }
    #[doc = "Bit 3 - Steady state clock polarity"]
    #[inline(always)]
    pub fn ckpol(&self) -> CKPOL_R {
        CKPOL_R::new((self.bits >> 3) & 0b1 == 0b1)
    }
    #[doc = "Bits 4:5 - I2S standard selection"]
    #[inline(always)]
    pub fn i2sstd(&self) -> I2SSTD_R {
        I2SSTD_R::new((self.bits >> 4) as u8 & 0x03)
    }
    #[doc = "Bit 7 - PCM frame synchronization"]
    #[inline(always)]
    pub fn pcmsync(&self) -> PCMSYNC_R {
        PCMSYNC_R::new((self.bits >> 7) & 0b1 == 0b1)
    }
    #[doc = "Bits 8:9 - I2S configuration mode"]
    #[inline(always)]
    pub fn i2scfg(&self) -> I2SCFG_R {
        I2SCFG_R::new((self.bits >> 8) as u8 & 0x03)
    }
    #[doc = "Bit 10 - I2S Enable"]
    #[inline(always)]
    pub fn i2se(&self) -> I2SE_R {
        I2SE_R::new((self.bits >> 10) & 0b1 == 0b1)
    }
    #[doc = "Bit 11 - I2S mode selection"]
    #[inline(always)]
    pub fn i2smod(&self) -> I2SMOD_R {
        I2SMOD_R::new((self.bits >> 11) & 0b1 == 0b1)
    }
}
// Field writer constructors; the const generic on each writer type carries
// the field's bit offset within the register.
impl W {
    #[doc = "Bit 0 - Channel length (number of bits per audio channel)"]
    #[inline(always)]
    #[must_use]
    pub fn chlen(&mut self) -> CHLEN_W<I2SCFGR_SPEC, 0> {
        CHLEN_W::new(self)
    }
    #[doc = "Bits 1:2 - Data length to be transferred"]
    #[inline(always)]
    #[must_use]
    pub fn datlen(&mut self) -> DATLEN_W<I2SCFGR_SPEC, 1> {
        DATLEN_W::new(self)
    }
    #[doc = "Bit 3 - Steady state clock polarity"]
    #[inline(always)]
    #[must_use]
    pub fn ckpol(&mut self) -> CKPOL_W<I2SCFGR_SPEC, 3> {
        CKPOL_W::new(self)
    }
    #[doc = "Bits 4:5 - I2S standard selection"]
    #[inline(always)]
    #[must_use]
    pub fn i2sstd(&mut self) -> I2SSTD_W<I2SCFGR_SPEC, 4> {
        I2SSTD_W::new(self)
    }
    #[doc = "Bit 7 - PCM frame synchronization"]
    #[inline(always)]
    #[must_use]
    pub fn pcmsync(&mut self) -> PCMSYNC_W<I2SCFGR_SPEC, 7> {
        PCMSYNC_W::new(self)
    }
    #[doc = "Bits 8:9 - I2S configuration mode"]
    #[inline(always)]
    #[must_use]
    pub fn i2scfg(&mut self) -> I2SCFG_W<I2SCFGR_SPEC, 8> {
        I2SCFG_W::new(self)
    }
    #[doc = "Bit 10 - I2S Enable"]
    #[inline(always)]
    #[must_use]
    pub fn i2se(&mut self) -> I2SE_W<I2SCFGR_SPEC, 10> {
        I2SE_W::new(self)
    }
    #[doc = "Bit 11 - I2S mode selection"]
    #[inline(always)]
    #[must_use]
    pub fn i2smod(&mut self) -> I2SMOD_W<I2SCFGR_SPEC, 11> {
        I2SMOD_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: the caller must ensure the raw value is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "I2S configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`i2scfgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`i2scfgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct I2SCFGR_SPEC;
impl crate::RegisterSpec for I2SCFGR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`i2scfgr::R`](R) reader structure"]
impl crate::Readable for I2SCFGR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`i2scfgr::W`](W) writer structure"]
impl crate::Writable for I2SCFGR_SPEC {
    // No write-1-to-clear or write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets I2SCFGR to value 0"]
impl crate::Resettable for I2SCFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::Entity;
/// A sink that can store entities of type `E` keyed by `E::Key`.
pub trait Insert<E: Entity> {
    /// Stores `v` under key `k`. Duplicate-key behavior is left to the
    /// implementation (not specified by this trait).
    fn insert(&mut self, k: E::Key, v: E);
}
/// A mutable reference to a sink is itself a sink: calls are forwarded
/// to the referenced implementation.
impl<E, T> Insert<E> for &mut T
where
    E: Entity,
    T: Insert<E>,
{
    fn insert(&mut self, k: E::Key, v: E) {
        // Reborrow through the double reference and delegate.
        T::insert(&mut **self, k, v)
    }
}
|
use crate::{bool_to_option, event_details_into};
use gloo::events::EventListener;
use js_sys::Object;
use std::borrow::Cow;
use wasm_bindgen::prelude::*;
use web_sys::Element;
use yew::prelude::*;
// JS interop glue: binds the `Tab` custom-element class exported by the
// bundled `mwc-tab` module.
#[wasm_bindgen(module = "/build/mwc-tab.js")]
extern "C" {
    #[derive(Debug)]
    type Tab;
    // Dummy static getter whose only role is to force the JS module to load.
    #[wasm_bindgen(getter, static_method_of = Tab)]
    fn _dummy_loader() -> JsValue;
}
// NOTE(review): loader_hack! is project-defined; presumably it generates the
// `Tab::ensure_loaded` used in `MatTab::create` — confirm against the macro.
loader_hack!(Tab);
/// The `mwc-tab` component
///
/// [MWC Documentation](https://github.com/material-components/material-components-web-components/tree/master/packages/tab)
pub struct MatTab {
    // Current props; replaced wholesale in `change`.
    props: TabProps,
    // Handle to the rendered `mwc-tab` DOM element.
    node_ref: NodeRef,
    // Keeps the `MDCTab:interacted` DOM listener alive for the component's lifetime.
    interacted_listener: Option<EventListener>,
}
/// Props for `MatTab`
///
/// MWC Documentation [properties](https://github.com/material-components/material-components-web-components/tree/master/packages/tab#propertiesattributes)
/// and [events](https://github.com/material-components/material-components-web-components/tree/master/packages/tab#events)
#[derive(Debug, Properties, Clone)]
pub struct TabProps {
    /// Text label rendered on the tab.
    #[prop_or_default]
    pub label: Cow<'static, str>,
    /// Material icon name shown on the tab.
    #[prop_or_default]
    pub icon: Cow<'static, str>,
    /// Maps to the `hasImageIcon` attribute of `mwc-tab`.
    #[prop_or_default]
    pub has_image_icon: bool,
    /// Maps to the `indicatorIcon` attribute of `mwc-tab`.
    #[prop_or_default]
    pub indicator_icon: Cow<'static, str>,
    /// Maps to the `isFadingIndicator` attribute of `mwc-tab`.
    #[prop_or_default]
    pub is_fading_indicator: bool,
    /// Maps to the `minWidth` attribute of `mwc-tab`.
    #[prop_or_default]
    pub min_width: bool,
    /// Maps to the `isMinWidthIndicator` attribute of `mwc-tab`.
    #[prop_or_default]
    pub is_min_width_indicator: bool,
    /// Maps to the `stacked` attribute of `mwc-tab`.
    #[prop_or_default]
    pub stacked: bool,
    /// Binds to `MDCTab:interacted` event on `mwc-tab`
    ///
    /// See events docs to learn more.
    #[prop_or_default]
    pub oninteracted: Callback<String>,
    /// Child nodes rendered inside the tab element.
    #[prop_or_default]
    pub children: Children,
}
impl Component for MatTab {
    type Message = ();
    type Properties = TabProps;
    fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self {
        // Make sure the JS custom element is registered before first render.
        Tab::ensure_loaded();
        Self {
            props,
            node_ref: NodeRef::default(),
            interacted_listener: None,
        }
    }
    // No internal messages: the component never re-renders from `update`.
    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        false
    }
    // Accept new props as-is and re-render.
    fn change(&mut self, props: Self::Properties) -> bool {
        self.props = props;
        true
    }
    fn view(&self) -> Html {
        // Boolean props are passed through `bool_to_option` so `false`
        // omits the attribute entirely instead of rendering `"false"`.
        html! {
            <mwc-tab
                label=self.props.label.clone()
                icon=self.props.icon.clone()
                hasImageIcon=bool_to_option(self.props.has_image_icon)
                indicatorIcon=self.props.indicator_icon.clone()
                isFadingIndicator=bool_to_option(self.props.is_fading_indicator)
                minWidth=bool_to_option(self.props.min_width)
                isMinWidthIndicator=bool_to_option(self.props.is_min_width_indicator)
                stacked=bool_to_option(self.props.stacked)
                ref=self.node_ref.clone()
            >{ self.props.children.clone() }</mwc-tab>
        }
    }
    fn rendered(&mut self, _first_render: bool) {
        // Attach the interaction listener once, on the first render pass
        // that produced the element.
        if self.interacted_listener.is_none() {
            let element = self.node_ref.cast::<Element>().unwrap();
            let on_interacted = self.props.oninteracted.clone();
            self.interacted_listener = Some(EventListener::new(
                &element,
                "MDCTab:interacted",
                move |event| {
                    // Forward the interacted tab's id to the user callback.
                    let detail = event_details_into::<InteractedDetailJS>(event);
                    on_interacted.emit(detail.tab_id());
                },
            ));
        }
    }
}
// Typed view over the `CustomEvent.detail` object of `MDCTab:interacted`.
#[wasm_bindgen]
extern "C" {
    #[derive(Debug)]
    #[wasm_bindgen(extends = Object)]
    type InteractedDetailJS;
    // Reads the `tabId` property from the event detail.
    #[wasm_bindgen(method, getter, js_name=tabId)]
    fn tab_id(this: &InteractedDetailJS) -> String;
}
|
use multiaddr::{Multiaddr, Protocol};
/// True when `p` is an IP-layer protocol (IPv4 or IPv6).
fn is_ip(p: Protocol) -> bool {
    p == Protocol::IP4 || p == Protocol::IP6
}
// Comment from go-multiaddr-net:
//
// "IsThinWaist returns whether a Multiaddr starts with "Thin Waist" Protocols.
// This means: /{IP4, IP6}[/{TCP, UDP}]"
/// Returns whether `m` starts with "Thin Waist" protocols, i.e. matches
/// `/{IP4, IP6}[/{TCP, UDP}]` (a second IP protocol is also accepted,
/// matching go-multiaddr-net's behavior for IP-in-IP addresses).
fn is_thin_waist(m: &Multiaddr) -> bool {
    let protocol = m.protocol();
    // An empty protocol list can never be thin-waist.
    if protocol.is_empty() {
        return false;
    }
    // The first protocol must be IP.
    if !is_ip(protocol[0]) {
        return false;
    }
    // With only one protocol, a leading IP suffices; otherwise the second
    // must be TCP, UDP, or another IP protocol. (The original re-checked
    // `is_ip(p1)` here, which is always true at this point.)
    match protocol.get(1) {
        None => true,
        Some(&p2) => p2 == Protocol::TCP || p2 == Protocol::UDP || is_ip(p2),
    }
}
#[cfg(test)]
mod tests {
    use multiaddr::Multiaddr;

    /// Every address here starts with an IP protocol, optionally followed
    /// by TCP/UDP, so all must be classified as thin-waist.
    #[test]
    fn thin_waist() {
        let test_maddrs = [
            "/ip4/127.0.0.1/udp/1234",
            "/ip4/127.0.0.1/tcp/1234",
            "/ip4/1.2.3.4",
            "/ip4/0.0.0.0",
            "/ip6/::1",
            "/ip6/2601:9:4f81:9700:803e:ca65:66e8:c21",
            "/ip6/2601:9:4f81:9700:803e:ca65:66e8:c21/udp/1234",
        ];
        for maddr_str in test_maddrs.iter() {
            let maddr = Multiaddr::new(maddr_str)
                .unwrap_or_else(|e| panic!("Error parsing multiaddr {}: {}", maddr_str, e));
            assert!(super::is_thin_waist(&maddr));
        }
    }
}
|
//http://rustbyexample.com/fn/closures/input_parameters.html
/// Consumes `f` by calling it exactly once.
/// From http://rustbyexample.com/fn/closures/input_parameters.html
fn apply<F: FnOnce()>(f: F) {
    f()
}
/// Demonstrates passing a capturing closure as a function parameter.
pub fn input_param() {
    let greeting = "hello";
    // Borrows `greeting` immutably, so `Fn` (and thus `FnOnce`) is satisfied.
    let diary = || println!("I said {}", greeting);
    apply(diary);
}
|
use std::path::PathBuf;
use std::process::{Command, Stdio};
use rsh::State;
// TODO "Error handling mother trucker, do you speak it?"
/// Looks up `argv[0]` in the shell's executable search paths and, if found,
/// runs it with the remaining arguments and inherited stdio.
///
/// Returns the child's exit code, or `1000` (the shell's internal-error
/// sentinel) when the command cannot be found, spawned, or waited on.
pub fn exec(s: &State) -> i32 {
    let mut args = s.argv.iter();
    // First argv entry is the command name; the rest become its arguments.
    let exec_name = args.next().unwrap().as_str();
    // Scan each search path for a directory entry matching the command name.
    let mut exec_path: Option<PathBuf> = None;
    'outer: for path in s.exec_paths().iter() {
        // Unreadable directories are silently skipped.
        let entries = match path.read_dir() {
            Ok(entries) => entries,
            Err(_) => continue,
        };
        for entry in entries.filter_map(|e| e.ok()) {
            if entry.file_name() == *exec_name {
                // `path()` already returns an owned PathBuf; no clone needed.
                exec_path = Some(entry.path());
                break 'outer;
            }
        }
    }
    let exec_path = match exec_path {
        Some(p) => p,
        None => {
            println!("No such command: {}", exec_name);
            return 1000;
        }
    };
    let command = Command::new(exec_path.to_str().unwrap())
        // `args` still holds everything after the command name; Command::args
        // accepts the iterator directly, so no intermediate Vec is needed.
        .args(args)
        .current_dir(s.cwd.clone())
        .stdin(Stdio::inherit())
        .stdout(Stdio::inherit())
        .stderr(Stdio::inherit())
        .spawn();
    let mut child = match command {
        Ok(child) => child,
        Err(e) => {
            println!("Error running {}: {}", exec_name, e);
            return 1000;
        }
    };
    match child.wait() {
        // Exit code can be absent (e.g. killed by a signal); map to the sentinel.
        Ok(exit) => exit.code().unwrap_or(1000),
        Err(e) => {
            println!("Error starting {}: {}", exec_name, e);
            1000
        }
    }
}
|
use std::collections::HashMap;
// A minimal HTTP response: status line parts, headers, and a raw body.
pub struct Response {
    // HTTP version string (e.g. "1.1"); serialized as "HTTP/{version}".
    pub version: String,
    // Reason phrase derived from `response_code` (see `_phrase_from_code`).
    pub response_phrase: String,
    // Numeric status code (e.g. 200, 404).
    pub response_code: i32,
    // Header name -> value map; access via get_header/set_header.
    headers: HashMap<String, String>,
    // Raw response body bytes.
    pub body: Vec<u8>,
}
impl Response {
    /// Creates a response with the given HTTP version and status code;
    /// the reason phrase is derived from the code.
    pub fn new(version: &str, response_code: i32) -> Response {
        Response {
            version: String::from(version),
            response_code: response_code,
            response_phrase: _phrase_from_code(response_code),
            headers: HashMap::new(),
            body: Vec::new(),
        }
    }
    /// Returns the value of header `name`, if set.
    pub fn get_header(&self, name: &str) -> Option<&String> {
        // `HashMap<String, _>` can be queried with `&str` directly (via
        // `Borrow<str>`), so no temporary `String` allocation is needed.
        self.headers.get(name)
    }
    /// Sets (or replaces) a header; builder-style, returns `self`.
    pub fn set_header(mut self, name: &str, value: &str) -> Self {
        self.headers.insert(String::from(name), String::from(value));
        self
    }
    /// Replaces the body with a copy of `body`; builder-style.
    pub fn set_body(mut self, body: &[u8]) -> Self {
        self.body.clear();
        self.body.extend_from_slice(body);
        self
    }
    /// Serializes status line, headers, and body into raw bytes.
    pub fn serialize(&self) -> Vec<u8> {
        let str_rep = _to_str_without_body(self);
        // Reserve the exact final size up front to avoid re-allocations.
        let mut ret = Vec::with_capacity(str_rep.len() + self.body.len());
        ret.extend_from_slice(str_rep.as_bytes());
        ret.extend_from_slice(self.body.as_slice());
        ret
    }
}
/// Renders the status line and header block (terminated by a blank line)
/// as a string; the body is intentionally excluded.
fn _to_str_without_body(res: &Response) -> String {
    let start_line = format!("HTTP/{} {} {}",
                             res.version, res.response_code, res.response_phrase);
    // Build the header block in one buffer; the previous fold re-allocated
    // the whole accumulator with `format!` on every header (O(n^2)).
    let mut headers = String::new();
    for (name, value) in res.headers.iter() {
        headers.push_str(&format!("{}: {}\r\n", name, value));
    }
    headers.push_str("\r\n");
    format!("{}\r\n{}", start_line, headers)
}
/// Maps a status code to its reason phrase; unknown codes yield "".
fn _phrase_from_code(code: i32) -> String {
    let phrase = match code {
        200 => "OK",
        404 => "NOT FOUND",
        _ => "",
    };
    phrase.to_string()
}
|
use super::SampleFitness;
use std::hash::Hash;
use std::collections::HashMap;
// Hash by `sample` only; any fitness data is deliberately excluded.
// NOTE(review): for use in hashed collections this must stay consistent with
// equality — confirm `PartialEq`/`Eq` for `SampleFitness` also compares only
// `sample`.
impl<T> Hash for SampleFitness<T>
where
    T: Hash
{
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.sample.hash(state);
    }
}
|
/// Prints the integers in 0..=100 that are not multiples of 3.
fn main() {
    // Filter while collecting instead of collecting then retaining.
    let kept: Vec<i32> = (0..=100).filter(|n| n % 3 != 0).collect();
    println!("{:?}", kept);
}
|
use crossbeam::channel::unbounded;
use std::fs;
use std::path::PathBuf;
use hshchk::hash_file_process::*;
use hshchk::{HashFileFormat, HashType};
extern crate test_shared;
// #[path = "../src/test/mod.rs"]
// mod test;
// Default checksum-file names for the hshchk and hashsum formats.
static HASHCHECK_SHA1_NAME: &str = "hshchk.sha1";
static HASHCHECK_MD5_NAME: &str = "hshchk.md5";
static HASHSUM_SHA1_NAME: &str = "SHA1SUMS";
// Expected checksum-file contents for a file named "file" containing "data"
// (hshchk format: name|size|digest; hashsum format: digest *name).
static HASHCHECK_SHA1_CONTENT: &str = "file|4|a17c9aaa61e80a1bf71d0d850af4e5baa9800bbd\n";
static HASHCHECK_MD5_CONTENT: &str = "file|4|8d777f385d3dfec8815d20f7496026dc\n";
static HASHSUM_SHA1_CONTENT: &str = "a17c9aaa61e80a1bf71d0d850af4e5baa9800bbd *file\n";
// An empty directory yields NoFilesProcessed and no checksum file.
#[test]
fn hash_file_process_create_no_files_processed() {
    let dir = test_shared::create_tmp_dir();
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        hash_type: Some(HashType::SHA1),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::NoFilesProcessed);
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// Creating over one file produces the expected SHA-1 checksum file.
#[test]
fn hash_file_process_create() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        hash_type: Some(HashType::SHA1),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    let checksum_file = dir.join(HASHCHECK_SHA1_NAME);
    assert_eq!(
        test_shared::get_file_string_content(&checksum_file),
        HASHCHECK_SHA1_CONTENT
    );
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// Same as above but with MD5 as the selected hash type.
#[test]
fn hash_file_process_create_md5() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        hash_type: Some(HashType::MD5),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    let checksum_file = dir.join(HASHCHECK_MD5_NAME);
    assert_eq!(
        test_shared::get_file_string_content(&checksum_file),
        HASHCHECK_MD5_CONTENT
    );
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// force_create overwrites a pre-existing checksum file.
#[test]
fn hash_file_process_create_force() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let checksum_file = test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, "test");
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        hash_type: Some(HashType::SHA1),
        force_create: Some(true),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    assert_eq!(
        test_shared::get_file_string_content(&checksum_file),
        HASHCHECK_SHA1_CONTENT
    );
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// Files matching ignore_pattern are excluded from the checksum file.
#[test]
fn hash_file_process_create_ignore() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let _ = test_shared::create_file_with_content(&dir, "ignore", "test");
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ignore_pattern: Some("ignore"),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    let checksum_file = dir.join(HASHCHECK_SHA1_NAME);
    assert_eq!(
        test_shared::get_file_string_content(&checksum_file),
        HASHCHECK_SHA1_CONTENT
    );
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// Only files matching match_pattern are included in the checksum file.
#[test]
fn hash_file_process_create_match() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let _ = test_shared::create_file_with_content(&dir, "unmatched", "test");
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        match_pattern: Some("file"),
        ..Default::default()
    });
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    let checksum_file = dir.join(HASHCHECK_SHA1_NAME);
    assert_eq!(
        test_shared::get_file_string_content(&checksum_file),
        HASHCHECK_SHA1_CONTENT
    );
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// A matching file verifies cleanly: no error or warning events are emitted.
#[test]
fn hash_file_process_verify() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    let (sender, receiver) = unbounded();
    let sender_error = sender.clone();
    processor.set_error_event_sender(sender_error);
    let sender_warning = sender.clone();
    processor.set_warning_event_sender(sender_warning);
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    assert!(receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// A file listed in the checksum file but absent on disk is reported Missing.
#[test]
fn hash_file_process_verify_missing() {
    let dir = test_shared::create_tmp_dir();
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    let (sender, receiver) = unbounded();
    let sender_error = sender.clone();
    processor.set_error_event_sender(sender_error);
    assert_eq!(processor.process(), HashFileProcessResult::Error);
    assert_eq!(
        FileProcessEntry {
            file_path: PathBuf::from("file"),
            state: FileProcessState::Missing
        },
        receiver.recv().unwrap()
    );
    assert!(receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// A size mismatch (8 bytes vs the recorded 4) is reported IncorrectSize.
#[test]
fn hash_file_process_verify_incorrect_size() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "datadata");
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    let (sender, receiver) = unbounded();
    let sender_error = sender.clone();
    processor.set_error_event_sender(sender_error);
    assert_eq!(processor.process(), HashFileProcessResult::Error);
    assert_eq!(
        FileProcessEntry {
            file_path: PathBuf::from("file"),
            state: FileProcessState::IncorrectSize
        },
        receiver.recv().unwrap()
    );
    assert!(receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// Same size ("tada" is also 4 bytes) but wrong digest is IncorrectHash.
#[test]
fn hash_file_process_verify_incorrect_hash() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "tada");
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    let (sender, receiver) = unbounded();
    let sender_error = sender.clone();
    processor.set_error_event_sender(sender_error);
    assert_eq!(processor.process(), HashFileProcessResult::Error);
    assert_eq!(
        FileProcessEntry {
            file_path: PathBuf::from("file"),
            state: FileProcessState::IncorrectHash
        },
        receiver.recv().unwrap()
    );
    assert!(receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// With report_extra, an unlisted file is a warning (Extra), not an error,
// and the overall result is still Success.
#[test]
fn hash_file_process_verify_report_extra() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        report_extra: Some(true),
        ..Default::default()
    });
    let (error_sender, error_receiver) = unbounded();
    let (warning_sender, warning_receiver) = unbounded();
    let sender_error = error_sender.clone();
    processor.set_error_event_sender(sender_error);
    let sender_warning = warning_sender.clone();
    processor.set_warning_event_sender(sender_warning);
    let _ = test_shared::create_file_with_content(&dir, "extra", "test");
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    assert_eq!(
        FileProcessEntry {
            file_path: PathBuf::from("extra"),
            state: FileProcessState::Extra
        },
        warning_receiver.recv().unwrap()
    );
    assert!(error_receiver.try_recv().is_err());
    assert!(warning_receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
// size_only verification accepts a wrong digest as long as the size matches
// ("tada" has the recorded 4-byte length but a different SHA-1).
#[test]
fn hash_file_process_verify_size_only() {
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "tada");
    let _ =
        test_shared::create_file_with_content(&dir, HASHCHECK_SHA1_NAME, HASHCHECK_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        size_only: Some(true),
        ..Default::default()
    });
    let (error_sender, error_receiver) = unbounded();
    let (warning_sender, warning_receiver) = unbounded();
    let sender_error = error_sender.clone();
    processor.set_error_event_sender(sender_error);
    let sender_warning = warning_sender.clone();
    processor.set_warning_event_sender(sender_warning);
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    assert!(error_receiver.try_recv().is_err());
    assert!(warning_receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
#[test]
fn hash_file_process_verify_ignore() {
    // A file matching `ignore_pattern` may disappear between create and
    // verify without failing verification.
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let ignore_file = test_shared::create_file_with_content(&dir, "ignore", "test");
    let mut processor_create = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    assert_eq!(processor_create.process(), HashFileProcessResult::Success);
    let mut processor_verify = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ignore_pattern: Some("ignore"),
        ..Default::default()
    });
    // Use the `fs::` path consistently with the rest of this file
    // (it already calls `fs::remove_dir_all`).
    fs::remove_file(ignore_file).expect("Failed to remove ignored file.");
    assert_eq!(processor_verify.process(), HashFileProcessResult::Success);
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
#[test]
fn hash_file_process_verify_match() {
    // With `match_pattern` only matching files are verified; a removed
    // non-matching file must not fail verification.
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let unmatched_file = test_shared::create_file_with_content(&dir, "unmatched", "test");
    let mut processor_create = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    assert_eq!(processor_create.process(), HashFileProcessResult::Success);
    let mut processor_verify = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        match_pattern: Some("file"),
        ..Default::default()
    });
    // Use the `fs::` path consistently with the rest of this file
    // (it already calls `fs::remove_dir_all`).
    fs::remove_file(unmatched_file).expect("Failed to remove unmatched file.");
    assert_eq!(processor_verify.process(), HashFileProcessResult::Success);
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
#[test]
fn hash_file_process_hashsum_create() {
    // Creating a hash file in `HashSum` format should write exactly the
    // expected checksum file content.
    let test_dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&test_dir, "file", "data");
    let mut hash_processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: test_dir.clone(),
        hash_type: Some(HashType::SHA1),
        hash_file_format: Some(HashFileFormat::HashSum),
        ..Default::default()
    });
    assert_eq!(hash_processor.process(), HashFileProcessResult::Success);
    let hashsum_path = test_dir.join(HASHSUM_SHA1_NAME);
    let written = test_shared::get_file_string_content(&hashsum_path);
    assert_eq!(written, HASHSUM_SHA1_CONTENT);
    fs::remove_dir_all(test_dir).expect("Failed to remove test directory.");
}
#[test]
fn hash_file_process_hashsum_verify() {
    // Verifying a matching hashsum file emits no error or warning events.
    let dir = test_shared::create_tmp_dir();
    let _ = test_shared::create_file_with_content(&dir, "file", "data");
    let _ = test_shared::create_file_with_content(&dir, HASHSUM_SHA1_NAME, HASHSUM_SHA1_CONTENT);
    let mut processor = HashFileProcessor::new(HashFileProcessOptions {
        base_path: dir.clone(),
        ..Default::default()
    });
    // A single channel backs both event kinds: clone once for the error
    // side and move the original into the warning side (the second
    // `clone()` in the original was redundant; clippy: redundant_clone).
    let (sender, receiver) = unbounded();
    processor.set_error_event_sender(sender.clone());
    processor.set_warning_event_sender(sender);
    assert_eq!(processor.process(), HashFileProcessResult::Success);
    assert!(receiver.try_recv().is_err());
    fs::remove_dir_all(dir).expect("Failed to remove test directory.");
}
|
use std::collections::HashSet;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::iter::FromIterator;
use std::path::Path;
/// Reads one integer per line from `input.txt`, then prints the answers
/// to both puzzle parts.
///
/// Panics if a line cannot be read or if no answer exists (same behavior
/// as the original `unwrap()` calls).
fn main() -> Result<(), std::io::Error> {
    let input_path = Path::new("input.txt");
    let reader = BufReader::new(File::open(input_path)?);
    // Explicit element type: don't rely on inference flowing back from the
    // callees' signatures. Unparsable lines are skipped, exactly like the
    // original `.map(...).flatten()` over `Result`s.
    let input: Vec<i32> = reader
        .lines()
        .filter_map(|line| line.unwrap().parse().ok())
        .collect();
    let p1 = part_01(&input);
    let p2 = part_02(&input);
    println!("{}", p1.unwrap());
    println!("{}", p2.unwrap());
    Ok(())
}
/// Finds two entries (at distinct positions) that sum to 2020 and returns
/// their product, or `None` if no such pair exists.
fn part_01(input: &Vec<i32>) -> Option<i32> {
    // Fast membership lookups for the complement of each entry.
    let numbers = HashSet::<i32>::from_iter(input.iter().cloned());
    input.iter().find_map(|&a| {
        let b = 2020 - a;
        // BUG FIX: when a == b (i.e. a == 1010) the plain set lookup would
        // match `a` itself; require two separate occurrences in that case.
        let found = if b == a {
            input.iter().filter(|&&x| x == a).count() >= 2
        } else {
            numbers.contains(&b)
        };
        if found {
            Some(a * b)
        } else {
            None
        }
    })
}
/// Finds three entries (at distinct positions) that sum to 2020 and returns
/// their product, or `None` if no such triple exists.
fn part_02(input: &Vec<i32>) -> Option<i32> {
    // Fast membership lookups for the third entry.
    let numbers = HashSet::<i32>::from_iter(input.iter().cloned());
    for (i, &a) in input.iter().enumerate() {
        for &b in &input[i + 1..] {
            let c = 2020 - a - b;
            // BUG FIX: when c collides with a or b, the plain set lookup
            // could "find" one of the entries already in use; require
            // enough separate occurrences of c in that case.
            let found = if c == a || c == b {
                let needed = 1 + (c == a) as usize + (c == b) as usize;
                input.iter().filter(|&&x| x == c).count() >= needed
            } else {
                numbers.contains(&c)
            };
            if found {
                return Some(a * b * c);
            }
        }
    }
    None
}
#[cfg(test)]
mod tests {
    /// Example-style input where exactly one pair sums to 2020.
    #[test]
    fn part_01() {
        let entries = vec![151, 1020, 1000, 500, 1500, 20];
        let answer = super::part_01(&entries);
        assert_eq!(answer.unwrap(), 1000 * 1020);
    }
    /// Example-style input where exactly one triple sums to 2020.
    #[test]
    fn part_02() {
        let entries = vec![151, 1020, 1000, 300, 440, 580];
        let answer = super::part_02(&entries);
        assert_eq!(answer.unwrap(), 1000 * 440 * 580);
    }
}
|
/// Utility Code For Creating Disp Arrays (array + fat pointer)
/// in LLVM
use super::{extract_type_from_pointer, CodegenResult, Context, LLVMInstruction, Object, Type};
/// array_value_pointer should not be an actual pointer, but the
/// index in the scope in which the pointer actually lives.
///
/// Builds a "disp array" object (see the module docs: array + fat pointer):
/// a stack-allocated struct holding { element pointer, length }.
/// Returns the allocated `Object` wrapping that struct.
pub fn create_array(
    context: &mut Context,
    subtype: &Type,
    array_value_pointer: usize,
    length: i64,
) -> CodegenResult<Object> {
    let array_type = Type::Array(Box::new(subtype.clone()));
    // the array type is a pointer to the struct, so
    // we need to get the actual struct type to construct
    // the underlying object.
    let struct_type = extract_type_from_pointer(context.compiler.llvm.types.get(&array_type));
    let object = context.allocate(array_type);
    // Reserve stack space for the { pointer, length } struct itself.
    context.add_instruction(LLVMInstruction::BuildAlloca {
        llvm_type: struct_type,
        target: object.index,
    });
    // GEP index constants: 0 selects the struct itself, 0/1 pick the field.
    let zero_value = context.const_i32(0);
    let one_value = context.const_i32(1);
    // assign the array pointer first
    let array_pointer = context.allocate_without_type();
    context.add_instruction(LLVMInstruction::BuildGEP {
        value: object.index,
        // first element of object pointer, first field
        indices: vec![zero_value.index, zero_value.index],
        target: array_pointer,
    });
    context.add_instruction(LLVMInstruction::BuildStore {
        source: array_value_pointer,
        target: array_pointer,
    });
    // set the length next
    let length_pointer = context.allocate_without_type();
    let length_value = context.allocate(Type::Int);
    context.add_instruction(LLVMInstruction::BuildGEP {
        value: object.index,
        // first element of object pointer, second field
        indices: vec![zero_value.index, one_value.index],
        target: length_pointer,
    });
    // Materialize the compile-time length as a constant...
    context.add_instruction(LLVMInstruction::ConstInt {
        value: length,
        target: length_value.index,
    });
    // ...and store it into the struct's second field.
    context.add_instruction(LLVMInstruction::BuildStore {
        source: length_value.index,
        target: length_pointer,
    });
    // finally, return the object
    Ok(object)
}
|
use super::Token;
use proc_macro2::{Ident, Span};
use quote::ToTokens;
use syn::punctuated::Punctuated;
use syn::{
ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod, ImplItemType, ItemImpl, ItemTrait,
TraitItem, TraitItemConst, TraitItemMacro, TraitItemMethod, TraitItemType, Visibility,
};
fn convert_method(m: ImplItemMethod) -> TraitItemMethod {
TraitItemMethod {
attrs: m.attrs,
sig: m.sig,
default: None,
semi_token: Some(Token)),
}
}
fn convert_constant(c: ImplItemConst) -> TraitItemConst {
TraitItemConst {
attrs: c.attrs,
const_token: c.const_token,
ident: c.ident,
colon_token: c.colon_token,
ty: c.ty,
default: None,
semi_token: c.semi_token,
}
}
fn convert_type(t: ImplItemType) -> TraitItemType {
TraitItemType {
attrs: t.attrs,
type_token: t.type_token,
ident: t.ident,
generics: t.generics,
colon_token: None,
bounds: Punctuated::new(),
default: None,
semi_token: t.semi_token,
}
}
fn convert_macro(m: ImplItemMacro) -> TraitItemMacro {
TraitItemMacro {
attrs: m.attrs,
mac: m.mac,
semi_token: m.semi_token,
}
}
/// Map a single impl item to the corresponding trait item declaration.
///
/// Panics (via `unimplemented!`) on item kinds unknown at the time of
/// writing, printing the offending tokens for diagnosis.
fn convert_item(i: ImplItem) -> TraitItem {
    match i {
        ImplItem::Const(c) => TraitItem::Const(convert_constant(c)),
        ImplItem::Method(m) => TraitItem::Method(convert_method(m)),
        ImplItem::Type(t) => TraitItem::Type(convert_type(t)),
        ImplItem::Macro(m) => TraitItem::Macro(convert_macro(m)),
        ImplItem::Verbatim(s) => TraitItem::Verbatim(s),
        // at the time of writing this, all valid ImplItems are covered above
        i => unimplemented!("Unsupported item: {}", i.into_token_stream()),
    }
}
/// Make a trait out of the inherent impl
pub fn to_trait(i: ItemImpl, vis: Visibility, trait_ident: Ident) -> ItemTrait {
ItemTrait {
attrs: i.attrs,
vis,
unsafety: i.unsafety,
auto_token: None,
trait_token: Token),
ident: trait_ident,
generics: i.generics,
colon_token: None,
supertraits: Punctuated::new(),
brace_token: i.brace_token,
items: i.items.into_iter().map(convert_item).collect(),
}
}
|
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::net::SocketAddr;
use tiny_http::{Response, Server};
/// A game-server announcement as posted (JSON) by the server itself.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ServerInfo {
    name: String,
    player_count: u8,
    max_players: u8,
    /// Free-form text; truncated to 256 bytes on registration in `main`.
    description: String,
    map: String,
    /// Advertised game port; combined with the announcing IP to key the list.
    port: u16,
    /// Stamped on registration; never (de)serialized (`serde(skip)`), so it
    /// is `None` right after deserialization until `main` sets it.
    #[serde(skip)]
    update_time: Option<std::time::Instant>,
}
/// Registry of known servers, keyed by announcing IP + advertised port.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ServerList(HashMap<SocketAddr, ServerInfo>);
/// Tiny master-server: POST registers/refreshes a server entry, any other
/// method returns the current list as JSON. Entries expire after 10 seconds
/// without a refresh.
fn main() {
    let server = Server::http("0.0.0.0:3692").unwrap();
    let mut server_list = ServerList(HashMap::new());
    for mut request in server.incoming_requests() {
        // Evict stale entries. `retain` works in place instead of cloning
        // the whole map on every request (the original's O(n) clone), and
        // drops entries with no timestamp instead of panicking on unwrap.
        server_list.0.retain(|_, info| match info.update_time {
            Some(t) => t.elapsed().as_secs() <= 10,
            None => false,
        });
        if request.method() == &tiny_http::Method::Post {
            // A POST is an announcement: parse the body and (re)register.
            let addr = request.remote_addr().clone();
            let mut content = String::new();
            let _ = request.as_reader().read_to_string(&mut content);
            if let Ok(server_info) = serde_json::from_str(&content) {
                let mut server_info: ServerInfo = server_info;
                // Cap the free-form description to keep responses bounded.
                server_info.description.truncate(256);
                // Key by announcing IP plus the advertised game port.
                let addr = SocketAddr::new(addr.ip(), server_info.port);
                server_info.update_time = Some(std::time::Instant::now());
                server_list.0.insert(addr, server_info);
                let _ = request.respond(Response::from_string("ok"));
            } else {
                let _ = request.respond(Response::from_string("err"));
            }
        } else {
            // Any other method: dump the current list as JSON.
            let response = Response::from_string(serde_json::to_string(&server_list).unwrap());
            let _ = request.respond(response);
        }
    }
}
|
// Auto-generated (svd2rust-style) read-only accessor for the OTPBLR_CUR
// register (FLASH non-secure OTP block lock, current value).
#[doc = "Register `OTPBLR_CUR` reader"]
pub type R = crate::R<OTPBLR_CUR_SPEC>;
#[doc = "Field `LOCKBL` reader - OTP block lock Block n corresponds to OTP 16-bit word 32 x n to 32 x n + 31. LOCKBL\\[n\\]
= 1 indicates that all OTP 16-bit words in OTP Block n are locked and attempt to program them results in WRPERR. LOCKBL\\[n\\]
= 0 indicates that all OTP 16-bit words in OTP Block n are not locked. When one block is locked, it’s not possible to remove the write protection. Also if not locked, it is not possible to erase OTP words."]
pub type LOCKBL_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - OTP block lock Block n corresponds to OTP 16-bit word 32 x n to 32 x n + 31. LOCKBL\\[n\\]
= 1 indicates that all OTP 16-bit words in OTP Block n are locked and attempt to program them results in WRPERR. LOCKBL\\[n\\]
= 0 indicates that all OTP 16-bit words in OTP Block n are not locked. When one block is locked, it’s not possible to remove the write protection. Also if not locked, it is not possible to erase OTP words."]
    #[inline(always)]
    pub fn lockbl(&self) -> LOCKBL_R {
        // The field spans the full 32-bit register, so no masking/shifting.
        LOCKBL_R::new(self.bits)
    }
}
#[doc = "FLASH non-secure OTP block lock\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`otpblr_cur::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OTPBLR_CUR_SPEC;
impl crate::RegisterSpec for OTPBLR_CUR_SPEC {
    // Registers on this peripheral are 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`otpblr_cur::R`](R) reader structure"]
impl crate::Readable for OTPBLR_CUR_SPEC {}
#[doc = "`reset()` method sets OTPBLR_CUR to value 0"]
impl crate::Resettable for OTPBLR_CUR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! A module which contains configuration options for a [`Grid`].
//!
//! [`Grid`]: crate::grid::iterable::Grid
mod borders_config;
mod entity_map;
mod formatting;
mod offset;
use std::collections::HashMap;
use crate::color::{AnsiColor, StaticColor};
use crate::config::compact::CompactConfig;
use crate::config::{
AlignmentHorizontal, AlignmentVertical, Border, Borders, Entity, Indent, Position, Sides,
};
use borders_config::BordersConfig;
pub use self::{entity_map::EntityMap, formatting::Formatting, offset::Offset};
/// This structure represents the settings of a grid.
///
/// grid: crate::Grid.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct SpannedConfig {
    /// Outer margin of the grid (indent + color + offset per side).
    margin: Sides<ColoredMarginIndent>,
    /// Per-cell padding (indent + color per side).
    padding: EntityMap<Sides<ColoredIndent>>,
    /// Per-cell horizontal alignment.
    alignment_h: EntityMap<AlignmentHorizontal>,
    /// Per-cell vertical alignment.
    alignment_v: EntityMap<AlignmentVertical>,
    /// Per-cell formatting settings.
    formatting: EntityMap<Formatting>,
    /// Column spans, keyed by cell position.
    span_columns: HashMap<Position, usize>,
    /// Row spans, keyed by cell position.
    span_rows: HashMap<Position, usize>,
    /// Border characters.
    borders: BordersConfig<char>,
    /// Border colors.
    borders_colors: BordersConfig<AnsiColor<'static>>,
    /// Fallback character used when borders are misconfigured.
    borders_missing_char: char,
    /// Per-line character overrides on horizontal borders.
    horizontal_chars: HashMap<Position, HashMap<Offset, char>>,
    /// Per-line color overrides on horizontal borders.
    horizontal_colors: HashMap<Position, HashMap<Offset, AnsiColor<'static>>>,
    /// Per-line character overrides on vertical borders.
    vertical_chars: HashMap<Position, HashMap<Offset, char>>,
    /// Per-line color overrides on vertical borders.
    vertical_colors: HashMap<Position, HashMap<Offset, AnsiColor<'static>>>,
    /// Fill character used while expanding cells.
    justification: EntityMap<char>,
    /// Optional color for the justification character (`None` = no color).
    justification_color: EntityMap<Option<AnsiColor<'static>>>,
}
impl Default for SpannedConfig {
    // Defaults: left/top alignment, space justification, no spans,
    // empty borders, and ' ' as the missing-border placeholder.
    fn default() -> Self {
        Self {
            margin: Sides::default(),
            padding: EntityMap::default(),
            formatting: EntityMap::default(),
            alignment_h: EntityMap::new(AlignmentHorizontal::Left),
            alignment_v: EntityMap::new(AlignmentVertical::Top),
            span_columns: HashMap::default(),
            span_rows: HashMap::default(),
            borders: BordersConfig::default(),
            borders_colors: BordersConfig::default(),
            borders_missing_char: ' ',
            horizontal_chars: HashMap::default(),
            horizontal_colors: HashMap::default(),
            vertical_chars: HashMap::default(),
            vertical_colors: HashMap::default(),
            justification: EntityMap::new(' '),
            justification_color: EntityMap::default(),
        }
    }
}
impl SpannedConfig {
    /// Set a margin of a grid.
    pub fn set_margin(&mut self, margin: Sides<Indent>) {
        self.margin.left.indent = margin.left;
        self.margin.right.indent = margin.right;
        self.margin.top.indent = margin.top;
        self.margin.bottom.indent = margin.bottom;
    }
    /// Set a color of margin of a grid.
    pub fn set_margin_color(&mut self, margin: Sides<Option<AnsiColor<'static>>>) {
        self.margin.left.color = margin.left;
        self.margin.right.color = margin.right;
        self.margin.top.color = margin.top;
        self.margin.bottom.color = margin.bottom;
    }
    /// Set an offset of margin of a grid.
    pub fn set_margin_offset(&mut self, margin: Sides<Offset>) {
        self.margin.left.offset = margin.left;
        self.margin.right.offset = margin.right;
        self.margin.top.offset = margin.top;
        self.margin.bottom.offset = margin.bottom;
    }
    /// Returns a margin value currently set.
    pub fn get_margin(&self) -> Sides<Indent> {
        Sides::new(
            self.margin.left.indent,
            self.margin.right.indent,
            self.margin.top.indent,
            self.margin.bottom.indent,
        )
    }
    /// Returns a margin color value currently set.
    pub fn get_margin_color(&self) -> Sides<Option<AnsiColor<'static>>> {
        Sides::new(
            self.margin.left.color.clone(),
            self.margin.right.color.clone(),
            self.margin.top.color.clone(),
            self.margin.bottom.color.clone(),
        )
    }
    /// Returns a margin offset value currently set.
    pub fn get_margin_offset(&self) -> Sides<Offset> {
        Sides::new(
            self.margin.left.offset,
            self.margin.right.offset,
            self.margin.top.offset,
            self.margin.bottom.offset,
        )
    }
    /// Clears all theme changes.
    /// And sets it to default.
    pub fn clear_theme(&mut self) {
        self.borders = BordersConfig::default();
        self.horizontal_chars.clear();
        self.vertical_chars.clear();
        self.horizontal_colors.clear();
        self.vertical_colors.clear();
    }
    /// Set the [`Borders`] value as correct one.
    pub fn set_borders(&mut self, borders: Borders<char>) {
        self.borders.set_borders(borders);
    }
    /// Gets a global border value if set.
    pub fn get_global_border(&self) -> Option<&char> {
        self.borders.get_global()
    }
    /// Set the all [`Borders`] values to a char.
    pub fn set_global_border(&mut self, c: char) {
        self.borders.set_global(c);
    }
    /// Returns a current [`Borders`] structure.
    pub fn get_borders(&self) -> &Borders<char> {
        self.borders.get_borders()
    }
    /// Set the border line by row index.
    ///
    /// Row `0` means the top row.
    /// Row `grid.count_rows()` means the bottom row.
    pub fn insert_horizontal_line(&mut self, line: usize, val: HorizontalLine) {
        self.borders.insert_horizontal_line(line, val);
    }
    /// Sets off the border line by row index if any were set
    ///
    /// Row `0` means the top row.
    /// Row `grid.count_rows()` means the bottom row.
    pub fn remove_horizontal_line(&mut self, line: usize, count_rows: usize) {
        self.borders.remove_horizontal_line(line, count_rows);
    }
    /// Gets an overridden vertical line.
    ///
    /// Row `0` means the left row.
    /// Row `grid.count_columns()` means the right most row.
    pub fn get_vertical_line(&self, line: usize) -> Option<&VerticalLine> {
        self.borders.get_vertical_line(line)
    }
    /// Set the border line by column index.
    ///
    /// Row `0` means the left row.
    /// Row `grid.count_columns()` means the right most row.
    pub fn insert_vertical_line(&mut self, line: usize, val: VerticalLine) {
        self.borders.insert_vertical_line(line, val);
    }
    /// Sets off the border line by column index if any were set
    ///
    /// Row `0` means the left row.
    /// Row `grid.count_columns()` means the right most row.
    pub fn remove_vertical_line(&mut self, line: usize, count_columns: usize) {
        self.borders.remove_vertical_line(line, count_columns);
    }
    /// Gets an overridden line.
    ///
    /// Row `0` means the top row.
    /// Row `grid.count_rows()` means the bottom row.
    pub fn get_horizontal_line(&self, line: usize) -> Option<&HorizontalLine> {
        self.borders.get_horizontal_line(line)
    }
    /// Override a character on a horizontal line.
    ///
    /// If borders are not set the char won't be used.
    ///
    /// It takes not cell position but line as row and column of a cell;
    /// So its range is line <= count_rows && col < count_columns.
    pub fn set_horizontal_char(&mut self, pos: Position, c: char, offset: Offset) {
        let chars = self
            .horizontal_chars
            .entry(pos)
            .or_insert_with(|| HashMap::with_capacity(1));
        chars.insert(offset, c);
    }
/// Get a list of overridden chars in a horizontal border.
///
/// It takes not cell position but line as row and column of a cell;
/// So its range is line <= count_rows && col < count_columns.
pub fn lookup_horizontal_char(&self, pos: Position, offset: usize, end: usize) -> Option<char> {
self.horizontal_chars
.get(&pos)
.and_then(|chars| {
chars.get(&Offset::Begin(offset)).or_else(|| {
if end > offset {
if end == 0 {
chars.get(&Offset::End(0))
} else {
chars.get(&Offset::End(end - offset - 1))
}
} else {
None
}
})
})
.copied()
}
    /// Checks if there is any char in a horizontal border being overridden.
    ///
    /// It takes not cell position but line as row and column of a cell;
    /// So its range is line <= count_rows && col < count_columns.
    pub fn is_overridden_horizontal(&self, pos: Position) -> bool {
        self.horizontal_chars.get(&pos).is_some()
    }
    /// Removes a list of overridden chars in a horizontal border.
    ///
    /// It takes not cell position but line as row and column of a cell;
    /// So its range is line <= count_rows && col < count_columns.
    pub fn remove_overridden_horizontal(&mut self, pos: Position) {
        self.horizontal_chars.remove(&pos);
    }
    /// Override a vertical split line.
    ///
    /// If borders are not set the char won't be used.
    ///
    /// It takes not cell position but cell row and column of a line;
    /// So its range is row < count_rows && col <= count_columns.
    pub fn set_vertical_char(&mut self, pos: Position, c: char, offset: Offset) {
        let chars = self
            .vertical_chars
            .entry(pos)
            .or_insert_with(|| HashMap::with_capacity(1));
        chars.insert(offset, c);
    }
/// Get a list of overridden chars in a horizontal border.
///
/// It takes not cell position but cell row and column of a line;
/// So its range is row < count_rows && col <= count_columns.
pub fn lookup_vertical_char(&self, pos: Position, offset: usize, end: usize) -> Option<char> {
self.vertical_chars
.get(&pos)
.and_then(|chars| {
chars.get(&Offset::Begin(offset)).or_else(|| {
if end > offset {
if end == 0 {
chars.get(&Offset::End(0))
} else {
chars.get(&Offset::End(end - offset - 1))
}
} else {
None
}
})
})
.copied()
}
    /// Checks if there is any char in a vertical border being overridden.
    ///
    /// It takes not cell position but cell row and column of a line;
    /// So its range is row < count_rows && col <= count_columns.
    pub fn is_overridden_vertical(&self, pos: Position) -> bool {
        self.vertical_chars.get(&pos).is_some()
    }
    /// Removes a list of overridden chars in a vertical border.
    ///
    /// It takes not cell position but cell row and column of a line;
    /// So its range is row < count_rows && col <= count_columns.
    pub fn remove_overridden_vertical(&mut self, pos: Position) {
        self.vertical_chars.remove(&pos);
    }
    /// Override a character color on a horizontal line.
    pub fn set_horizontal_color(&mut self, pos: Position, c: AnsiColor<'static>, offset: Offset) {
        let chars = self
            .horizontal_colors
            .entry(pos)
            .or_insert_with(|| HashMap::with_capacity(1));
        chars.insert(offset, c);
    }
/// Get a overridden color in a horizontal border.
pub fn lookup_horizontal_color(
&self,
pos: Position,
offset: usize,
end: usize,
) -> Option<&AnsiColor<'static>> {
self.horizontal_colors.get(&pos).and_then(|chars| {
chars.get(&Offset::Begin(offset)).or_else(|| {
if end > offset {
if end == 0 {
chars.get(&Offset::End(0))
} else {
chars.get(&Offset::End(end - offset - 1))
}
} else {
None
}
})
})
}
    /// Override a character color on a vertical line.
    ///
    /// Only one override is kept per offset (later calls replace earlier ones).
    pub fn set_vertical_color(&mut self, pos: Position, c: AnsiColor<'static>, offset: Offset) {
        let chars = self
            .vertical_colors
            .entry(pos)
            .or_insert_with(|| HashMap::with_capacity(1));
        chars.insert(offset, c);
    }
/// Get a overridden color in a vertical border.
pub fn lookup_vertical_color(
&self,
pos: Position,
offset: usize,
end: usize,
) -> Option<&AnsiColor<'static>> {
self.vertical_colors.get(&pos).and_then(|chars| {
chars.get(&Offset::Begin(offset)).or_else(|| {
if end > offset {
if end == 0 {
chars.get(&Offset::End(0))
} else {
chars.get(&Offset::End(end - offset - 1))
}
} else {
None
}
})
})
}
/// Set a padding to a given cells.
pub fn set_padding(&mut self, entity: Entity, padding: Sides<Indent>) {
let mut pad = self.padding.get(entity).clone();
pad.left.indent = padding.left;
pad.right.indent = padding.right;
pad.top.indent = padding.top;
pad.bottom.indent = padding.bottom;
self.padding.insert(entity, pad);
}
/// Set a padding to a given cells.
pub fn set_padding_color(
&mut self,
entity: Entity,
padding: Sides<Option<AnsiColor<'static>>>,
) {
let mut pad = self.padding.get(entity).clone();
pad.left.color = padding.left;
pad.right.color = padding.right;
pad.top.color = padding.top;
pad.bottom.color = padding.bottom;
self.padding.insert(entity, pad);
}
/// Get a padding for a given [Entity].
pub fn get_padding(&self, entity: Entity) -> Sides<Indent> {
let pad = self.padding.get(entity);
Sides::new(
pad.left.indent,
pad.right.indent,
pad.top.indent,
pad.bottom.indent,
)
}
/// Get a padding color for a given [Entity].
pub fn get_padding_color(&self, entity: Entity) -> Sides<Option<AnsiColor<'static>>> {
let pad = self.padding.get(entity);
Sides::new(
pad.left.color.clone(),
pad.right.color.clone(),
pad.top.color.clone(),
pad.bottom.color.clone(),
)
}
/// Set a formatting to a given cells.
pub fn set_formatting(&mut self, entity: Entity, formatting: Formatting) {
self.formatting.insert(entity, formatting);
}
/// Get a formatting settings for a given [Entity].
pub fn get_formatting(&self, entity: Entity) -> &Formatting {
self.formatting.get(entity)
}
/// Set a vertical alignment to a given cells.
pub fn set_alignment_vertical(&mut self, entity: Entity, alignment: AlignmentVertical) {
self.alignment_v.insert(entity, alignment);
}
/// Get a vertical alignment for a given [Entity].
pub fn get_alignment_vertical(&self, entity: Entity) -> &AlignmentVertical {
self.alignment_v.get(entity)
}
/// Set a horizontal alignment to a given cells.
pub fn set_alignment_horizontal(&mut self, entity: Entity, alignment: AlignmentHorizontal) {
self.alignment_h.insert(entity, alignment);
}
/// Get a horizontal alignment for a given [Entity].
pub fn get_alignment_horizontal(&self, entity: Entity) -> &AlignmentHorizontal {
self.alignment_h.get(entity)
}
/// Set border set a border value to all cells in [`Entity`].
pub fn set_border(&mut self, pos: Position, border: Border<char>) {
self.borders.insert_border(pos, border);
}
/// Returns a border of a cell.
pub fn get_border(&self, pos: Position, shape: (usize, usize)) -> Border<char> {
self.borders.get_border(pos, shape).copied()
}
/// Returns a border color of a cell.
pub fn get_border_color(
&self,
pos: Position,
shape: (usize, usize),
) -> Border<&AnsiColor<'static>> {
self.borders_colors.get_border(pos, shape)
}
/// Set a character which will be used in case any misconfiguration of borders.
///
/// It will be usde for example when you set a left char for border frame and top but didn't set a top left corner.
pub fn set_borders_missing(&mut self, c: char) {
self.borders_missing_char = c;
}
/// Get a character which will be used in case any misconfiguration of borders.
pub fn get_borders_missing(&self) -> char {
self.borders_missing_char
}
/// Gets a color of all borders on the grid.
pub fn get_border_color_global(&self) -> Option<&AnsiColor<'static>> {
self.borders_colors.get_global()
}
/// Sets a color of all borders on the grid.
pub fn set_border_color_global(&mut self, clr: AnsiColor<'static>) {
self.borders_colors = BordersConfig::default();
self.borders_colors.set_global(clr);
}
/// Gets colors of a borders carcass on the grid.
pub fn get_color_borders(&self) -> &Borders<AnsiColor<'static>> {
self.borders_colors.get_borders()
}
/// Sets colors of border carcass on the grid.
pub fn set_borders_color(&mut self, clrs: Borders<AnsiColor<'static>>) {
self.borders_colors.set_borders(clrs);
}
/// Sets a color of border of a cell on the grid.
pub fn set_border_color(&mut self, pos: Position, border: Border<AnsiColor<'static>>) {
self.borders_colors.insert_border(pos, border)
}
/// Sets off all borders possible on the [`Entity`].
///
/// It doesn't changes globally set borders through [`SpannedConfig::set_borders`].
//
// todo: would be great to remove a shape
pub fn remove_border(&mut self, pos: Position, shape: (usize, usize)) {
self.borders.remove_border(pos, shape);
}
/// Gets a color of border of a cell on the grid.
//
// todo: would be great to remove a shape
pub fn remove_border_color(&mut self, pos: Position, shape: (usize, usize)) {
self.borders_colors.remove_border(pos, shape);
}
/// Get a justification which will be used while expanding cells width/height.
pub fn get_justification(&self, entity: Entity) -> char {
*self.justification.get(entity)
}
/// Get a justification color which will be used while expanding cells width/height.
///
/// `None` means no color.
pub fn get_justification_color(&self, entity: Entity) -> Option<&AnsiColor<'static>> {
self.justification_color.get(entity).as_ref()
}
/// Set a justification which will be used while expanding cells width/height.
pub fn set_justification(&mut self, entity: Entity, c: char) {
self.justification.insert(entity, c);
}
/// Set a justification color which will be used while expanding cells width/height.
///
/// `None` removes it.
pub fn set_justification_color(&mut self, entity: Entity, color: Option<AnsiColor<'static>>) {
self.justification_color.insert(entity, color);
}
/// Get a span value of the cell, if any is set.
pub fn get_column_spans(&self) -> HashMap<Position, usize> {
self.span_columns.clone()
}
/// Get a span value of the cell, if any is set.
pub fn get_row_spans(&self) -> HashMap<Position, usize> {
self.span_rows.clone()
}
/// Get a span value of the cell, if any is set.
pub fn get_column_span(&self, pos: Position) -> Option<usize> {
self.span_columns.get(&pos).copied()
}
/// Get a span value of the cell, if any is set.
pub fn get_row_span(&self, pos: Position) -> Option<usize> {
self.span_rows.get(&pos).copied()
}
/// Removes column spans.
pub fn remove_column_spans(&mut self) {
self.span_columns.clear()
}
/// Removes row spans.
pub fn remove_row_spans(&mut self) {
self.span_rows.clear()
}
/// Set a column span to a given cells.
///
/// BEWARE
///
/// IT'S CALLER RESPONSIBILITY TO MAKE SURE
/// THAT THERE NO INTERSECTIONS IN PLACE AND THE SPAN VALUE IS CORRECT
pub fn set_column_span(&mut self, pos: Position, span: usize) {
set_cell_column_span(self, pos, span);
}
/// Verifies if there's any spans set.
pub fn has_column_spans(&self) -> bool {
!self.span_columns.is_empty()
}
/// Set a column span to a given cells.
///
/// BEWARE
///
/// IT'S CALLER RESPONSIBILITY TO MAKE SURE
/// THAT THERE NO INTERSECTIONS IN PLACE AND THE SPAN VALUE IS CORRECT
pub fn set_row_span(&mut self, pos: Position, span: usize) {
set_cell_row_span(self, pos, span);
}
/// Verifies if there's any spans set.
pub fn has_row_spans(&self) -> bool {
!self.span_rows.is_empty()
}
/// Gets an intersection character which would be rendered on the grid.
///
/// grid: crate::Grid
pub fn get_intersection(&self, pos: Position, shape: (usize, usize)) -> Option<char> {
let c = self.borders.get_intersection(pos, shape);
if let Some(c) = c {
return Some(*c);
}
if self.has_horizontal(pos.0, shape.0) && self.has_vertical(pos.1, shape.1) {
return Some(self.get_borders_missing());
}
None
}
/// Gets a horizontal character which would be rendered on the grid.
///
/// grid: crate::Grid
pub fn get_horizontal(&self, pos: Position, count_rows: usize) -> Option<char> {
let c = self.borders.get_horizontal(pos, count_rows);
if let Some(c) = c {
return Some(*c);
}
if self.has_horizontal(pos.0, count_rows) {
return Some(self.get_borders_missing());
}
None
}
/// Gets a vertical character which would be rendered on the grid.
///
/// grid: crate::Grid
pub fn get_vertical(&self, pos: Position, count_columns: usize) -> Option<char> {
if let Some(c) = self.borders.get_vertical(pos, count_columns) {
return Some(*c);
}
if self.has_vertical(pos.1, count_columns) {
return Some(self.get_borders_missing());
}
None
}
/// Gets a color of a cell horizontal.
pub fn get_horizontal_color(
&self,
pos: Position,
count_rows: usize,
) -> Option<&AnsiColor<'static>> {
self.borders_colors.get_horizontal(pos, count_rows)
}
/// Gets a color of a cell vertical.
pub fn get_vertical_color(
&self,
pos: Position,
count_columns: usize,
) -> Option<&AnsiColor<'static>> {
self.borders_colors.get_vertical(pos, count_columns)
}
/// Gets a color of a cell vertical.
pub fn get_intersection_color(
&self,
pos: Position,
shape: (usize, usize),
) -> Option<&AnsiColor<'static>> {
self.borders_colors.get_intersection(pos, shape)
}
/// Checks if grid would have a horizontal border with the current configuration.
///
/// grid: crate::Grid
pub fn has_horizontal(&self, row: usize, count_rows: usize) -> bool {
self.borders.has_horizontal(row, count_rows)
}
/// Checks if grid would have a vertical border with the current configuration.
///
/// grid: crate::Grid
pub fn has_vertical(&self, col: usize, count_columns: usize) -> bool {
self.borders.has_vertical(col, count_columns)
}
/// Calculates an amount of horizontal lines would present on the grid.
///
/// grid: crate::Grid
pub fn count_horizontal(&self, count_rows: usize) -> usize {
(0..=count_rows)
.filter(|&row| self.has_horizontal(row, count_rows))
.count()
}
/// Calculates an amount of vertical lines would present on the grid.
///
/// grid: crate::Grid
pub fn count_vertical(&self, count_columns: usize) -> usize {
(0..=count_columns)
.filter(|&col| self.has_vertical(col, count_columns))
.count()
}
/// The function returns whether the cells will be rendered or it will be hidden because of a span.
pub fn is_cell_visible(&self, pos: Position) -> bool {
    // A cell is hidden if any kind of span (column, row, or combined) covers it.
    let hidden = self.is_cell_covered_by_column_span(pos)
        || self.is_cell_covered_by_row_span(pos)
        || self.is_cell_covered_by_both_spans(pos);
    !hidden
}
/// The function checks if a cell is hidden because of a row span.
///
/// Thin wrapper around the free function of the same name.
pub fn is_cell_covered_by_row_span(&self, pos: Position) -> bool {
is_cell_covered_by_row_span(self, pos)
}
/// The function checks if a cell is hidden because of a column span.
///
/// Thin wrapper around the free function of the same name.
pub fn is_cell_covered_by_column_span(&self, pos: Position) -> bool {
is_cell_covered_by_column_span(self, pos)
}
/// The function checks if a cell is hidden indirectly because of a row and column span combination.
///
/// Thin wrapper around the free function of the same name.
pub fn is_cell_covered_by_both_spans(&self, pos: Position) -> bool {
is_cell_covered_by_both_spans(self, pos)
}
}
// Converts the cheaper, whole-grid CompactConfig into the richer per-cell
// SpannedConfig by applying every compact setting globally.
impl From<CompactConfig> for SpannedConfig {
fn from(compact: CompactConfig) -> Self {
use Entity::Global;
let mut cfg = Self::default();
// Transfer every global (whole-grid) setting onto the spanned config.
cfg.set_padding(Global, *compact.get_padding());
cfg.set_padding_color(Global, to_ansi_color(compact.get_padding_color()));
cfg.set_margin(*compact.get_margin());
cfg.set_margin_color(to_ansi_color(compact.get_margin_color()));
cfg.set_alignment_horizontal(Global, compact.get_alignment_horizontal());
cfg.set_borders(*compact.get_borders());
cfg.set_borders_color(borders_static_color_to_ansi_color(
*compact.get_borders_color(),
));
// A compact config may carry one special "first" horizontal line; it is
// inserted at line index 1 (the line drawn after the first row).
if let Some(line) = compact.get_first_horizontal_line() {
cfg.insert_horizontal_line(
1,
HorizontalLine {
intersection: line.intersection,
left: line.connect1,
right: line.connect2,
main: Some(line.main),
},
);
}
cfg
}
}
// Converts a set of mandatory static side colors into the optional ANSI form;
// every side is always present (`Some`).
fn to_ansi_color(b: Sides<StaticColor>) -> Sides<Option<AnsiColor<'static>>> {
    let left = Some(b.left.into());
    let right = Some(b.right.into());
    let top = Some(b.top.into());
    let bottom = Some(b.bottom.into());
    Sides::new(left, right, top, bottom)
}
// Converts every optional static border-color fragment into its ANSI form.
fn borders_static_color_to_ansi_color(b: Borders<StaticColor>) -> Borders<AnsiColor<'static>> {
    // One shared converter keeps the 15 field conversions uniform.
    let conv = |c: Option<StaticColor>| -> Option<AnsiColor<'static>> { c.map(|c| c.into()) };
    Borders {
        left: conv(b.left),
        right: conv(b.right),
        top: conv(b.top),
        bottom: conv(b.bottom),
        bottom_intersection: conv(b.bottom_intersection),
        bottom_left: conv(b.bottom_left),
        bottom_right: conv(b.bottom_right),
        horizontal: conv(b.horizontal),
        intersection: conv(b.intersection),
        left_intersection: conv(b.left_intersection),
        right_intersection: conv(b.right_intersection),
        top_intersection: conv(b.top_intersection),
        top_left: conv(b.top_left),
        top_right: conv(b.top_right),
        vertical: conv(b.vertical),
    }
}
// Records a row span for `pos`, treating 0 as unsupported and 1 as "no span".
fn set_cell_row_span(cfg: &mut SpannedConfig, pos: Position, span: usize) {
    match span {
        // Zero spans aren't supported; ignore the request entirely.
        0 => {}
        // A span of 1 is the default, so drop any previously set override.
        1 => {
            cfg.span_rows.remove(&pos);
        }
        _ => {
            cfg.span_rows.insert(pos, span);
        }
    }
}
// Records a column span for `pos`, treating 0 as unsupported and 1 as "no span".
fn set_cell_column_span(cfg: &mut SpannedConfig, pos: Position, span: usize) {
    match span {
        // Zero spans aren't supported; ignore the request entirely.
        0 => {}
        // A span of 1 is the default, so drop any previously set override.
        1 => {
            cfg.span_columns.remove(&pos);
        }
        _ => {
            cfg.span_columns.insert(pos, span);
        }
    }
}
// A cell is covered by a column span when some span in the same row starts
// strictly to its left and extends strictly past it.
fn is_cell_covered_by_column_span(cfg: &SpannedConfig, pos: Position) -> bool {
    let (target_row, target_col) = pos;
    cfg.span_columns
        .iter()
        .any(|(&(row, col), &span)| row == target_row && target_col > col && target_col < col + span)
}
// A cell is covered by a row span when some span in the same column starts
// strictly above it and extends strictly past it.
fn is_cell_covered_by_row_span(cfg: &SpannedConfig, pos: Position) -> bool {
    let (target_row, target_col) = pos;
    cfg.span_rows
        .iter()
        .any(|(&(row, col), &span)| col == target_col && target_row > row && target_row < row + span)
}
// A cell is covered "by both spans" when a single origin cell carries both a
// row span and a column span whose rectangle strictly contains `pos`.
fn is_cell_covered_by_both_spans(cfg: &SpannedConfig, pos: Position) -> bool {
    // Fast path: combined coverage needs both kinds of spans to exist.
    if !cfg.has_row_spans() || !cfg.has_column_spans() {
        return false;
    }
    cfg.span_rows.iter().any(|(origin, row_span)| {
        let inside_rows = pos.0 > origin.0 && pos.0 < origin.0 + row_span;
        inside_rows
            && cfg
                .span_columns
                .iter()
                .any(|(p, col_span)| p == origin && pos.1 > origin.1 && pos.1 < origin.1 + col_span)
    })
}
/// An indent paired with an optional color used when rendering it.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
struct ColoredIndent {
// The indent (fill) itself.
indent: Indent,
// Optional rendering color; defaults to `None` (via `derive(Default)`).
color: Option<AnsiColor<'static>>,
}
/// A colorful margin indent.
#[derive(Debug, Clone, PartialEq, Eq)]
struct ColoredMarginIndent {
/// An indent value.
indent: Indent,
/// An offset value.
offset: Offset,
/// A color value.
color: Option<AnsiColor<'static>>,
}
// Manual impl: the default offset is pinned to `Offset::Begin(0)`
// (presumably `Offset` has no `Default` of its own — confirm).
impl Default for ColoredMarginIndent {
fn default() -> Self {
Self {
indent: Indent::default(),
offset: Offset::Begin(0),
color: None,
}
}
}
/// HorizontalLine represents a horizontal border line.
pub type HorizontalLine = borders_config::HorizontalLine<char>;
/// VerticalLine represents a vertical border line.
pub type VerticalLine = borders_config::VerticalLine<char>;
|
use gl;
use std;
//TODO
//Do a derive(GLResource) that adds gl_handle to types.
//Is that even possible?
/// RAII wrapper around an OpenGL vertex array object handle.
#[derive(Debug, Default)]
pub struct VertexArrayObj {
// Raw GL handle; 0 (the `Default`) means "no GL object generated yet".
gl_handle: u32,
}
/// RAII wrapper around an OpenGL vertex buffer object handle.
#[derive(Debug, Default)]
pub struct VertexBufferObj {
// Raw GL handle; 0 (the `Default`) means "no GL object generated yet".
gl_handle: u32,
}
impl VertexBufferObj {
    /// Creates a new VBO by generating a fresh GL buffer handle.
    ///
    /// Requires a current OpenGL context.
    pub fn new() -> VertexBufferObj {
        let mut vbo = VertexBufferObj { gl_handle: 0 };
        unsafe {
            gl::GenBuffers(1, &mut vbo.gl_handle);
        }
        vbo
    }
    /// Uploads `size` bytes starting at `data` into this buffer with
    /// `GL_STATIC_DRAW` usage (binds it to `GL_ARRAY_BUFFER` first).
    ///
    /// The caller must guarantee `data` points to at least `size` valid
    /// bytes for the duration of the call.
    pub fn set_buffer_data_from_raw_ptr(&mut self, data: *const std::os::raw::c_void, size: isize) {
        unsafe {
            gl::BindBuffer(gl::ARRAY_BUFFER, self.gl_handle);
            // `size` is already an `isize` (GLsizeiptr); the former
            // `size as isize` cast was a no-op and has been removed.
            gl::BufferData(gl::ARRAY_BUFFER, size, data, gl::STATIC_DRAW);
        }
    }
    /// Uploads a slice of `f32` vertex data into this buffer with
    /// `GL_STATIC_DRAW` usage.
    pub fn set_buffer_data(&mut self, data: &[f32]) {
        unsafe {
            gl::BindBuffer(gl::ARRAY_BUFFER, self.gl_handle);
            gl::BufferData(
                gl::ARRAY_BUFFER,
                (std::mem::size_of::<f32>() * data.len()) as isize,
                data.as_ptr() as *const _,
                gl::STATIC_DRAW,
            );
        }
    }
    // Enable vertex attrib at location and describe the data there.
    // This is the same as "location = bla" in the vertex shader code.
    /// Enables the vertex attribute at `location` and describes its layout
    /// in the currently bound `GL_ARRAY_BUFFER`: `component_count` floats
    /// per vertex, `stride` bytes between vertices, starting `offset`
    /// bytes into the buffer.
    ///
    /// NOTE(review): `stride as i32` truncates for values above `i32::MAX`
    /// — harmless for realistic vertex layouts, but kept as-is.
    pub fn describe_data(&self, location: u32, component_count: i32, stride: usize, offset: usize) {
        unsafe {
            gl::EnableVertexAttribArray(location);
            gl::VertexAttribPointer(
                location,
                component_count,
                gl::FLOAT,
                gl::FALSE,
                stride as i32,
                offset as *const _,
            );
        }
    }
    /// Returns the raw OpenGL handle of this buffer.
    pub fn gl_handle(&self) -> u32 {
        self.gl_handle
    }
}
impl VertexArrayObj {
/// Creates a new VAO by generating a fresh GL vertex-array handle.
///
/// Requires a current OpenGL context.
pub fn new() -> VertexArrayObj {
let mut vao: VertexArrayObj = VertexArrayObj::default();
unsafe {
gl::GenVertexArrays(1, &mut vao.gl_handle);
}
vao
}
/// Makes this VAO the currently bound vertex array.
pub fn bind(&self) {
unsafe {
gl::BindVertexArray(self.gl_handle);
}
}
/// Unbinds any currently bound vertex array (binds handle 0).
pub fn unbind(&self) {
unsafe {
gl::BindVertexArray(0);
}
}
}
// Frees the GL vertex array when the wrapper goes out of scope.
// NOTE(review): assumes a current GL context at drop time — confirm.
impl std::ops::Drop for VertexArrayObj {
fn drop(&mut self) {
unsafe {
gl::DeleteVertexArrays(1, &self.gl_handle);
}
}
}
// Frees the GL buffer when the wrapper goes out of scope.
// NOTE(review): assumes a current GL context at drop time — confirm.
impl std::ops::Drop for VertexBufferObj {
fn drop(&mut self) {
unsafe {
gl::DeleteBuffers(1, &self.gl_handle);
}
}
}
|
/// bindings for ARINC653P1-5 3.7.2.4 events
pub mod basic {
use crate::bindings::*;
use crate::Locked;
/// ARINC653P1-5 3.7.1
pub type EventName = ApexName;
/// ARINC653P1-5 3.7.1
///
/// According to ARINC 653P1-5 this may either be 32 or 64 bits.
/// Internally we will use 64-bit by default.
/// The implementing Hypervisor may cast this to 32-bit if needed
pub type EventId = ApexLongInteger;
/// ARINC653P1-5 3.7.1
// Discriminants are pinned to 32 bits (`repr(u32)`) so the 0/1 values can
// cross the APEX ABI boundary unchanged.
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "strum", derive(strum::FromRepr))]
pub enum EventState {
/// inactive
Down = 0,
/// active
Up = 1,
}
/// ARINC653P1-5 3.7.1
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EventStatus {
// Current up/down state of the event.
pub event_state: EventState,
// Processes waiting on this event (EVENT_STATUS_TYPE, ARINC653P1-5 3.7.1).
pub waiting_processes: WaitingRange,
}
/// ARINC653P1-5 3.7.2.4 required functions for event functionality
// Every function takes an `L: Locked` marker; see `crate::Locked`.
pub trait ApexEventP1 {
/// APEX653P1-5 3.7.2.4.1
///
/// # Errors
/// - [ErrorReturnCode::InvalidConfig]: not enough memory is available
/// - [ErrorReturnCode::InvalidConfig]: [ApexLimits::SYSTEM_LIMIT_NUMBER_OF_EVENTS](crate::bindings::ApexLimits::SYSTEM_LIMIT_NUMBER_OF_EVENTS) was reached
/// - [ErrorReturnCode::NoAction]: an event with given `event_name` already exists
/// - [ErrorReturnCode::InvalidMode]: our current operating mode is [OperatingMode::Normal](crate::prelude::OperatingMode::Normal)
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn create_event<L: Locked>(event_name: EventName) -> Result<EventId, ErrorReturnCode>;
/// APEX653P1-5 3.7.2.4.2 changes events state to [EventState::Up]
///
/// # Errors
/// - [ErrorReturnCode::InvalidParam]: event with `event_id` does not exist
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn set_event<L: Locked>(event_id: EventId) -> Result<(), ErrorReturnCode>;
/// APEX653P1-5 3.7.2.4.3 changes events state to [EventState::Down]
///
/// # Errors
/// - [ErrorReturnCode::InvalidParam]: event with `event_id` does not exist
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn reset_event<L: Locked>(event_id: EventId) -> Result<(), ErrorReturnCode>;
/// APEX653P1-5 3.7.2.4.4
///
/// # Errors
/// - [ErrorReturnCode::InvalidParam]: event with `event_id` does not exist
/// - [ErrorReturnCode::InvalidParam]: `time_out` is invalid
/// - [ErrorReturnCode::InvalidMode]: current process holds a mutex
/// - [ErrorReturnCode::InvalidMode]: current process is error handler AND `time_out` is not instant.
/// - [ErrorReturnCode::NotAvailable]: `time_out` is instant AND event is [EventState::Down]
/// - [ErrorReturnCode::TimedOut]: `time_out` elapsed
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn wait_event<L: Locked>(
event_id: EventId,
time_out: ApexSystemTime,
) -> Result<(), ErrorReturnCode>;
/// APEX653P1-5 3.7.2.4.5
///
/// # Errors
/// - [ErrorReturnCode::InvalidConfig]: event with `event_name` does not exist
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn get_event_id<L: Locked>(event_name: EventName) -> Result<EventId, ErrorReturnCode>;
/// APEX653P1-5 3.7.2.4.6
///
/// # Errors
/// - [ErrorReturnCode::InvalidParam]: event with `event_id` does not exist
#[cfg_attr(not(feature = "full_doc"), doc(hidden))]
fn get_event_status<L: Locked>(event_id: EventId) -> Result<EventStatus, ErrorReturnCode>;
}
}
/// abstraction for ARINC653P1-5 3.7.2.4 events
pub mod abstraction {
use core::marker::PhantomData;
use core::sync::atomic::AtomicPtr;
// Reexport important basic-types for downstream-user
pub use super::basic::{EventId, EventState, EventStatus};
use crate::bindings::*;
use crate::hidden::Key;
use crate::prelude::*;
/// Event abstraction struct
#[derive(Debug)]
pub struct Event<E: ApexEventP1> {
// Zero-sized marker tying this event to its hypervisor implementation `E`.
// NOTE(review): `AtomicPtr` is presumably used so the auto-traits
// (Send/Sync) do not depend on `E` itself — confirm.
_b: PhantomData<AtomicPtr<E>>,
// Hypervisor-assigned id, obtained at creation/lookup time.
id: EventId,
}
// Manual impl: a derived `Clone` would add an unnecessary `E: Clone` bound.
impl<E: ApexEventP1> Clone for Event<E> {
fn clone(&self) -> Self {
Self {
_b: self._b,
id: self.id,
}
}
}
/// Free extra functions for implementer of [ApexEventP1]
pub trait ApexEventP1Ext: ApexEventP1 + Sized {
/// # Errors
/// - [Error::InvalidConfig]: event with `name` does not exist
fn get_event(name: Name) -> Result<Event<Self>, Error>;
}
impl<E: ApexEventP1> ApexEventP1Ext for E {
fn get_event(name: Name) -> Result<Event<E>, Error> {
// Looks the id up via the raw binding; `Key` is the crate-internal
// locking token (see crate::hidden::Key).
let id = E::get_event_id::<Key>(name.into())?;
Ok(Event {
_b: Default::default(),
id,
})
}
}
impl<E: ApexEventP1> Event<E> {
/// # Errors
/// - [Error::InvalidConfig]: event with `name` does not exist
pub fn from_name(name: Name) -> Result<Event<E>, Error> {
E::get_event(name)
}
// Returns the hypervisor-assigned id of this event.
pub fn id(&self) -> EventId {
self.id
}
/// Change to [EventState::Up]
///
/// # Panics
/// if this event does not exist anymore
pub fn set(&self) {
// According to ARINC653P1-5 3.7.2.4.2 this can only fail if the event_id
// does not exist in the current partition.
// But since we retrieve the event_id directly from the hypervisor
// there is no possible way for it not existing
E::set_event::<Key>(self.id).unwrap();
}
/// Change to [EventState::Down]
///
/// # Panics
/// if this event does not exist anymore
pub fn reset(&self) {
// According to ARINC653P1-5 3.7.2.4.3 this can only fail if the event_id
// does not exist in the current partition.
// But since we retrieve the event_id directly from the hypervisor
// there is no possible way for it not existing
E::reset_event::<Key>(self.id).unwrap();
}
/// wait for this event to occur
///
/// # Errors
/// - [Error::InvalidParam]: `timeout` is invalid
/// - [Error::InvalidMode]: current process holds a mutex
/// - [Error::InvalidMode]: current process is error handler AND `timeout` is not instant.
/// - [Error::NotAvailable]: `timeout` is instant AND event is [EventState::Down]
/// - [Error::TimedOut]: `timeout` elapsed
pub fn wait(&self, timeout: SystemTime) -> Result<(), Error> {
E::wait_event::<Key>(self.id, timeout.into())?;
Ok(())
}
/// get current event status
///
/// # Panics
/// if this event does not exist anymore
pub fn status(&self) -> EventStatus {
// According to ARINC653P1-5 3.7.2.4.6 this can only fail if the event_id
// does not exist in the current partition.
// But since we retrieve the event_id directly from the hypervisor
// there is no possible way for it not existing
E::get_event_status::<Key>(self.id).unwrap()
}
}
impl<E: ApexEventP1> StartContext<E> {
/// # Errors
/// - [Error::InvalidConfig]: not enough memory is available
/// - [Error::InvalidConfig]: [ApexLimits::SYSTEM_LIMIT_NUMBER_OF_EVENTS](crate::bindings::ApexLimits::SYSTEM_LIMIT_NUMBER_OF_EVENTS) was reached
/// - [Error::NoAction]: an event with given `name` already exists
pub fn create_event(&mut self, name: Name) -> Result<Event<E>, Error> {
// Creation is only exposed on StartContext, i.e. before normal mode.
let id = E::create_event::<Key>(name.into())?;
Ok(Event {
_b: Default::default(),
id,
})
}
}
}
|
pub mod tools;
pub mod myerror;
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::PADREGL {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot the register once, hand the closure
        // both a read view and a write view, then write back the result.
        let current = self.register.get();
        let r = R { bits: current };
        let mut w = W { bits: current };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        let bits = self.register.get();
        R { bits }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value and let the closure fill in fields.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `PAD47FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD47FNCSELR {
#[doc = "Configure as the 32kHz output clock from the crystal value."]
_32KHZXT,
#[doc = "IOM/MSPI nCE group 47 value."]
NCE47,
#[doc = "CTIMER connection 26 value."]
CT26,
#[doc = "Configure as GPIO47 value."]
GPIO47,
#[doc = "Configure as the IOMSTR5 SPI MOSI output signal value."]
M5MOSI,
#[doc = "Configure as the UART1 RX input signal value."]
UART1RX,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl PAD47FNCSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PAD47FNCSELR::_32KHZXT => 0,
PAD47FNCSELR::NCE47 => 1,
PAD47FNCSELR::CT26 => 2,
PAD47FNCSELR::GPIO47 => 3,
PAD47FNCSELR::M5MOSI => 5,
PAD47FNCSELR::UART1RX => 6,
PAD47FNCSELR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PAD47FNCSELR {
match value {
0 => PAD47FNCSELR::_32KHZXT,
1 => PAD47FNCSELR::NCE47,
2 => PAD47FNCSELR::CT26,
3 => PAD47FNCSELR::GPIO47,
5 => PAD47FNCSELR::M5MOSI,
6 => PAD47FNCSELR::UART1RX,
i => PAD47FNCSELR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_32KHZXT`"]
#[inline]
pub fn is_32k_hz_xt(&self) -> bool {
*self == PAD47FNCSELR::_32KHZXT
}
#[doc = "Checks if the value of the field is `NCE47`"]
#[inline]
pub fn is_nce47(&self) -> bool {
*self == PAD47FNCSELR::NCE47
}
#[doc = "Checks if the value of the field is `CT26`"]
#[inline]
pub fn is_ct26(&self) -> bool {
*self == PAD47FNCSELR::CT26
}
#[doc = "Checks if the value of the field is `GPIO47`"]
#[inline]
pub fn is_gpio47(&self) -> bool {
*self == PAD47FNCSELR::GPIO47
}
#[doc = "Checks if the value of the field is `M5MOSI`"]
#[inline]
pub fn is_m5mosi(&self) -> bool {
*self == PAD47FNCSELR::M5MOSI
}
#[doc = "Checks if the value of the field is `UART1RX`"]
#[inline]
pub fn is_uart1rx(&self) -> bool {
*self == PAD47FNCSELR::UART1RX
}
}
#[doc = "Possible values of the field `PAD47STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD47STRNGR {
#[doc = "Low drive strength value."]
LOW,
#[doc = "High drive strength value."]
HIGH,
}
impl PAD47STRNGR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD47STRNGR::LOW => false,
PAD47STRNGR::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD47STRNGR {
match value {
false => PAD47STRNGR::LOW,
true => PAD47STRNGR::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PAD47STRNGR::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PAD47STRNGR::HIGH
}
}
#[doc = "Possible values of the field `PAD47INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD47INPENR {
#[doc = "Pad input disabled value."]
DIS,
#[doc = "Pad input enabled value."]
EN,
}
impl PAD47INPENR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD47INPENR::DIS => false,
PAD47INPENR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD47INPENR {
match value {
false => PAD47INPENR::DIS,
true => PAD47INPENR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD47INPENR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD47INPENR::EN
}
}
#[doc = "Possible values of the field `PAD47PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD47PULLR {
#[doc = "Pullup disabled value."]
DIS,
#[doc = "Pullup enabled value."]
EN,
}
impl PAD47PULLR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD47PULLR::DIS => false,
PAD47PULLR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD47PULLR {
match value {
false => PAD47PULLR::DIS,
true => PAD47PULLR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD47PULLR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD47PULLR::EN
}
}
#[doc = "Possible values of the field `PAD46FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD46FNCSELR {
#[doc = "Configure as the 32kHz output clock from the crystal value."]
_32KHZ_XT,
#[doc = "IOM/MSPI nCE group 46 value."]
NCE46,
#[doc = "CTIMER connection 24 value."]
CT24,
#[doc = "Configure as GPIO46 value."]
GPIO46,
#[doc = "SCARD reset output value."]
SCCRST,
#[doc = "PDM serial clock output value."]
PDMCLK,
#[doc = "Configure as the UART1 TX output signal value."]
UART1TX,
#[doc = "Configure as the serial wire debug SWO signal value."]
SWO,
}
impl PAD46FNCSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PAD46FNCSELR::_32KHZ_XT => 0,
PAD46FNCSELR::NCE46 => 1,
PAD46FNCSELR::CT24 => 2,
PAD46FNCSELR::GPIO46 => 3,
PAD46FNCSELR::SCCRST => 4,
PAD46FNCSELR::PDMCLK => 5,
PAD46FNCSELR::UART1TX => 6,
PAD46FNCSELR::SWO => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PAD46FNCSELR {
match value {
0 => PAD46FNCSELR::_32KHZ_XT,
1 => PAD46FNCSELR::NCE46,
2 => PAD46FNCSELR::CT24,
3 => PAD46FNCSELR::GPIO46,
4 => PAD46FNCSELR::SCCRST,
5 => PAD46FNCSELR::PDMCLK,
6 => PAD46FNCSELR::UART1TX,
7 => PAD46FNCSELR::SWO,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_32KHZ_XT`"]
#[inline]
pub fn is_32khz_xt(&self) -> bool {
*self == PAD46FNCSELR::_32KHZ_XT
}
#[doc = "Checks if the value of the field is `NCE46`"]
#[inline]
pub fn is_nce46(&self) -> bool {
*self == PAD46FNCSELR::NCE46
}
#[doc = "Checks if the value of the field is `CT24`"]
#[inline]
pub fn is_ct24(&self) -> bool {
*self == PAD46FNCSELR::CT24
}
#[doc = "Checks if the value of the field is `GPIO46`"]
#[inline]
pub fn is_gpio46(&self) -> bool {
*self == PAD46FNCSELR::GPIO46
}
#[doc = "Checks if the value of the field is `SCCRST`"]
#[inline]
pub fn is_sccrst(&self) -> bool {
*self == PAD46FNCSELR::SCCRST
}
#[doc = "Checks if the value of the field is `PDMCLK`"]
#[inline]
pub fn is_pdmclk(&self) -> bool {
*self == PAD46FNCSELR::PDMCLK
}
#[doc = "Checks if the value of the field is `UART1TX`"]
#[inline]
pub fn is_uart1tx(&self) -> bool {
*self == PAD46FNCSELR::UART1TX
}
#[doc = "Checks if the value of the field is `SWO`"]
#[inline]
pub fn is_swo(&self) -> bool {
*self == PAD46FNCSELR::SWO
}
}
#[doc = "Possible values of the field `PAD46STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD46STRNGR {
#[doc = "Low drive strength value."]
LOW,
#[doc = "High drive strength value."]
HIGH,
}
impl PAD46STRNGR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD46STRNGR::LOW => false,
PAD46STRNGR::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD46STRNGR {
match value {
false => PAD46STRNGR::LOW,
true => PAD46STRNGR::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PAD46STRNGR::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PAD46STRNGR::HIGH
}
}
#[doc = "Possible values of the field `PAD46INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD46INPENR {
#[doc = "Pad input disabled value."]
DIS,
#[doc = "Pad input enabled value."]
EN,
}
impl PAD46INPENR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD46INPENR::DIS => false,
PAD46INPENR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD46INPENR {
match value {
false => PAD46INPENR::DIS,
true => PAD46INPENR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD46INPENR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD46INPENR::EN
}
}
#[doc = "Possible values of the field `PAD46PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD46PULLR {
#[doc = "Pullup disabled value."]
DIS,
#[doc = "Pullup enabled value."]
EN,
}
impl PAD46PULLR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD46PULLR::DIS => false,
PAD46PULLR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD46PULLR {
match value {
false => PAD46PULLR::DIS,
true => PAD46PULLR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD46PULLR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD46PULLR::EN
}
}
#[doc = "Possible values of the field `PAD45FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD45FNCSELR {
#[doc = "Configure as the UART1 CTS input signal value."]
UA1CTS,
#[doc = "IOM/MSPI nCE group 45 value."]
NCE45,
#[doc = "CTIMER connection 22 value."]
CT22,
#[doc = "Configure as GPIO45 value."]
GPIO45,
#[doc = "I2S serial data output value."]
I2SDAT,
#[doc = "PDM serial data input value."]
PDMDATA,
#[doc = "Configure as the SPI channel 5 nCE signal from IOMSTR5 value."]
UART0RX,
#[doc = "Configure as the serial wire debug SWO signal value."]
SWO,
}
impl PAD45FNCSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PAD45FNCSELR::UA1CTS => 0,
PAD45FNCSELR::NCE45 => 1,
PAD45FNCSELR::CT22 => 2,
PAD45FNCSELR::GPIO45 => 3,
PAD45FNCSELR::I2SDAT => 4,
PAD45FNCSELR::PDMDATA => 5,
PAD45FNCSELR::UART0RX => 6,
PAD45FNCSELR::SWO => 7,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PAD45FNCSELR {
match value {
0 => PAD45FNCSELR::UA1CTS,
1 => PAD45FNCSELR::NCE45,
2 => PAD45FNCSELR::CT22,
3 => PAD45FNCSELR::GPIO45,
4 => PAD45FNCSELR::I2SDAT,
5 => PAD45FNCSELR::PDMDATA,
6 => PAD45FNCSELR::UART0RX,
7 => PAD45FNCSELR::SWO,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `UA1CTS`"]
#[inline]
pub fn is_ua1cts(&self) -> bool {
*self == PAD45FNCSELR::UA1CTS
}
#[doc = "Checks if the value of the field is `NCE45`"]
#[inline]
pub fn is_nce45(&self) -> bool {
*self == PAD45FNCSELR::NCE45
}
#[doc = "Checks if the value of the field is `CT22`"]
#[inline]
pub fn is_ct22(&self) -> bool {
*self == PAD45FNCSELR::CT22
}
#[doc = "Checks if the value of the field is `GPIO45`"]
#[inline]
pub fn is_gpio45(&self) -> bool {
*self == PAD45FNCSELR::GPIO45
}
#[doc = "Checks if the value of the field is `I2SDAT`"]
#[inline]
pub fn is_i2sdat(&self) -> bool {
*self == PAD45FNCSELR::I2SDAT
}
#[doc = "Checks if the value of the field is `PDMDATA`"]
#[inline]
pub fn is_pdmdata(&self) -> bool {
*self == PAD45FNCSELR::PDMDATA
}
#[doc = "Checks if the value of the field is `UART0RX`"]
#[inline]
pub fn is_uart0rx(&self) -> bool {
*self == PAD45FNCSELR::UART0RX
}
#[doc = "Checks if the value of the field is `SWO`"]
#[inline]
pub fn is_swo(&self) -> bool {
*self == PAD45FNCSELR::SWO
}
}
#[doc = "Possible values of the field `PAD45STRNG`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD45STRNGR {
#[doc = "Low drive strength value."]
LOW,
#[doc = "High drive strength value."]
HIGH,
}
impl PAD45STRNGR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD45STRNGR::LOW => false,
PAD45STRNGR::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD45STRNGR {
match value {
false => PAD45STRNGR::LOW,
true => PAD45STRNGR::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PAD45STRNGR::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PAD45STRNGR::HIGH
}
}
#[doc = "Possible values of the field `PAD45INPEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD45INPENR {
#[doc = "Pad input disabled value."]
DIS,
#[doc = "Pad input enabled value."]
EN,
}
impl PAD45INPENR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD45INPENR::DIS => false,
PAD45INPENR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD45INPENR {
match value {
false => PAD45INPENR::DIS,
true => PAD45INPENR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD45INPENR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD45INPENR::EN
}
}
#[doc = "Possible values of the field `PAD45PULL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD45PULLR {
#[doc = "Pullup disabled value."]
DIS,
#[doc = "Pullup enabled value."]
EN,
}
impl PAD45PULLR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PAD45PULLR::DIS => false,
PAD45PULLR::EN => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PAD45PULLR {
match value {
false => PAD45PULLR::DIS,
true => PAD45PULLR::EN,
}
}
#[doc = "Checks if the value of the field is `DIS`"]
#[inline]
pub fn is_dis(&self) -> bool {
*self == PAD45PULLR::DIS
}
#[doc = "Checks if the value of the field is `EN`"]
#[inline]
pub fn is_en(&self) -> bool {
*self == PAD45PULLR::EN
}
}
#[doc = "Possible values of the field `PAD44FNCSEL`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD44FNCSELR {
#[doc = "Configure as the UART1 RTS output signal value."]
UA1RTS,
#[doc = "IOM/MSPI nCE group 44 value."]
NCE44,
#[doc = "CTIMER connection 20 value."]
CT20,
#[doc = "Configure as GPIO44 value."]
GPIO44,
#[doc = "Configure as the IOMSTR4 SPI MOSI signal value."]
M4MOSI,
#[doc = "Configure as the SPI channel 6 nCE signal from IOMSTR5 value."]
M5NCE6,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl PAD44FNCSELR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
PAD44FNCSELR::UA1RTS => 0,
PAD44FNCSELR::NCE44 => 1,
PAD44FNCSELR::CT20 => 2,
PAD44FNCSELR::GPIO44 => 3,
PAD44FNCSELR::M4MOSI => 5,
PAD44FNCSELR::M5NCE6 => 6,
PAD44FNCSELR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> PAD44FNCSELR {
match value {
0 => PAD44FNCSELR::UA1RTS,
1 => PAD44FNCSELR::NCE44,
2 => PAD44FNCSELR::CT20,
3 => PAD44FNCSELR::GPIO44,
5 => PAD44FNCSELR::M4MOSI,
6 => PAD44FNCSELR::M5NCE6,
i => PAD44FNCSELR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `UA1RTS`"]
#[inline]
pub fn is_ua1rts(&self) -> bool {
*self == PAD44FNCSELR::UA1RTS
}
#[doc = "Checks if the value of the field is `NCE44`"]
#[inline]
pub fn is_nce44(&self) -> bool {
*self == PAD44FNCSELR::NCE44
}
#[doc = "Checks if the value of the field is `CT20`"]
#[inline]
pub fn is_ct20(&self) -> bool {
*self == PAD44FNCSELR::CT20
}
#[doc = "Checks if the value of the field is `GPIO44`"]
#[inline]
pub fn is_gpio44(&self) -> bool {
*self == PAD44FNCSELR::GPIO44
}
#[doc = "Checks if the value of the field is `M4MOSI`"]
#[inline]
pub fn is_m4mosi(&self) -> bool {
*self == PAD44FNCSELR::M4MOSI
}
#[doc = "Checks if the value of the field is `M5NCE6`"]
#[inline]
pub fn is_m5n_ce6(&self) -> bool {
*self == PAD44FNCSELR::M5NCE6
}
}
/// Possible values of the field `PAD44STRNG`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD44STRNGR {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD44STRNGR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        *self == PAD44STRNGR::HIGH
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD44STRNGR {
        if value {
            PAD44STRNGR::HIGH
        } else {
            PAD44STRNGR::LOW
        }
    }
    /// Checks if the value of the field is `LOW`
    #[inline]
    pub fn is_low(&self) -> bool {
        !self.bit()
    }
    /// Checks if the value of the field is `HIGH`
    #[inline]
    pub fn is_high(&self) -> bool {
        self.bit()
    }
}
/// Possible values of the field `PAD44INPEN`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD44INPENR {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD44INPENR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        *self == PAD44INPENR::EN
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD44INPENR {
        if value {
            PAD44INPENR::EN
        } else {
            PAD44INPENR::DIS
        }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        self.bit()
    }
}
/// Possible values of the field `PAD44PULL`
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD44PULLR {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD44PULLR {
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        *self == PAD44PULLR::EN
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD44PULLR {
        if value {
            PAD44PULLR::EN
        } else {
            PAD44PULLR::DIS
        }
    }
    /// Checks if the value of the field is `DIS`
    #[inline]
    pub fn is_dis(&self) -> bool {
        !self.bit()
    }
    /// Checks if the value of the field is `EN`
    #[inline]
    pub fn is_en(&self) -> bool {
        self.bit()
    }
}
/// Values that can be written to the field `PAD47FNCSEL`
pub enum PAD47FNCSELW {
    /// Configure as the 32kHz output clock from the crystal value.
    _32KHZXT,
    /// IOM/MSPI nCE group 47 value.
    NCE47,
    /// CTIMER connection 26 value.
    CT26,
    /// Configure as GPIO47 value.
    GPIO47,
    /// Configure as the IOMSTR5 SPI MOSI output signal value.
    M5MOSI,
    /// Configure as the UART1 RX input signal value.
    UART1RX,
}
impl PAD47FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Encodings 4 and 7 are not named, hence the gap after GPIO47.
        match *self {
            PAD47FNCSELW::_32KHZXT => 0,
            PAD47FNCSELW::NCE47 => 1,
            PAD47FNCSELW::CT26 => 2,
            PAD47FNCSELW::GPIO47 => 3,
            PAD47FNCSELW::M5MOSI => 5,
            PAD47FNCSELW::UART1RX => 6,
        }
    }
}
/// Proxy for writing the `PAD47FNCSEL` field
pub struct _PAD47FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD47FNCSELW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD47FNCSELW) -> &'a mut W {
        // SAFETY: every named variant encodes to a defined pad function.
        unsafe { self.bits(variant._bits()) }
    }
    /// Configure as the 32kHz output clock from the crystal value.
    #[inline]
    pub fn _32k_hz_xt(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::_32KHZXT)
    }
    /// IOM/MSPI nCE group 47 value.
    #[inline]
    pub fn nce47(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::NCE47)
    }
    /// CTIMER connection 26 value.
    #[inline]
    pub fn ct26(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::CT26)
    }
    /// Configure as GPIO47 value.
    #[inline]
    pub fn gpio47(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::GPIO47)
    }
    /// Configure as the IOMSTR5 SPI MOSI output signal value.
    #[inline]
    pub fn m5mosi(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::M5MOSI)
    }
    /// Configure as the UART1 RX input signal value.
    #[inline]
    pub fn uart1rx(self) -> &'a mut W {
        self.variant(PAD47FNCSELW::UART1RX)
    }
    /// Writes raw bits to the field; `unsafe` because raw values may
    /// select a reserved pad function (4 or 7 have no named variant).
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of the 3-bit field at bits 27:29.
        const MASK: u8 = 7;
        const OFFSET: u8 = 27;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD47STRNG`
pub enum PAD47STRNGW {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD47STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        match *self {
            PAD47STRNGW::LOW => false,
            PAD47STRNGW::HIGH => true,
        }
    }
}
/// Proxy for writing the `PAD47STRNG` field
pub struct _PAD47STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD47STRNGW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD47STRNGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Low drive strength value.
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD47STRNGW::LOW)
    }
    /// High drive strength value.
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD47STRNGW::HIGH)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 26 in the register image.
        const OFFSET: u8 = 26;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD47INPEN`
pub enum PAD47INPENW {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD47INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD47INPENW::DIS => false,
            PAD47INPENW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD47INPEN` field
pub struct _PAD47INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD47INPENW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD47INPENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pad input disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD47INPENW::DIS)
    }
    /// Pad input enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD47INPENW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 25 in the register image.
        const OFFSET: u8 = 25;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD47PULL`
pub enum PAD47PULLW {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD47PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD47PULLW::DIS => false,
            PAD47PULLW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD47PULL` field
pub struct _PAD47PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD47PULLW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD47PULLW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pullup disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD47PULLW::DIS)
    }
    /// Pullup enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD47PULLW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 24 in the register image.
        const OFFSET: u8 = 24;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD46FNCSEL`
pub enum PAD46FNCSELW {
    /// Configure as the 32kHz output clock from the crystal value.
    _32KHZ_XT,
    /// IOM/MSPI nCE group 46 value.
    NCE46,
    /// CTIMER connection 24 value.
    CT24,
    /// Configure as GPIO46 value.
    GPIO46,
    /// SCARD reset output value.
    SCCRST,
    /// PDM serial clock output value.
    PDMCLK,
    /// Configure as the UART1 TX output signal value.
    UART1TX,
    /// Configure as the serial wire debug SWO signal value.
    SWO,
}
impl PAD46FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // All 8 encodings of the 3-bit field are named, so `bits` below is safe.
        match *self {
            PAD46FNCSELW::_32KHZ_XT => 0,
            PAD46FNCSELW::NCE46 => 1,
            PAD46FNCSELW::CT24 => 2,
            PAD46FNCSELW::GPIO46 => 3,
            PAD46FNCSELW::SCCRST => 4,
            PAD46FNCSELW::PDMCLK => 5,
            PAD46FNCSELW::UART1TX => 6,
            PAD46FNCSELW::SWO => 7,
        }
    }
}
/// Proxy for writing the `PAD46FNCSEL` field
pub struct _PAD46FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD46FNCSELW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD46FNCSELW) -> &'a mut W {
        self.bits(variant._bits())
    }
    /// Configure as the 32kHz output clock from the crystal value.
    #[inline]
    pub fn _32khz_xt(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::_32KHZ_XT)
    }
    /// IOM/MSPI nCE group 46 value.
    #[inline]
    pub fn nce46(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::NCE46)
    }
    /// CTIMER connection 24 value.
    #[inline]
    pub fn ct24(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::CT24)
    }
    /// Configure as GPIO46 value.
    #[inline]
    pub fn gpio46(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::GPIO46)
    }
    /// SCARD reset output value.
    #[inline]
    pub fn sccrst(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::SCCRST)
    }
    /// PDM serial clock output value.
    #[inline]
    pub fn pdmclk(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::PDMCLK)
    }
    /// Configure as the UART1 TX output signal value.
    #[inline]
    pub fn uart1tx(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::UART1TX)
    }
    /// Configure as the serial wire debug SWO signal value.
    #[inline]
    pub fn swo(self) -> &'a mut W {
        self.variant(PAD46FNCSELW::SWO)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of the 3-bit field at bits 19:21.
        const MASK: u8 = 7;
        const OFFSET: u8 = 19;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD46STRNG`
pub enum PAD46STRNGW {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD46STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        match *self {
            PAD46STRNGW::LOW => false,
            PAD46STRNGW::HIGH => true,
        }
    }
}
/// Proxy for writing the `PAD46STRNG` field
pub struct _PAD46STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD46STRNGW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD46STRNGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Low drive strength value.
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD46STRNGW::LOW)
    }
    /// High drive strength value.
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD46STRNGW::HIGH)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 18 in the register image.
        const OFFSET: u8 = 18;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD46INPEN`
pub enum PAD46INPENW {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD46INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD46INPENW::DIS => false,
            PAD46INPENW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD46INPEN` field
pub struct _PAD46INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD46INPENW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD46INPENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pad input disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD46INPENW::DIS)
    }
    /// Pad input enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD46INPENW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 17 in the register image.
        const OFFSET: u8 = 17;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD46PULL`
pub enum PAD46PULLW {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD46PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD46PULLW::DIS => false,
            PAD46PULLW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD46PULL` field
pub struct _PAD46PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD46PULLW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD46PULLW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pullup disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD46PULLW::DIS)
    }
    /// Pullup enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD46PULLW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 16 in the register image.
        const OFFSET: u8 = 16;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD45FNCSEL`
pub enum PAD45FNCSELW {
    /// Configure as the UART1 CTS input signal value.
    UA1CTS,
    /// IOM/MSPI nCE group 45 value.
    NCE45,
    /// CTIMER connection 22 value.
    CT22,
    /// Configure as GPIO45 value.
    GPIO45,
    /// I2S serial data output value.
    I2SDAT,
    /// PDM serial data input value.
    PDMDATA,
    /// Configure as the UART0 RX input signal value.
    // NOTE(review): the original description read "SPI channel 5 nCE signal from
    // IOMSTR5", contradicting the identifier `UART0RX`; description aligned with
    // the identifier (and with the UART1RX wording used for pad 47) — confirm
    // against the Apollo datasheet pad function table.
    UART0RX,
    /// Configure as the serial wire debug SWO signal value.
    SWO,
}
impl PAD45FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // All 8 encodings of the 3-bit field are named, so `bits` below is safe.
        match *self {
            PAD45FNCSELW::UA1CTS => 0,
            PAD45FNCSELW::NCE45 => 1,
            PAD45FNCSELW::CT22 => 2,
            PAD45FNCSELW::GPIO45 => 3,
            PAD45FNCSELW::I2SDAT => 4,
            PAD45FNCSELW::PDMDATA => 5,
            PAD45FNCSELW::UART0RX => 6,
            PAD45FNCSELW::SWO => 7,
        }
    }
}
/// Proxy for writing the `PAD45FNCSEL` field
pub struct _PAD45FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD45FNCSELW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD45FNCSELW) -> &'a mut W {
        self.bits(variant._bits())
    }
    /// Configure as the UART1 CTS input signal value.
    #[inline]
    pub fn ua1cts(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::UA1CTS)
    }
    /// IOM/MSPI nCE group 45 value.
    #[inline]
    pub fn nce45(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::NCE45)
    }
    /// CTIMER connection 22 value.
    #[inline]
    pub fn ct22(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::CT22)
    }
    /// Configure as GPIO45 value.
    #[inline]
    pub fn gpio45(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::GPIO45)
    }
    /// I2S serial data output value.
    #[inline]
    pub fn i2sdat(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::I2SDAT)
    }
    /// PDM serial data input value.
    #[inline]
    pub fn pdmdata(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::PDMDATA)
    }
    /// Configure as the UART0 RX input signal value.
    // NOTE(review): description aligned with the identifier, see the enum variant.
    #[inline]
    pub fn uart0rx(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::UART0RX)
    }
    /// Configure as the serial wire debug SWO signal value.
    #[inline]
    pub fn swo(self) -> &'a mut W {
        self.variant(PAD45FNCSELW::SWO)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of the 3-bit field at bits 11:13.
        const MASK: u8 = 7;
        const OFFSET: u8 = 11;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD45STRNG`
pub enum PAD45STRNGW {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD45STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        match *self {
            PAD45STRNGW::LOW => false,
            PAD45STRNGW::HIGH => true,
        }
    }
}
/// Proxy for writing the `PAD45STRNG` field
pub struct _PAD45STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD45STRNGW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD45STRNGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Low drive strength value.
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD45STRNGW::LOW)
    }
    /// High drive strength value.
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD45STRNGW::HIGH)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 10 in the register image.
        const OFFSET: u8 = 10;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD45INPEN`
pub enum PAD45INPENW {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD45INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD45INPENW::DIS => false,
            PAD45INPENW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD45INPEN` field
pub struct _PAD45INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD45INPENW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD45INPENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pad input disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD45INPENW::DIS)
    }
    /// Pad input enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD45INPENW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 9 in the register image.
        const OFFSET: u8 = 9;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD45PULL`
pub enum PAD45PULLW {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD45PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD45PULLW::DIS => false,
            PAD45PULLW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD45PULL` field
pub struct _PAD45PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD45PULLW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD45PULLW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pullup disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD45PULLW::DIS)
    }
    /// Pullup enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD45PULLW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 8 in the register image.
        const OFFSET: u8 = 8;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD44FNCSEL`
pub enum PAD44FNCSELW {
    /// Configure as the UART1 RTS output signal value.
    UA1RTS,
    /// IOM/MSPI nCE group 44 value.
    NCE44,
    /// CTIMER connection 20 value.
    CT20,
    /// Configure as GPIO44 value.
    GPIO44,
    /// Configure as the IOMSTR4 SPI MOSI signal value.
    M4MOSI,
    /// Configure as the SPI channel 6 nCE signal from IOMSTR5 value.
    M5NCE6,
}
impl PAD44FNCSELW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Encodings 4 and 7 are not named, hence the gap after GPIO44.
        match *self {
            PAD44FNCSELW::UA1RTS => 0,
            PAD44FNCSELW::NCE44 => 1,
            PAD44FNCSELW::CT20 => 2,
            PAD44FNCSELW::GPIO44 => 3,
            PAD44FNCSELW::M4MOSI => 5,
            PAD44FNCSELW::M5NCE6 => 6,
        }
    }
}
/// Proxy for writing the `PAD44FNCSEL` field
pub struct _PAD44FNCSELW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD44FNCSELW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD44FNCSELW) -> &'a mut W {
        // SAFETY: every named variant encodes to a defined pad function.
        unsafe { self.bits(variant._bits()) }
    }
    /// Configure as the UART1 RTS output signal value.
    #[inline]
    pub fn ua1rts(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::UA1RTS)
    }
    /// IOM/MSPI nCE group 44 value.
    #[inline]
    pub fn nce44(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::NCE44)
    }
    /// CTIMER connection 20 value.
    #[inline]
    pub fn ct20(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::CT20)
    }
    /// Configure as GPIO44 value.
    #[inline]
    pub fn gpio44(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::GPIO44)
    }
    /// Configure as the IOMSTR4 SPI MOSI signal value.
    #[inline]
    pub fn m4mosi(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::M4MOSI)
    }
    /// Configure as the SPI channel 6 nCE signal from IOMSTR5 value.
    #[inline]
    pub fn m5n_ce6(self) -> &'a mut W {
        self.variant(PAD44FNCSELW::M5NCE6)
    }
    /// Writes raw bits to the field; `unsafe` because raw values may
    /// select a reserved pad function (4 or 7 have no named variant).
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of the 3-bit field at bits 3:5.
        const MASK: u8 = 7;
        const OFFSET: u8 = 3;
        self.w.bits = (self.w.bits & !((MASK as u32) << OFFSET))
            | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD44STRNG`
pub enum PAD44STRNGW {
    /// Low drive strength value.
    LOW,
    /// High drive strength value.
    HIGH,
}
impl PAD44STRNGW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as 1, LOW as 0.
        match *self {
            PAD44STRNGW::LOW => false,
            PAD44STRNGW::HIGH => true,
        }
    }
}
/// Proxy for writing the `PAD44STRNG` field
pub struct _PAD44STRNGW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD44STRNGW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD44STRNGW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Low drive strength value.
    #[inline]
    pub fn low(self) -> &'a mut W {
        self.variant(PAD44STRNGW::LOW)
    }
    /// High drive strength value.
    #[inline]
    pub fn high(self) -> &'a mut W {
        self.variant(PAD44STRNGW::HIGH)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 2 in the register image.
        const OFFSET: u8 = 2;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD44INPEN`
pub enum PAD44INPENW {
    /// Pad input disabled value.
    DIS,
    /// Pad input enabled value.
    EN,
}
impl PAD44INPENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD44INPENW::DIS => false,
            PAD44INPENW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD44INPEN` field
pub struct _PAD44INPENW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD44INPENW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD44INPENW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pad input disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD44INPENW::DIS)
    }
    /// Pad input enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD44INPENW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 1 in the register image.
        const OFFSET: u8 = 1;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `PAD44PULL`
pub enum PAD44PULLW {
    /// Pullup disabled value.
    DIS,
    /// Pullup enabled value.
    EN,
}
impl PAD44PULLW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // EN encodes as 1, DIS as 0.
        match *self {
            PAD44PULLW::DIS => false,
            PAD44PULLW::EN => true,
        }
    }
}
/// Proxy for writing the `PAD44PULL` field
pub struct _PAD44PULLW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD44PULLW<'a> {
    /// Writes `variant` to the field
    #[inline]
    pub fn variant(self, variant: PAD44PULLW) -> &'a mut W {
        self.bit(variant._bits())
    }
    /// Pullup disabled value.
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(PAD44PULLW::DIS)
    }
    /// Pullup enabled value.
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(PAD44PULLW::EN)
    }
    /// Sets the field bit
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 in the register image.
        const OFFSET: u8 = 0;
        self.w.bits = (self.w.bits & !(1u32 << OFFSET)) | ((value as u32) << OFFSET);
        self.w
    }
}
impl R {
    /// Value of the register as raw bits
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Bits 27:29 - Pad 47 function select
    #[inline]
    pub fn pad47fncsel(&self) -> PAD47FNCSELR {
        PAD47FNCSELR::_from(((self.bits >> 27) & 0x07) as u8)
    }
    /// Bit 26 - Pad 47 drive strength
    #[inline]
    pub fn pad47strng(&self) -> PAD47STRNGR {
        PAD47STRNGR::_from(((self.bits >> 26) & 0x01) != 0)
    }
    /// Bit 25 - Pad 47 input enable
    #[inline]
    pub fn pad47inpen(&self) -> PAD47INPENR {
        PAD47INPENR::_from(((self.bits >> 25) & 0x01) != 0)
    }
    /// Bit 24 - Pad 47 pullup enable
    #[inline]
    pub fn pad47pull(&self) -> PAD47PULLR {
        PAD47PULLR::_from(((self.bits >> 24) & 0x01) != 0)
    }
    /// Bits 19:21 - Pad 46 function select
    #[inline]
    pub fn pad46fncsel(&self) -> PAD46FNCSELR {
        PAD46FNCSELR::_from(((self.bits >> 19) & 0x07) as u8)
    }
    /// Bit 18 - Pad 46 drive strength
    #[inline]
    pub fn pad46strng(&self) -> PAD46STRNGR {
        PAD46STRNGR::_from(((self.bits >> 18) & 0x01) != 0)
    }
    /// Bit 17 - Pad 46 input enable
    #[inline]
    pub fn pad46inpen(&self) -> PAD46INPENR {
        PAD46INPENR::_from(((self.bits >> 17) & 0x01) != 0)
    }
    /// Bit 16 - Pad 46 pullup enable
    #[inline]
    pub fn pad46pull(&self) -> PAD46PULLR {
        PAD46PULLR::_from(((self.bits >> 16) & 0x01) != 0)
    }
    /// Bits 11:13 - Pad 45 function select
    #[inline]
    pub fn pad45fncsel(&self) -> PAD45FNCSELR {
        PAD45FNCSELR::_from(((self.bits >> 11) & 0x07) as u8)
    }
    /// Bit 10 - Pad 45 drive strength
    #[inline]
    pub fn pad45strng(&self) -> PAD45STRNGR {
        PAD45STRNGR::_from(((self.bits >> 10) & 0x01) != 0)
    }
    /// Bit 9 - Pad 45 input enable
    #[inline]
    pub fn pad45inpen(&self) -> PAD45INPENR {
        PAD45INPENR::_from(((self.bits >> 9) & 0x01) != 0)
    }
    /// Bit 8 - Pad 45 pullup enable
    #[inline]
    pub fn pad45pull(&self) -> PAD45PULLR {
        PAD45PULLR::_from(((self.bits >> 8) & 0x01) != 0)
    }
    /// Bits 3:5 - Pad 44 function select
    #[inline]
    pub fn pad44fncsel(&self) -> PAD44FNCSELR {
        PAD44FNCSELR::_from(((self.bits >> 3) & 0x07) as u8)
    }
    /// Bit 2 - Pad 44 drive strength
    #[inline]
    pub fn pad44strng(&self) -> PAD44STRNGR {
        PAD44STRNGR::_from(((self.bits >> 2) & 0x01) != 0)
    }
    /// Bit 1 - Pad 44 input enable
    #[inline]
    pub fn pad44inpen(&self) -> PAD44INPENR {
        PAD44INPENR::_from(((self.bits >> 1) & 0x01) != 0)
    }
    /// Bit 0 - Pad 44 pullup enable
    #[inline]
    pub fn pad44pull(&self) -> PAD44PULLR {
        PAD44PULLR::_from((self.bits & 0x01) != 0)
    }
}
impl W {
    /// Reset value of the register
    #[inline]
    pub fn reset_value() -> W {
        // 0x1818_1818 (decimal 404232216): every pad field group resets to
        // FNCSEL = 3 (the GPIO function per the `_from` tables above) with
        // drive strength, input enable and pullup bits all clear.
        W { bits: 0x1818_1818 }
    }
    /// Writes raw bits to the register
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    /// Bits 27:29 - Pad 47 function select
    #[inline]
    pub fn pad47fncsel(&mut self) -> _PAD47FNCSELW {
        _PAD47FNCSELW { w: self }
    }
    /// Bit 26 - Pad 47 drive strength
    #[inline]
    pub fn pad47strng(&mut self) -> _PAD47STRNGW {
        _PAD47STRNGW { w: self }
    }
    /// Bit 25 - Pad 47 input enable
    #[inline]
    pub fn pad47inpen(&mut self) -> _PAD47INPENW {
        _PAD47INPENW { w: self }
    }
    /// Bit 24 - Pad 47 pullup enable
    #[inline]
    pub fn pad47pull(&mut self) -> _PAD47PULLW {
        _PAD47PULLW { w: self }
    }
    /// Bits 19:21 - Pad 46 function select
    #[inline]
    pub fn pad46fncsel(&mut self) -> _PAD46FNCSELW {
        _PAD46FNCSELW { w: self }
    }
    /// Bit 18 - Pad 46 drive strength
    #[inline]
    pub fn pad46strng(&mut self) -> _PAD46STRNGW {
        _PAD46STRNGW { w: self }
    }
    /// Bit 17 - Pad 46 input enable
    #[inline]
    pub fn pad46inpen(&mut self) -> _PAD46INPENW {
        _PAD46INPENW { w: self }
    }
    /// Bit 16 - Pad 46 pullup enable
    #[inline]
    pub fn pad46pull(&mut self) -> _PAD46PULLW {
        _PAD46PULLW { w: self }
    }
    /// Bits 11:13 - Pad 45 function select
    #[inline]
    pub fn pad45fncsel(&mut self) -> _PAD45FNCSELW {
        _PAD45FNCSELW { w: self }
    }
    /// Bit 10 - Pad 45 drive strength
    #[inline]
    pub fn pad45strng(&mut self) -> _PAD45STRNGW {
        _PAD45STRNGW { w: self }
    }
    /// Bit 9 - Pad 45 input enable
    #[inline]
    pub fn pad45inpen(&mut self) -> _PAD45INPENW {
        _PAD45INPENW { w: self }
    }
    /// Bit 8 - Pad 45 pullup enable
    #[inline]
    pub fn pad45pull(&mut self) -> _PAD45PULLW {
        _PAD45PULLW { w: self }
    }
    /// Bits 3:5 - Pad 44 function select
    #[inline]
    pub fn pad44fncsel(&mut self) -> _PAD44FNCSELW {
        _PAD44FNCSELW { w: self }
    }
    /// Bit 2 - Pad 44 drive strength
    #[inline]
    pub fn pad44strng(&mut self) -> _PAD44STRNGW {
        _PAD44STRNGW { w: self }
    }
    /// Bit 1 - Pad 44 input enable
    #[inline]
    pub fn pad44inpen(&mut self) -> _PAD44INPENW {
        _PAD44INPENW { w: self }
    }
    /// Bit 0 - Pad 44 pullup enable
    #[inline]
    pub fn pad44pull(&mut self) -> _PAD44PULLW {
        _PAD44PULLW { w: self }
    }
}
|
use crate::BufferedReader;
use std::io::{BufWriter, Write};
use base64::encode;
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::RawStream;
use nu_protocol::{
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Value,
};
use reqwest::blocking::Response;
use std::collections::HashMap;
use std::io::BufReader;
use reqwest::StatusCode;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::time::Duration;
// Marker type implementing the `fetch` command; it carries no state of its own.
#[derive(Clone)]
pub struct SubCommand;
impl Command for SubCommand {
    // Command name as typed by the user.
    fn name(&self) -> &str {
        "fetch"
    }

    // Declares the positional URL, all flags, and the command category for the
    // parser and the help system.
    fn signature(&self) -> Signature {
        Signature::build("fetch")
            .required(
                "URL",
                SyntaxShape::String,
                "the URL to fetch the contents from",
            )
            .named(
                "user",
                SyntaxShape::Any,
                "the username when authenticating",
                Some('u'),
            )
            .named(
                "password",
                SyntaxShape::Any,
                "the password when authenticating",
                Some('p'),
            )
            .named(
                "timeout",
                SyntaxShape::Int,
                "timeout period in seconds",
                Some('t'),
            )
            .named(
                "headers",
                SyntaxShape::Any,
                "custom headers you want to add ",
                Some('H'),
            )
            .switch(
                "raw",
                "fetch contents as text rather than a table",
                Some('r'),
            )
            .named(
                "output",
                SyntaxShape::Filepath,
                "save contents into a file",
                Some('o'),
            )
            .switch(
                "bin",
                "if saving into a file, save as raw binary",
                Some('b'),
            )
            .switch(
                "append",
                "if saving into a file, append to end of file",
                Some('a'),
            )
            .filter()
            .category(Category::Network)
    }

    fn usage(&self) -> &str {
        "Fetch the contents from a URL."
    }

    fn extra_usage(&self) -> &str {
        "Performs HTTP GET operation."
    }

    fn search_terms(&self) -> Vec<&str> {
        vec![
            "network", "get", "pull", "request", "http", "download", "curl", "wget",
        ]
    }

    // Runs the fetch. Without --output the response is returned as pipeline data;
    // with --output it is optionally converted (`to <ext>`) and written to a file.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        let output = call.has_flag("output");
        if !output {
            run_fetch(engine_state, stack, call, input)
        } else {
            match run_fetch(engine_state, stack, call, input) {
                Err(err) => Err(err),
                Ok(value) => {
                    // --output is known to be present here, so the flag value must
                    // exist; the expects encode that parser-backed invariant.
                    let path: Value = call
                        .get_flag(engine_state, stack, "output")
                        .expect("there should be a value")
                        .expect("value should be unwrappable");
                    let bin = call.has_flag("bin");
                    let append = call.has_flag("append");
                    let span = call.head;
                    let path = &path.as_string().expect("path should be a string");
                    let path = Path::new(path);
                    // Append only when asked to AND the file already exists;
                    // otherwise truncate/create.
                    let file = match (append, path.exists()) {
                        (true, true) => std::fs::OpenOptions::new()
                            .write(true)
                            .append(true)
                            .open(path),
                        _ => std::fs::File::create(path),
                    };
                    let mut file = match file {
                        Ok(file) => file,
                        Err(err) => {
                            let arg_span =
                                call.get_named_arg("output").expect("arg should exist").span;
                            // Open failure is surfaced as a Value::Error in Ok(..)
                            // rather than Err, so the pipeline still yields a value.
                            return Ok(PipelineData::Value(
                                Value::Error {
                                    error: ShellError::GenericError(
                                        "Permission denied".into(),
                                        err.to_string(),
                                        Some(arg_span),
                                        None,
                                        Vec::new(),
                                    ),
                                },
                                None,
                            ));
                        }
                    };
                    // --bin suppresses extension-based conversion entirely.
                    let ext = if bin {
                        None
                    } else {
                        path.extension()
                            .map(|name| name.to_string_lossy().to_string())
                    };
                    if let Some(ext) = ext {
                        // If a `to <ext>` converter is registered, run it on the
                        // fetched value before writing.
                        let output =
                            match engine_state.find_decl(format!("to {}", ext).as_bytes(), &[]) {
                                Some(converter_id) => {
                                    let output = engine_state.get_decl(converter_id).run(
                                        engine_state,
                                        stack,
                                        &Call::new(span),
                                        value,
                                    )?;
                                    output.into_value(span)
                                }
                                None => value.into_value(span),
                            };
                        // NOTE(review): these String/Binary/List write arms are
                        // duplicated in the no-extension branch below; a shared
                        // helper would remove the duplication.
                        match output {
                            Value::String { val, .. } => {
                                if let Err(err) = file.write_all(val.as_bytes()) {
                                    return Err(ShellError::IOError(err.to_string()));
                                } else {
                                    file.flush()?
                                }
                                Ok(PipelineData::new(span))
                            }
                            Value::Binary { val, .. } => {
                                if let Err(err) = file.write_all(&val) {
                                    return Err(ShellError::IOError(err.to_string()));
                                } else {
                                    file.flush()?
                                }
                                Ok(PipelineData::new(span))
                            }
                            Value::List { vals, .. } => {
                                // Lists of strings are written newline-separated,
                                // with a trailing newline.
                                let val = vals
                                    .into_iter()
                                    .map(|it| it.as_string())
                                    .collect::<Result<Vec<String>, ShellError>>()?
                                    .join("\n")
                                    + "\n";
                                if let Err(err) = file.write_all(val.as_bytes()) {
                                    return Err(ShellError::IOError(err.to_string()));
                                } else {
                                    file.flush()?
                                }
                                Ok(PipelineData::new(span))
                            }
                            v => Err(ShellError::UnsupportedInput(
                                format!("{:?} not supported", v.get_type()),
                                span,
                            )),
                        }
                    } else {
                        match value {
                            PipelineData::ExternalStream { stdout: None, .. } => {
                                Ok(PipelineData::new(span))
                            }
                            PipelineData::ExternalStream {
                                stdout: Some(mut stream),
                                ..
                            } => {
                                // Raw byte stream: copy chunk-by-chunk through a
                                // buffered writer.
                                let mut writer = BufWriter::new(file);
                                stream
                                    .try_for_each(move |result| {
                                        let buf = match result {
                                            Ok(v) => match v {
                                                Value::String { val, .. } => val.into_bytes(),
                                                Value::Binary { val, .. } => val,
                                                _ => {
                                                    return Err(ShellError::UnsupportedInput(
                                                        format!("{:?} not supported", v.get_type()),
                                                        v.span()?,
                                                    ));
                                                }
                                            },
                                            Err(err) => return Err(err),
                                        };
                                        // NOTE(review): `write` may write fewer
                                        // bytes than `buf`; `write_all` would
                                        // guarantee the full chunk is written.
                                        if let Err(err) = writer.write(&buf) {
                                            return Err(ShellError::IOError(err.to_string()));
                                        }
                                        Ok(())
                                    })
                                    .map(|_| PipelineData::new(span))
                            }
                            value => match value.into_value(span) {
                                Value::String { val, .. } => {
                                    if let Err(err) = file.write_all(val.as_bytes()) {
                                        return Err(ShellError::IOError(err.to_string()));
                                    } else {
                                        file.flush()?
                                    }
                                    Ok(PipelineData::new(span))
                                }
                                Value::Binary { val, .. } => {
                                    if let Err(err) = file.write_all(&val) {
                                        return Err(ShellError::IOError(err.to_string()));
                                    } else {
                                        file.flush()?
                                    }
                                    Ok(PipelineData::new(span))
                                }
                                Value::List { vals, .. } => {
                                    let val = vals
                                        .into_iter()
                                        .map(|it| it.as_string())
                                        .collect::<Result<Vec<String>, ShellError>>()?
                                        .join("\n")
                                        + "\n";
                                    if let Err(err) = file.write_all(val.as_bytes()) {
                                        return Err(ShellError::IOError(err.to_string()));
                                    } else {
                                        file.flush()?
                                    }
                                    Ok(PipelineData::new(span))
                                }
                                v => Err(ShellError::UnsupportedInput(
                                    format!("{:?} not supported", v.get_type()),
                                    span,
                                )),
                            },
                        }
                    }
                }
            }
        }
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Fetch content from url.com",
                example: "fetch url.com",
                result: None,
            },
            Example {
                description: "Fetch content from url.com, with username and password",
                example: "fetch -u myuser -p mypass url.com",
                result: None,
            },
            Example {
                description: "Fetch content from url.com, with custom header",
                example: "fetch -H [my-header-key my-header-value] url.com",
                result: None,
            },
        ]
    }
}
// Parsed command-line arguments for `fetch`, bundled for `helper`.
struct Arguments {
    url: Option<Value>,       // required positional URL
    raw: bool,                // --raw: skip `from <ext>` conversion
    user: Option<String>,     // --user: basic-auth username
    password: Option<String>, // --password: basic-auth password
    timeout: Option<Value>,   // --timeout: request timeout in seconds
    headers: Option<Value>,   // --headers: list or single-row record
}
fn run_fetch(
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
let args = Arguments {
url: Some(call.req(engine_state, stack, 0)?),
raw: call.has_flag("raw"),
user: call.get_flag(engine_state, stack, "user")?,
password: call.get_flag(engine_state, stack, "password")?,
timeout: call.get_flag(engine_state, stack, "timeout")?,
headers: call.get_flag(engine_state, stack, "headers")?,
};
helper(engine_state, stack, call, args)
}
// Helper function that actually goes to retrieve the resource from the given url.
// A possible file extension is derived from the response so it can be used by
// AutoConvert (`from <ext>`) commands.
fn helper(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
    args: Arguments,
) -> std::result::Result<PipelineData, ShellError> {
    // A URL value is required; run_fetch always supplies Some, but guard anyway.
    let url_value = if let Some(val) = args.url {
        val
    } else {
        return Err(ShellError::UnsupportedInput(
            "Expecting a url as a string but got nothing".to_string(),
            call.head,
        ));
    };
    let span = url_value.span()?;
    let requested_url = url_value.as_string()?;
    // Validate the URL up front so a parse failure reports the argument's span.
    let url = match url::Url::parse(&requested_url) {
        Ok(u) => u,
        Err(_e) => {
            return Err(ShellError::UnsupportedInput(
                "Incomplete or incorrect url. Expected a full url, e.g., https://www.example.com"
                    .to_string(),
                span,
            ));
        }
    };
    let user = args.user.clone();
    let password = args.password;
    let timeout = args.timeout;
    let headers = args.headers;
    let raw = args.raw;
    // HTTP Basic auth payload: "user:password" (or "user:" without a password),
    // base64-encoded.
    let login = match (user, password) {
        (Some(user), Some(password)) => Some(encode(&format!("{}:{}", user, password))),
        (Some(user), _) => Some(encode(&format!("{}:", user))),
        _ => None,
    };
    let client = http_client();
    let mut request = client.get(url);
    if let Some(timeout) = timeout {
        let val = timeout.as_i64()?;
        // NOTE(review): `val < 1` already covers every negative value, so the
        // `is_negative()` test is redundant.
        if val.is_negative() || val < 1 {
            return Err(ShellError::UnsupportedInput(
                "Timeout value must be an integer and larger than 0".to_string(),
                timeout.span().unwrap_or_else(|_| Span::new(0, 0)),
            ));
        }
        request = request.timeout(Duration::from_secs(val as u64));
    }
    if let Some(login) = login {
        request = request.header("Authorization", format!("Basic {}", login));
    }
    if let Some(headers) = headers {
        let mut custom_headers: HashMap<String, Value> = HashMap::new();
        match &headers {
            Value::List { vals: table, .. } => {
                if table.len() == 1 {
                    // single row([key1 key2]; [val1 val2])
                    match &table[0] {
                        Value::Record { cols, vals, .. } => {
                            for (k, v) in cols.iter().zip(vals.iter()) {
                                custom_headers.insert(k.to_string(), v.clone());
                            }
                        }
                        x => {
                            return Err(ShellError::CantConvert(
                                "string list or single row".into(),
                                x.get_type().to_string(),
                                headers.span().unwrap_or_else(|_| Span::new(0, 0)),
                                None,
                            ));
                        }
                    }
                } else {
                    // primitive values ([key1 val1 key2 val2])
                    // NOTE(review): a trailing unpaired key is silently dropped.
                    for row in table.chunks(2) {
                        if row.len() == 2 {
                            custom_headers.insert(row[0].as_string()?, (&row[1]).clone());
                        }
                    }
                }
            }
            x => {
                return Err(ShellError::CantConvert(
                    "string list or single row".into(),
                    x.get_type().to_string(),
                    headers.span().unwrap_or_else(|_| Span::new(0, 0)),
                    None,
                ));
            }
        };
        // Only string-valued headers are forwarded; other value types are ignored.
        for (k, v) in &custom_headers {
            if let Ok(s) = v.as_string() {
                request = request.header(k, s);
            }
        }
    }
    match request.send() {
        Ok(resp) => match resp.headers().get("content-type") {
            Some(content_type) => {
                let content_type = content_type.to_str().map_err(|e| {
                    ShellError::GenericError(
                        e.to_string(),
                        "".to_string(),
                        None,
                        Some("MIME type were invalid".to_string()),
                        Vec::new(),
                    )
                })?;
                let content_type = mime::Mime::from_str(content_type).map_err(|_| {
                    ShellError::GenericError(
                        format!("MIME type unknown: {}", content_type),
                        "".to_string(),
                        None,
                        Some("given unknown MIME type".to_string()),
                        Vec::new(),
                    )
                })?;
                // For text/plain, fall back to the URL path's file extension;
                // otherwise use the MIME subtype (e.g. "json") as the extension.
                let ext = match (content_type.type_(), content_type.subtype()) {
                    (mime::TEXT, mime::PLAIN) => {
                        // NOTE(review): re-parses `requested_url` although the
                        // earlier parse succeeded (that `Url` was moved into
                        // `client.get`); calling `path_segments()` before the
                        // move would avoid this second parse.
                        let path_extension = url::Url::parse(&requested_url)
                            .map_err(|_| {
                                ShellError::GenericError(
                                    format!("Cannot parse URL: {}", requested_url),
                                    "".to_string(),
                                    None,
                                    Some("cannot parse".to_string()),
                                    Vec::new(),
                                )
                            })?
                            .path_segments()
                            .and_then(|segments| segments.last())
                            .and_then(|name| if name.is_empty() { None } else { Some(name) })
                            .and_then(|name| {
                                PathBuf::from(name)
                                    .extension()
                                    .map(|name| name.to_string_lossy().to_string())
                            });
                        path_extension
                    }
                    _ => Some(content_type.subtype().to_string()),
                };
                let output = response_to_buffer(resp, engine_state, span);
                if raw {
                    return Ok(output);
                }
                // Pipe through a registered `from <ext>` converter when available.
                if let Some(ext) = ext {
                    match engine_state.find_decl(format!("from {}", ext).as_bytes(), &[]) {
                        Some(converter_id) => engine_state.get_decl(converter_id).run(
                            engine_state,
                            stack,
                            &Call::new(span),
                            output,
                        ),
                        None => Ok(output),
                    }
                } else {
                    Ok(output)
                }
            }
            None => Ok(response_to_buffer(resp, engine_state, span)),
        },
        // Map common transport/status failures to friendly messages.
        Err(e) if e.is_timeout() => Err(ShellError::NetworkFailure(
            format!("Request to {} has timed out", requested_url),
            span,
        )),
        Err(e) if e.is_status() => match e.status() {
            Some(err_code) if err_code == StatusCode::NOT_FOUND => Err(ShellError::NetworkFailure(
                format!("Requested file not found (404): {:?}", requested_url),
                span,
            )),
            Some(err_code) if err_code == StatusCode::MOVED_PERMANENTLY => {
                Err(ShellError::NetworkFailure(
                    format!("Resource moved permanently (301): {:?}", requested_url),
                    span,
                ))
            }
            Some(err_code) if err_code == StatusCode::BAD_REQUEST => {
                Err(ShellError::NetworkFailure(
                    format!("Bad request (400) to {:?}", requested_url),
                    span,
                ))
            }
            Some(err_code) if err_code == StatusCode::FORBIDDEN => Err(ShellError::NetworkFailure(
                format!("Access forbidden (403) to {:?}", requested_url),
                span,
            )),
            _ => Err(ShellError::NetworkFailure(
                format!(
                    "Cannot make request to {:?}. Error is {:?}",
                    requested_url,
                    e.to_string()
                ),
                span,
            )),
        },
        Err(e) => Err(ShellError::NetworkFailure(
            format!(
                "Cannot make request to {:?}. Error is {:?}",
                requested_url,
                e.to_string()
            ),
            span,
        )),
    }
}
// Wrap an HTTP response body in a buffered reader and expose it to the engine
// as an external raw stream (no stderr, no exit code).
fn response_to_buffer(
    response: Response,
    engine_state: &EngineState,
    span: Span,
) -> nu_protocol::PipelineData {
    let reader = BufferedReader {
        input: BufReader::new(response),
    };
    let stream = RawStream::new(Box::new(reader), engine_state.ctrlc.clone(), span);
    PipelineData::ExternalStream {
        stdout: Some(stream),
        stderr: None,
        exit_code: None,
        span,
        metadata: None,
    }
}
// Builds the blocking HTTP client used for every fetch. The user agent is a
// static literal, so `build` either always succeeds or always fails — the
// unwrap can never trip only at runtime for some inputs.
#[allow(clippy::unwrap_used)]
fn http_client() -> reqwest::blocking::Client {
    let builder = reqwest::blocking::Client::builder().user_agent("nushell");
    builder.build().unwrap()
}
|
use super::{
Check,
};
use ident::Identifier;
use ty::Ty;
use stmt::{
Statement,
fun::{
Fun,
FunParam
},
};
use ir::{
Chunk,
};
use super::{
Typeck,
Load,
Unload,
};
use ir::hir::HIRInstruction;
use ir_traits::{ReadInstruction, WriteInstruction};
use notices::{
DiagnosticSourceBuilder,
DiagnosticLevel,
};
impl Unload for FunParam {
    /// Serialize this parameter: FnParam tag, its position, then the
    /// identifier chunk followed by the type chunk.
    fn unload(&self) -> Result<Chunk, ()> {
        let mut chunk = Chunk::new();
        chunk.write_instruction(HIRInstruction::FnParam);
        chunk.write_pos(self.pos);
        // `?` propagates the unit error exactly like the original matches did.
        chunk.write_chunk(self.ident.unload()?);
        chunk.write_chunk(self.ty.unload()?);
        Ok(chunk)
    }
}
impl<'a> Check<'a> for Fun {
    /// Type-check every statement of the body, stopping at the first failure.
    fn check(&self, typeck: &'a Typeck) -> Result<(), ()> {
        self.body
            .iter()
            .try_for_each(|statement| statement.check(typeck))
    }
}
impl Load for Fun {
    type Output = Fun;
    // Deserialize a function from `chunk`: position, identifier, parameters
    // (until EndParams), return type — then the body statements, which arrive
    // as separate chunks over `typeck.chunk_rx`.
    fn load(chunk: &Chunk, typeck: &Typeck) -> Result<Option<Self::Output>, ()> {
        let pos = match chunk.read_pos() {
            Ok(pos) => pos,
            Err(msg) => {
                let diag_source = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                    .level(DiagnosticLevel::Error)
                    .message(msg)
                    .build();
                typeck.emit_diagnostic(&[], &[diag_source]);
                return Err(())
            }
        };
        let ident = match Identifier::load(chunk, typeck) {
            Ok(Some(ident)) => ident,
            Ok(None) => return Ok(None),
            Err(msg) => return Err(msg)
        };
        let mut params = vec![];
        // Read parameters until the EndParams sentinel instruction.
        while let Some(ins) = chunk.read_instruction() as Option<HIRInstruction> {
            if ins == HIRInstruction::EndParams {
                break;
            }
            if ins != HIRInstruction::FnParam {
                // Unexpected instruction: report with source snippet and bail.
                let pos = match chunk.read_pos() {
                    Ok(pos) => pos,
                    Err(msg) => {
                        let diag_source = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                            .message(msg)
                            .level(DiagnosticLevel::Error)
                            .build();
                        typeck.emit_diagnostic(&[], &[diag_source]);
                        return Err(())
                    }
                };
                let source = match typeck.request_source_snippet(pos) {
                    Ok(source) => source,
                    Err(diag) => {
                        typeck.emit_diagnostic(&[], &[diag]);
                        return Err(())
                    }
                };
                let diag_source = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                    .message(format!("Expected an fn param instruction but instead got {:?}", ins))
                    .level(DiagnosticLevel::Error)
                    .source(source)
                    .build();
                typeck.emit_diagnostic(&[format!("This is a bug in the compiler.")], &[diag_source]);
                return Err(())
            }
            let param_ident = match Identifier::load(chunk, typeck) {
                Ok(Some(ident)) => ident,
                Ok(None) => return Ok(None),
                Err(msg) => return Err(msg)
            };
            let param_type = match Ty::load(chunk, typeck) {
                Ok(Some(ty)) => ty,
                Ok(None) => return Ok(None),
                Err(notice) => return Err(notice)
            };
            // NOTE(review): `FunParam::unload` writes a per-parameter pos right
            // after the FnParam tag, but it is never read back here; each param
            // is stored with the function's `pos` instead — confirm the chunk
            // cursor stays aligned across load/unload.
            params.push(FunParam {
                ident: param_ident,
                ty: param_type,
                pos
            });
        }
        let return_type = match Ty::load(chunk, typeck) {
            Ok(Some(ty)) => ty,
            Ok(None) => return Ok(None),
            Err(notice) => return Err(notice)
        };
        // The function body is delivered as a follow-up chunk on the channel.
        let block_chunk = match typeck.chunk_rx.recv() {
            Ok(Some(chunk)) => {
                chunk
            }
            Ok(None) => {
                let report = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                    .message(format!("Failed to get chunk from chunk channel."))
                    .level(DiagnosticLevel::Error)
                    .build();
                typeck.emit_diagnostic(&[
                    format!("The previous error should only have occurred during development. If you are a user then please notify the author.")
                ],
                &[report]
                );
                return Err(())
            }
            Err(_) => {
                let report = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                    .message(format!("Failed to get chunk from chunk channel."))
                    .level(DiagnosticLevel::Error)
                    .build();
                typeck.emit_diagnostic(&[
                    format!("The previous error should only have occurred during development. If you are a user then please notify the author.")
                ],
                &[report]
                );
                return Err(())
            }
        };
        let block = block_chunk.read_instruction();
        let mut block: Vec<Statement> = if let Some(HIRInstruction::Block) = block {
            vec![]
        } else {
            // Not a Block instruction: report where we are and fail.
            // NOTE(review): the pos is read from `chunk` here, while the
            // unexpected instruction came from `block_chunk` — verify which
            // chunk's position is intended.
            let pos = match chunk.read_pos() {
                Ok(pos) => pos,
                Err(msg) => {
                    let diag_source = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                        .message(msg)
                        .level(DiagnosticLevel::Error)
                        .build();
                    typeck.emit_diagnostic(&[], &[diag_source]);
                    return Err(())
                }
            };
            let source = match typeck.request_source_snippet(pos) {
                Ok(source) => source,
                Err(diag) => {
                    typeck.emit_diagnostic(&[], &[diag]);
                    return Err(())
                }
            };
            let report = DiagnosticSourceBuilder::new(typeck.module_name.clone(), 0)
                .message(format!("Expected a function body but instead got {:?}.", block))
                .level(DiagnosticLevel::Error)
                .source(source)
                .range(pos.col_range())
                .build();
            typeck.emit_diagnostic(&[], &[report]);
            return Err(())
        };
        // Collect statements until EndBlock arrives on the channel.
        loop {
            // NOTE(review): double unwrap panics (rather than reporting a
            // diagnostic) if the channel closes or yields None mid-body.
            let next_chunk = typeck.chunk_rx.recv().unwrap().unwrap();
            if let Some(HIRInstruction::EndBlock) = next_chunk.read_instruction() {
                break;
            }
            // Rewind so Statement::load sees the instruction we just peeked.
            next_chunk.jump_to(0).unwrap();
            let statement = match Statement::load(&next_chunk, typeck) {
                Ok(Some(statement)) => statement,
                Ok(None) => return Ok(None),
                Err(notice) => return Err(notice)
            };
            block.push(statement);
        }
        let fun = Fun {
            ident,
            ty: return_type,
            body: block,
            params,
            pos
        };
        Ok(Some(fun))
    }
}
impl Unload for Fun{
fn unload(&self) -> Result<Chunk, ()> {
let mut chunk = Chunk::new();
chunk.write_instruction(HIRInstruction::Fn);
chunk.write_pos(self.pos);
//Write the identifier
match self.ident.unload(){
Ok(ch) => chunk.write_chunk(ch),
Err(notice) => return Err(notice)
}
//Write the params information
for param in self.params.iter(){
match param.unload(){
Ok(ch) => chunk.write_chunk(ch),
Err(notice) => return Err(notice)
}
}
chunk.write_instruction(HIRInstruction::EndParams);
//Write the return type information
match self.ty.unload(){
Ok(ch) => chunk.write_chunk(ch),
Err(notice) => return Err(notice)
}
//Write the body
for statement in self.body.iter(){
match statement.unload(){
Ok(ch) => chunk.write_chunk(ch),
Err(notice) => return Err(notice)
}
}
chunk.write_instruction(HIRInstruction::EndFn);
Ok(chunk)
}
} |
/// An enum to represent all characters in the MiscellaneousSymbolsandArrows block.
///
/// Variants appear in code-point order covering U+2B00..=U+2BFE; the
/// unassigned code points in this range (U+2B74..=U+2B75 and
/// U+2B96..=U+2B97) have no variant.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum MiscellaneousSymbolsandArrows {
    /// \u{2b00}: '⬀'
    NorthEastWhiteArrow,
    /// \u{2b01}: '⬁'
    NorthWestWhiteArrow,
    /// \u{2b02}: '⬂'
    SouthEastWhiteArrow,
    /// \u{2b03}: '⬃'
    SouthWestWhiteArrow,
    /// \u{2b04}: '⬄'
    LeftRightWhiteArrow,
    /// \u{2b05}: '⬅'
    LeftwardsBlackArrow,
    /// \u{2b06}: '⬆'
    UpwardsBlackArrow,
    /// \u{2b07}: '⬇'
    DownwardsBlackArrow,
    /// \u{2b08}: '⬈'
    NorthEastBlackArrow,
    /// \u{2b09}: '⬉'
    NorthWestBlackArrow,
    /// \u{2b0a}: '⬊'
    SouthEastBlackArrow,
    /// \u{2b0b}: '⬋'
    SouthWestBlackArrow,
    /// \u{2b0c}: '⬌'
    LeftRightBlackArrow,
    /// \u{2b0d}: '⬍'
    UpDownBlackArrow,
    /// \u{2b0e}: '⬎'
    RightwardsArrowWithTipDownwards,
    /// \u{2b0f}: '⬏'
    RightwardsArrowWithTipUpwards,
    /// \u{2b10}: '⬐'
    LeftwardsArrowWithTipDownwards,
    /// \u{2b11}: '⬑'
    LeftwardsArrowWithTipUpwards,
    /// \u{2b12}: '⬒'
    SquareWithTopHalfBlack,
    /// \u{2b13}: '⬓'
    SquareWithBottomHalfBlack,
    /// \u{2b14}: '⬔'
    SquareWithUpperRightDiagonalHalfBlack,
    /// \u{2b15}: '⬕'
    SquareWithLowerLeftDiagonalHalfBlack,
    /// \u{2b16}: '⬖'
    DiamondWithLeftHalfBlack,
    /// \u{2b17}: '⬗'
    DiamondWithRightHalfBlack,
    /// \u{2b18}: '⬘'
    DiamondWithTopHalfBlack,
    /// \u{2b19}: '⬙'
    DiamondWithBottomHalfBlack,
    /// \u{2b1a}: '⬚'
    DottedSquare,
    /// \u{2b1b}: '⬛'
    BlackLargeSquare,
    /// \u{2b1c}: '⬜'
    WhiteLargeSquare,
    /// \u{2b1d}: '⬝'
    BlackVerySmallSquare,
    /// \u{2b1e}: '⬞'
    WhiteVerySmallSquare,
    /// \u{2b1f}: '⬟'
    BlackPentagon,
    /// \u{2b20}: '⬠'
    WhitePentagon,
    /// \u{2b21}: '⬡'
    WhiteHexagon,
    /// \u{2b22}: '⬢'
    BlackHexagon,
    /// \u{2b23}: '⬣'
    HorizontalBlackHexagon,
    /// \u{2b24}: '⬤'
    BlackLargeCircle,
    /// \u{2b25}: '⬥'
    BlackMediumDiamond,
    /// \u{2b26}: '⬦'
    WhiteMediumDiamond,
    /// \u{2b27}: '⬧'
    BlackMediumLozenge,
    /// \u{2b28}: '⬨'
    WhiteMediumLozenge,
    /// \u{2b29}: '⬩'
    BlackSmallDiamond,
    /// \u{2b2a}: '⬪'
    BlackSmallLozenge,
    /// \u{2b2b}: '⬫'
    WhiteSmallLozenge,
    /// \u{2b2c}: '⬬'
    BlackHorizontalEllipse,
    /// \u{2b2d}: '⬭'
    WhiteHorizontalEllipse,
    /// \u{2b2e}: '⬮'
    BlackVerticalEllipse,
    /// \u{2b2f}: '⬯'
    WhiteVerticalEllipse,
    /// \u{2b30}: '⬰'
    LeftArrowWithSmallCircle,
    /// \u{2b31}: '⬱'
    ThreeLeftwardsArrows,
    /// \u{2b32}: '⬲'
    LeftArrowWithCircledPlus,
    /// \u{2b33}: '⬳'
    LongLeftwardsSquiggleArrow,
    /// \u{2b34}: '⬴'
    LeftwardsTwoDashHeadedArrowWithVerticalStroke,
    /// \u{2b35}: '⬵'
    LeftwardsTwoDashHeadedArrowWithDoubleVerticalStroke,
    /// \u{2b36}: '⬶'
    LeftwardsTwoDashHeadedArrowFromBar,
    /// \u{2b37}: '⬷'
    LeftwardsTwoDashHeadedTripleDashArrow,
    /// \u{2b38}: '⬸'
    LeftwardsArrowWithDottedStem,
    /// \u{2b39}: '⬹'
    LeftwardsArrowWithTailWithVerticalStroke,
    /// \u{2b3a}: '⬺'
    LeftwardsArrowWithTailWithDoubleVerticalStroke,
    /// \u{2b3b}: '⬻'
    LeftwardsTwoDashHeadedArrowWithTail,
    /// \u{2b3c}: '⬼'
    LeftwardsTwoDashHeadedArrowWithTailWithVerticalStroke,
    /// \u{2b3d}: '⬽'
    LeftwardsTwoDashHeadedArrowWithTailWithDoubleVerticalStroke,
    /// \u{2b3e}: '⬾'
    LeftwardsArrowThroughX,
    /// \u{2b3f}: '⬿'
    WaveArrowPointingDirectlyLeft,
    /// \u{2b40}: '⭀'
    EqualsSignAboveLeftwardsArrow,
    /// \u{2b41}: '⭁'
    ReverseTildeOperatorAboveLeftwardsArrow,
    /// \u{2b42}: '⭂'
    LeftwardsArrowAboveReverseAlmostEqualTo,
    /// \u{2b43}: '⭃'
    RightwardsArrowThroughGreaterDashThan,
    /// \u{2b44}: '⭄'
    RightwardsArrowThroughSuperset,
    /// \u{2b45}: '⭅'
    LeftwardsQuadrupleArrow,
    /// \u{2b46}: '⭆'
    RightwardsQuadrupleArrow,
    /// \u{2b47}: '⭇'
    ReverseTildeOperatorAboveRightwardsArrow,
    /// \u{2b48}: '⭈'
    RightwardsArrowAboveReverseAlmostEqualTo,
    /// \u{2b49}: '⭉'
    TildeOperatorAboveLeftwardsArrow,
    /// \u{2b4a}: '⭊'
    LeftwardsArrowAboveAlmostEqualTo,
    /// \u{2b4b}: '⭋'
    LeftwardsArrowAboveReverseTildeOperator,
    /// \u{2b4c}: '⭌'
    RightwardsArrowAboveReverseTildeOperator,
    /// \u{2b4d}: '⭍'
    DownwardsTriangleDashHeadedZigzagArrow,
    /// \u{2b4e}: '⭎'
    ShortSlantedNorthArrow,
    /// \u{2b4f}: '⭏'
    ShortBackslantedSouthArrow,
    /// \u{2b50}: '⭐'
    WhiteMediumStar,
    /// \u{2b51}: '⭑'
    BlackSmallStar,
    /// \u{2b52}: '⭒'
    WhiteSmallStar,
    /// \u{2b53}: '⭓'
    BlackRightDashPointingPentagon,
    /// \u{2b54}: '⭔'
    WhiteRightDashPointingPentagon,
    /// \u{2b55}: '⭕'
    HeavyLargeCircle,
    /// \u{2b56}: '⭖'
    HeavyOvalWithOvalInside,
    /// \u{2b57}: '⭗'
    HeavyCircleWithCircleInside,
    /// \u{2b58}: '⭘'
    HeavyCircle,
    /// \u{2b59}: '⭙'
    HeavyCircledSaltire,
    /// \u{2b5a}: '⭚'
    SlantedNorthArrowWithHookedHead,
    /// \u{2b5b}: '⭛'
    BackslantedSouthArrowWithHookedTail,
    /// \u{2b5c}: '⭜'
    SlantedNorthArrowWithHorizontalTail,
    /// \u{2b5d}: '⭝'
    BackslantedSouthArrowWithHorizontalTail,
    /// \u{2b5e}: '⭞'
    BentArrowPointingDownwardsThenNorthEast,
    /// \u{2b5f}: '⭟'
    ShortBentArrowPointingDownwardsThenNorthEast,
    /// \u{2b60}: '⭠'
    LeftwardsTriangleDashHeadedArrow,
    /// \u{2b61}: '⭡'
    UpwardsTriangleDashHeadedArrow,
    /// \u{2b62}: '⭢'
    RightwardsTriangleDashHeadedArrow,
    /// \u{2b63}: '⭣'
    DownwardsTriangleDashHeadedArrow,
    /// \u{2b64}: '⭤'
    LeftRightTriangleDashHeadedArrow,
    /// \u{2b65}: '⭥'
    UpDownTriangleDashHeadedArrow,
    /// \u{2b66}: '⭦'
    NorthWestTriangleDashHeadedArrow,
    /// \u{2b67}: '⭧'
    NorthEastTriangleDashHeadedArrow,
    /// \u{2b68}: '⭨'
    SouthEastTriangleDashHeadedArrow,
    /// \u{2b69}: '⭩'
    SouthWestTriangleDashHeadedArrow,
    /// \u{2b6a}: '⭪'
    LeftwardsTriangleDashHeadedDashedArrow,
    /// \u{2b6b}: '⭫'
    UpwardsTriangleDashHeadedDashedArrow,
    /// \u{2b6c}: '⭬'
    RightwardsTriangleDashHeadedDashedArrow,
    /// \u{2b6d}: '⭭'
    DownwardsTriangleDashHeadedDashedArrow,
    /// \u{2b6e}: '⭮'
    ClockwiseTriangleDashHeadedOpenCircleArrow,
    /// \u{2b6f}: '⭯'
    AnticlockwiseTriangleDashHeadedOpenCircleArrow,
    /// \u{2b70}: '⭰'
    LeftwardsTriangleDashHeadedArrowToBar,
    /// \u{2b71}: '⭱'
    UpwardsTriangleDashHeadedArrowToBar,
    /// \u{2b72}: '⭲'
    RightwardsTriangleDashHeadedArrowToBar,
    /// \u{2b73}: '⭳'
    DownwardsTriangleDashHeadedArrowToBar,
    /// \u{2b76}: '⭶'
    NorthWestTriangleDashHeadedArrowToBar,
    /// \u{2b77}: '⭷'
    NorthEastTriangleDashHeadedArrowToBar,
    /// \u{2b78}: '⭸'
    SouthEastTriangleDashHeadedArrowToBar,
    /// \u{2b79}: '⭹'
    SouthWestTriangleDashHeadedArrowToBar,
    /// \u{2b7a}: '⭺'
    LeftwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke,
    /// \u{2b7b}: '⭻'
    UpwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke,
    /// \u{2b7c}: '⭼'
    RightwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke,
    /// \u{2b7d}: '⭽'
    DownwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke,
    /// \u{2b7e}: '⭾'
    HorizontalTabKey,
    /// \u{2b7f}: '⭿'
    VerticalTabKey,
    /// \u{2b80}: '⮀'
    LeftwardsTriangleDashHeadedArrowOverRightwardsTriangleDashHeadedArrow,
    /// \u{2b81}: '⮁'
    UpwardsTriangleDashHeadedArrowLeftwardsOfDownwardsTriangleDashHeadedArrow,
    /// \u{2b82}: '⮂'
    RightwardsTriangleDashHeadedArrowOverLeftwardsTriangleDashHeadedArrow,
    /// \u{2b83}: '⮃'
    DownwardsTriangleDashHeadedArrowLeftwardsOfUpwardsTriangleDashHeadedArrow,
    /// \u{2b84}: '⮄'
    LeftwardsTriangleDashHeadedPairedArrows,
    /// \u{2b85}: '⮅'
    UpwardsTriangleDashHeadedPairedArrows,
    /// \u{2b86}: '⮆'
    RightwardsTriangleDashHeadedPairedArrows,
    /// \u{2b87}: '⮇'
    DownwardsTriangleDashHeadedPairedArrows,
    /// \u{2b88}: '⮈'
    LeftwardsBlackCircledWhiteArrow,
    /// \u{2b89}: '⮉'
    UpwardsBlackCircledWhiteArrow,
    /// \u{2b8a}: '⮊'
    RightwardsBlackCircledWhiteArrow,
    /// \u{2b8b}: '⮋'
    DownwardsBlackCircledWhiteArrow,
    /// \u{2b8c}: '⮌'
    AnticlockwiseTriangleDashHeadedRightUDashShapedArrow,
    /// \u{2b8d}: '⮍'
    AnticlockwiseTriangleDashHeadedBottomUDashShapedArrow,
    /// \u{2b8e}: '⮎'
    AnticlockwiseTriangleDashHeadedLeftUDashShapedArrow,
    /// \u{2b8f}: '⮏'
    AnticlockwiseTriangleDashHeadedTopUDashShapedArrow,
    /// \u{2b90}: '⮐'
    ReturnLeft,
    /// \u{2b91}: '⮑'
    ReturnRight,
    /// \u{2b92}: '⮒'
    NewlineLeft,
    /// \u{2b93}: '⮓'
    NewlineRight,
    /// \u{2b94}: '⮔'
    FourCornerArrowsCirclingAnticlockwise,
    /// \u{2b95}: '⮕'
    RightwardsBlackArrow,
    /// \u{2b98}: '⮘'
    ThreeDashDTopDashLightedLeftwardsEquilateralArrowhead,
    /// \u{2b99}: '⮙'
    ThreeDashDRightDashLightedUpwardsEquilateralArrowhead,
    /// \u{2b9a}: '⮚'
    ThreeDashDTopDashLightedRightwardsEquilateralArrowhead,
    /// \u{2b9b}: '⮛'
    ThreeDashDLeftDashLightedDownwardsEquilateralArrowhead,
    /// \u{2b9c}: '⮜'
    BlackLeftwardsEquilateralArrowhead,
    /// \u{2b9d}: '⮝'
    BlackUpwardsEquilateralArrowhead,
    /// \u{2b9e}: '⮞'
    BlackRightwardsEquilateralArrowhead,
    /// \u{2b9f}: '⮟'
    BlackDownwardsEquilateralArrowhead,
    /// \u{2ba0}: '⮠'
    DownwardsTriangleDashHeadedArrowWithLongTipLeftwards,
    /// \u{2ba1}: '⮡'
    DownwardsTriangleDashHeadedArrowWithLongTipRightwards,
    /// \u{2ba2}: '⮢'
    UpwardsTriangleDashHeadedArrowWithLongTipLeftwards,
    /// \u{2ba3}: '⮣'
    UpwardsTriangleDashHeadedArrowWithLongTipRightwards,
    /// \u{2ba4}: '⮤'
    LeftwardsTriangleDashHeadedArrowWithLongTipUpwards,
    /// \u{2ba5}: '⮥'
    RightwardsTriangleDashHeadedArrowWithLongTipUpwards,
    /// \u{2ba6}: '⮦'
    LeftwardsTriangleDashHeadedArrowWithLongTipDownwards,
    /// \u{2ba7}: '⮧'
    RightwardsTriangleDashHeadedArrowWithLongTipDownwards,
    /// \u{2ba8}: '⮨'
    BlackCurvedDownwardsAndLeftwardsArrow,
    /// \u{2ba9}: '⮩'
    BlackCurvedDownwardsAndRightwardsArrow,
    /// \u{2baa}: '⮪'
    BlackCurvedUpwardsAndLeftwardsArrow,
    /// \u{2bab}: '⮫'
    BlackCurvedUpwardsAndRightwardsArrow,
    /// \u{2bac}: '⮬'
    BlackCurvedLeftwardsAndUpwardsArrow,
    /// \u{2bad}: '⮭'
    BlackCurvedRightwardsAndUpwardsArrow,
    /// \u{2bae}: '⮮'
    BlackCurvedLeftwardsAndDownwardsArrow,
    /// \u{2baf}: '⮯'
    BlackCurvedRightwardsAndDownwardsArrow,
    /// \u{2bb0}: '⮰'
    RibbonArrowDownLeft,
    /// \u{2bb1}: '⮱'
    RibbonArrowDownRight,
    /// \u{2bb2}: '⮲'
    RibbonArrowUpLeft,
    /// \u{2bb3}: '⮳'
    RibbonArrowUpRight,
    /// \u{2bb4}: '⮴'
    RibbonArrowLeftUp,
    /// \u{2bb5}: '⮵'
    RibbonArrowRightUp,
    /// \u{2bb6}: '⮶'
    RibbonArrowLeftDown,
    /// \u{2bb7}: '⮷'
    RibbonArrowRightDown,
    /// \u{2bb8}: '⮸'
    UpwardsWhiteArrowFromBarWithHorizontalBar,
    /// \u{2bb9}: '⮹'
    UpArrowheadInARectangleBox,
    /// \u{2bba}: '⮺'
    OverlappingWhiteSquares,
    /// \u{2bbb}: '⮻'
    OverlappingWhiteAndBlackSquares,
    /// \u{2bbc}: '⮼'
    OverlappingBlackSquares,
    /// \u{2bbd}: '⮽'
    BallotBoxWithLightX,
    /// \u{2bbe}: '⮾'
    CircledX,
    /// \u{2bbf}: '⮿'
    CircledBoldX,
    /// \u{2bc0}: '⯀'
    BlackSquareCentred,
    /// \u{2bc1}: '⯁'
    BlackDiamondCentred,
    /// \u{2bc2}: '⯂'
    TurnedBlackPentagon,
    /// \u{2bc3}: '⯃'
    HorizontalBlackOctagon,
    /// \u{2bc4}: '⯄'
    BlackOctagon,
    /// \u{2bc5}: '⯅'
    BlackMediumUpDashPointingTriangleCentred,
    /// \u{2bc6}: '⯆'
    BlackMediumDownDashPointingTriangleCentred,
    /// \u{2bc7}: '⯇'
    BlackMediumLeftDashPointingTriangleCentred,
    /// \u{2bc8}: '⯈'
    BlackMediumRightDashPointingTriangleCentred,
    /// \u{2bc9}: '⯉'
    NeptuneFormTwo,
    /// \u{2bca}: '⯊'
    TopHalfBlackCircle,
    /// \u{2bcb}: '⯋'
    BottomHalfBlackCircle,
    /// \u{2bcc}: '⯌'
    LightFourPointedBlackCusp,
    /// \u{2bcd}: '⯍'
    RotatedLightFourPointedBlackCusp,
    /// \u{2bce}: '⯎'
    WhiteFourPointedCusp,
    /// \u{2bcf}: '⯏'
    RotatedWhiteFourPointedCusp,
    /// \u{2bd0}: '⯐'
    SquarePositionIndicator,
    /// \u{2bd1}: '⯑'
    UncertaintySign,
    /// \u{2bd2}: '⯒'
    GroupMark,
    /// \u{2bd3}: '⯓'
    PlutoFormTwo,
    /// \u{2bd4}: '⯔'
    PlutoFormThree,
    /// \u{2bd5}: '⯕'
    PlutoFormFour,
    /// \u{2bd6}: '⯖'
    PlutoFormFive,
    /// \u{2bd7}: '⯗'
    Transpluto,
    /// \u{2bd8}: '⯘'
    Proserpina,
    /// \u{2bd9}: '⯙'
    Astraea,
    /// \u{2bda}: '⯚'
    Hygiea,
    /// \u{2bdb}: '⯛'
    Pholus,
    /// \u{2bdc}: '⯜'
    Nessus,
    /// \u{2bdd}: '⯝'
    WhiteMoonSelena,
    /// \u{2bde}: '⯞'
    BlackDiamondOnCross,
    /// \u{2bdf}: '⯟'
    TrueLightMoonArta,
    /// \u{2be0}: '⯠'
    Cupido,
    /// \u{2be1}: '⯡'
    Hades,
    /// \u{2be2}: '⯢'
    Zeus,
    /// \u{2be3}: '⯣'
    Kronos,
    /// \u{2be4}: '⯤'
    Apollon,
    /// \u{2be5}: '⯥'
    Admetos,
    /// \u{2be6}: '⯦'
    Vulcanus,
    /// \u{2be7}: '⯧'
    Poseidon,
    /// \u{2be8}: '⯨'
    LeftHalfBlackStar,
    /// \u{2be9}: '⯩'
    RightHalfBlackStar,
    /// \u{2bea}: '⯪'
    StarWithLeftHalfBlack,
    /// \u{2beb}: '⯫'
    StarWithRightHalfBlack,
    /// \u{2bec}: '⯬'
    LeftwardsTwoDashHeadedArrowWithTriangleArrowheads,
    /// \u{2bed}: '⯭'
    UpwardsTwoDashHeadedArrowWithTriangleArrowheads,
    /// \u{2bee}: '⯮'
    RightwardsTwoDashHeadedArrowWithTriangleArrowheads,
    /// \u{2bef}: '⯯'
    DownwardsTwoDashHeadedArrowWithTriangleArrowheads,
    /// \u{2bf0}: '⯰'
    ErisFormOne,
    /// \u{2bf1}: '⯱'
    ErisFormTwo,
    /// \u{2bf2}: '⯲'
    Sedna,
    /// \u{2bf3}: '⯳'
    RussianAstrologicalSymbolVigintile,
    /// \u{2bf4}: '⯴'
    RussianAstrologicalSymbolNovile,
    /// \u{2bf5}: '⯵'
    RussianAstrologicalSymbolQuintile,
    /// \u{2bf6}: '⯶'
    RussianAstrologicalSymbolBinovile,
    /// \u{2bf7}: '⯷'
    RussianAstrologicalSymbolSentagon,
    /// \u{2bf8}: '⯸'
    RussianAstrologicalSymbolTredecile,
    /// \u{2bf9}: '⯹'
    EqualsSignWithInfinityBelow,
    /// \u{2bfa}: '⯺'
    UnitedSymbol,
    /// \u{2bfb}: '⯻'
    SeparatedSymbol,
    /// \u{2bfc}: '⯼'
    DoubledSymbol,
    /// \u{2bfd}: '⯽'
    PassedSymbol,
    /// \u{2bfe}: '⯾'
    ReversedRightAngle,
}
impl Into<char> for MiscellaneousSymbolsandArrows {
fn into(self) -> char {
match self {
MiscellaneousSymbolsandArrows::NorthEastWhiteArrow => '⬀',
MiscellaneousSymbolsandArrows::NorthWestWhiteArrow => '⬁',
MiscellaneousSymbolsandArrows::SouthEastWhiteArrow => '⬂',
MiscellaneousSymbolsandArrows::SouthWestWhiteArrow => '⬃',
MiscellaneousSymbolsandArrows::LeftRightWhiteArrow => '⬄',
MiscellaneousSymbolsandArrows::LeftwardsBlackArrow => '⬅',
MiscellaneousSymbolsandArrows::UpwardsBlackArrow => '⬆',
MiscellaneousSymbolsandArrows::DownwardsBlackArrow => '⬇',
MiscellaneousSymbolsandArrows::NorthEastBlackArrow => '⬈',
MiscellaneousSymbolsandArrows::NorthWestBlackArrow => '⬉',
MiscellaneousSymbolsandArrows::SouthEastBlackArrow => '⬊',
MiscellaneousSymbolsandArrows::SouthWestBlackArrow => '⬋',
MiscellaneousSymbolsandArrows::LeftRightBlackArrow => '⬌',
MiscellaneousSymbolsandArrows::UpDownBlackArrow => '⬍',
MiscellaneousSymbolsandArrows::RightwardsArrowWithTipDownwards => '⬎',
MiscellaneousSymbolsandArrows::RightwardsArrowWithTipUpwards => '⬏',
MiscellaneousSymbolsandArrows::LeftwardsArrowWithTipDownwards => '⬐',
MiscellaneousSymbolsandArrows::LeftwardsArrowWithTipUpwards => '⬑',
MiscellaneousSymbolsandArrows::SquareWithTopHalfBlack => '⬒',
MiscellaneousSymbolsandArrows::SquareWithBottomHalfBlack => '⬓',
MiscellaneousSymbolsandArrows::SquareWithUpperRightDiagonalHalfBlack => '⬔',
MiscellaneousSymbolsandArrows::SquareWithLowerLeftDiagonalHalfBlack => '⬕',
MiscellaneousSymbolsandArrows::DiamondWithLeftHalfBlack => '⬖',
MiscellaneousSymbolsandArrows::DiamondWithRightHalfBlack => '⬗',
MiscellaneousSymbolsandArrows::DiamondWithTopHalfBlack => '⬘',
MiscellaneousSymbolsandArrows::DiamondWithBottomHalfBlack => '⬙',
MiscellaneousSymbolsandArrows::DottedSquare => '⬚',
MiscellaneousSymbolsandArrows::BlackLargeSquare => '⬛',
MiscellaneousSymbolsandArrows::WhiteLargeSquare => '⬜',
MiscellaneousSymbolsandArrows::BlackVerySmallSquare => '⬝',
MiscellaneousSymbolsandArrows::WhiteVerySmallSquare => '⬞',
MiscellaneousSymbolsandArrows::BlackPentagon => '⬟',
MiscellaneousSymbolsandArrows::WhitePentagon => '⬠',
MiscellaneousSymbolsandArrows::WhiteHexagon => '⬡',
MiscellaneousSymbolsandArrows::BlackHexagon => '⬢',
MiscellaneousSymbolsandArrows::HorizontalBlackHexagon => '⬣',
MiscellaneousSymbolsandArrows::BlackLargeCircle => '⬤',
MiscellaneousSymbolsandArrows::BlackMediumDiamond => '⬥',
MiscellaneousSymbolsandArrows::WhiteMediumDiamond => '⬦',
MiscellaneousSymbolsandArrows::BlackMediumLozenge => '⬧',
MiscellaneousSymbolsandArrows::WhiteMediumLozenge => '⬨',
MiscellaneousSymbolsandArrows::BlackSmallDiamond => '⬩',
MiscellaneousSymbolsandArrows::BlackSmallLozenge => '⬪',
MiscellaneousSymbolsandArrows::WhiteSmallLozenge => '⬫',
MiscellaneousSymbolsandArrows::BlackHorizontalEllipse => '⬬',
MiscellaneousSymbolsandArrows::WhiteHorizontalEllipse => '⬭',
MiscellaneousSymbolsandArrows::BlackVerticalEllipse => '⬮',
MiscellaneousSymbolsandArrows::WhiteVerticalEllipse => '⬯',
MiscellaneousSymbolsandArrows::LeftArrowWithSmallCircle => '⬰',
MiscellaneousSymbolsandArrows::ThreeLeftwardsArrows => '⬱',
MiscellaneousSymbolsandArrows::LeftArrowWithCircledPlus => '⬲',
MiscellaneousSymbolsandArrows::LongLeftwardsSquiggleArrow => '⬳',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithVerticalStroke => '⬴',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithDoubleVerticalStroke => '⬵',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowFromBar => '⬶',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedTripleDashArrow => '⬷',
MiscellaneousSymbolsandArrows::LeftwardsArrowWithDottedStem => '⬸',
MiscellaneousSymbolsandArrows::LeftwardsArrowWithTailWithVerticalStroke => '⬹',
MiscellaneousSymbolsandArrows::LeftwardsArrowWithTailWithDoubleVerticalStroke => '⬺',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTail => '⬻',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTailWithVerticalStroke => '⬼',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTailWithDoubleVerticalStroke => '⬽',
MiscellaneousSymbolsandArrows::LeftwardsArrowThroughX => '⬾',
MiscellaneousSymbolsandArrows::WaveArrowPointingDirectlyLeft => '⬿',
MiscellaneousSymbolsandArrows::EqualsSignAboveLeftwardsArrow => '⭀',
MiscellaneousSymbolsandArrows::ReverseTildeOperatorAboveLeftwardsArrow => '⭁',
MiscellaneousSymbolsandArrows::LeftwardsArrowAboveReverseAlmostEqualTo => '⭂',
MiscellaneousSymbolsandArrows::RightwardsArrowThroughGreaterDashThan => '⭃',
MiscellaneousSymbolsandArrows::RightwardsArrowThroughSuperset => '⭄',
MiscellaneousSymbolsandArrows::LeftwardsQuadrupleArrow => '⭅',
MiscellaneousSymbolsandArrows::RightwardsQuadrupleArrow => '⭆',
MiscellaneousSymbolsandArrows::ReverseTildeOperatorAboveRightwardsArrow => '⭇',
MiscellaneousSymbolsandArrows::RightwardsArrowAboveReverseAlmostEqualTo => '⭈',
MiscellaneousSymbolsandArrows::TildeOperatorAboveLeftwardsArrow => '⭉',
MiscellaneousSymbolsandArrows::LeftwardsArrowAboveAlmostEqualTo => '⭊',
MiscellaneousSymbolsandArrows::LeftwardsArrowAboveReverseTildeOperator => '⭋',
MiscellaneousSymbolsandArrows::RightwardsArrowAboveReverseTildeOperator => '⭌',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedZigzagArrow => '⭍',
MiscellaneousSymbolsandArrows::ShortSlantedNorthArrow => '⭎',
MiscellaneousSymbolsandArrows::ShortBackslantedSouthArrow => '⭏',
MiscellaneousSymbolsandArrows::WhiteMediumStar => '⭐',
MiscellaneousSymbolsandArrows::BlackSmallStar => '⭑',
MiscellaneousSymbolsandArrows::WhiteSmallStar => '⭒',
MiscellaneousSymbolsandArrows::BlackRightDashPointingPentagon => '⭓',
MiscellaneousSymbolsandArrows::WhiteRightDashPointingPentagon => '⭔',
MiscellaneousSymbolsandArrows::HeavyLargeCircle => '⭕',
MiscellaneousSymbolsandArrows::HeavyOvalWithOvalInside => '⭖',
MiscellaneousSymbolsandArrows::HeavyCircleWithCircleInside => '⭗',
MiscellaneousSymbolsandArrows::HeavyCircle => '⭘',
MiscellaneousSymbolsandArrows::HeavyCircledSaltire => '⭙',
MiscellaneousSymbolsandArrows::SlantedNorthArrowWithHookedHead => '⭚',
MiscellaneousSymbolsandArrows::BackslantedSouthArrowWithHookedTail => '⭛',
MiscellaneousSymbolsandArrows::SlantedNorthArrowWithHorizontalTail => '⭜',
MiscellaneousSymbolsandArrows::BackslantedSouthArrowWithHorizontalTail => '⭝',
MiscellaneousSymbolsandArrows::BentArrowPointingDownwardsThenNorthEast => '⭞',
MiscellaneousSymbolsandArrows::ShortBentArrowPointingDownwardsThenNorthEast => '⭟',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrow => '⭠',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrow => '⭡',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrow => '⭢',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrow => '⭣',
MiscellaneousSymbolsandArrows::LeftRightTriangleDashHeadedArrow => '⭤',
MiscellaneousSymbolsandArrows::UpDownTriangleDashHeadedArrow => '⭥',
MiscellaneousSymbolsandArrows::NorthWestTriangleDashHeadedArrow => '⭦',
MiscellaneousSymbolsandArrows::NorthEastTriangleDashHeadedArrow => '⭧',
MiscellaneousSymbolsandArrows::SouthEastTriangleDashHeadedArrow => '⭨',
MiscellaneousSymbolsandArrows::SouthWestTriangleDashHeadedArrow => '⭩',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedDashedArrow => '⭪',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedDashedArrow => '⭫',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedDashedArrow => '⭬',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedDashedArrow => '⭭',
MiscellaneousSymbolsandArrows::ClockwiseTriangleDashHeadedOpenCircleArrow => '⭮',
MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedOpenCircleArrow => '⭯',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowToBar => '⭰',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowToBar => '⭱',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowToBar => '⭲',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowToBar => '⭳',
MiscellaneousSymbolsandArrows::NorthWestTriangleDashHeadedArrowToBar => '⭶',
MiscellaneousSymbolsandArrows::NorthEastTriangleDashHeadedArrowToBar => '⭷',
MiscellaneousSymbolsandArrows::SouthEastTriangleDashHeadedArrowToBar => '⭸',
MiscellaneousSymbolsandArrows::SouthWestTriangleDashHeadedArrowToBar => '⭹',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke => '⭺',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke => '⭻',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke => '⭼',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke => '⭽',
MiscellaneousSymbolsandArrows::HorizontalTabKey => '⭾',
MiscellaneousSymbolsandArrows::VerticalTabKey => '⭿',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowOverRightwardsTriangleDashHeadedArrow => '⮀',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowLeftwardsOfDownwardsTriangleDashHeadedArrow => '⮁',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowOverLeftwardsTriangleDashHeadedArrow => '⮂',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowLeftwardsOfUpwardsTriangleDashHeadedArrow => '⮃',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedPairedArrows => '⮄',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedPairedArrows => '⮅',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedPairedArrows => '⮆',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedPairedArrows => '⮇',
MiscellaneousSymbolsandArrows::LeftwardsBlackCircledWhiteArrow => '⮈',
MiscellaneousSymbolsandArrows::UpwardsBlackCircledWhiteArrow => '⮉',
MiscellaneousSymbolsandArrows::RightwardsBlackCircledWhiteArrow => '⮊',
MiscellaneousSymbolsandArrows::DownwardsBlackCircledWhiteArrow => '⮋',
MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedRightUDashShapedArrow => '⮌',
MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedBottomUDashShapedArrow => '⮍',
MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedLeftUDashShapedArrow => '⮎',
MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedTopUDashShapedArrow => '⮏',
MiscellaneousSymbolsandArrows::ReturnLeft => '⮐',
MiscellaneousSymbolsandArrows::ReturnRight => '⮑',
MiscellaneousSymbolsandArrows::NewlineLeft => '⮒',
MiscellaneousSymbolsandArrows::NewlineRight => '⮓',
MiscellaneousSymbolsandArrows::FourCornerArrowsCirclingAnticlockwise => '⮔',
MiscellaneousSymbolsandArrows::RightwardsBlackArrow => '⮕',
MiscellaneousSymbolsandArrows::ThreeDashDTopDashLightedLeftwardsEquilateralArrowhead => '⮘',
MiscellaneousSymbolsandArrows::ThreeDashDRightDashLightedUpwardsEquilateralArrowhead => '⮙',
MiscellaneousSymbolsandArrows::ThreeDashDTopDashLightedRightwardsEquilateralArrowhead => '⮚',
MiscellaneousSymbolsandArrows::ThreeDashDLeftDashLightedDownwardsEquilateralArrowhead => '⮛',
MiscellaneousSymbolsandArrows::BlackLeftwardsEquilateralArrowhead => '⮜',
MiscellaneousSymbolsandArrows::BlackUpwardsEquilateralArrowhead => '⮝',
MiscellaneousSymbolsandArrows::BlackRightwardsEquilateralArrowhead => '⮞',
MiscellaneousSymbolsandArrows::BlackDownwardsEquilateralArrowhead => '⮟',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithLongTipLeftwards => '⮠',
MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithLongTipRightwards => '⮡',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithLongTipLeftwards => '⮢',
MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithLongTipRightwards => '⮣',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithLongTipUpwards => '⮤',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithLongTipUpwards => '⮥',
MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithLongTipDownwards => '⮦',
MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithLongTipDownwards => '⮧',
MiscellaneousSymbolsandArrows::BlackCurvedDownwardsAndLeftwardsArrow => '⮨',
MiscellaneousSymbolsandArrows::BlackCurvedDownwardsAndRightwardsArrow => '⮩',
MiscellaneousSymbolsandArrows::BlackCurvedUpwardsAndLeftwardsArrow => '⮪',
MiscellaneousSymbolsandArrows::BlackCurvedUpwardsAndRightwardsArrow => '⮫',
MiscellaneousSymbolsandArrows::BlackCurvedLeftwardsAndUpwardsArrow => '⮬',
MiscellaneousSymbolsandArrows::BlackCurvedRightwardsAndUpwardsArrow => '⮭',
MiscellaneousSymbolsandArrows::BlackCurvedLeftwardsAndDownwardsArrow => '⮮',
MiscellaneousSymbolsandArrows::BlackCurvedRightwardsAndDownwardsArrow => '⮯',
MiscellaneousSymbolsandArrows::RibbonArrowDownLeft => '⮰',
MiscellaneousSymbolsandArrows::RibbonArrowDownRight => '⮱',
MiscellaneousSymbolsandArrows::RibbonArrowUpLeft => '⮲',
MiscellaneousSymbolsandArrows::RibbonArrowUpRight => '⮳',
MiscellaneousSymbolsandArrows::RibbonArrowLeftUp => '⮴',
MiscellaneousSymbolsandArrows::RibbonArrowRightUp => '⮵',
MiscellaneousSymbolsandArrows::RibbonArrowLeftDown => '⮶',
MiscellaneousSymbolsandArrows::RibbonArrowRightDown => '⮷',
MiscellaneousSymbolsandArrows::UpwardsWhiteArrowFromBarWithHorizontalBar => '⮸',
MiscellaneousSymbolsandArrows::UpArrowheadInARectangleBox => '⮹',
MiscellaneousSymbolsandArrows::OverlappingWhiteSquares => '⮺',
MiscellaneousSymbolsandArrows::OverlappingWhiteAndBlackSquares => '⮻',
MiscellaneousSymbolsandArrows::OverlappingBlackSquares => '⮼',
MiscellaneousSymbolsandArrows::BallotBoxWithLightX => '⮽',
MiscellaneousSymbolsandArrows::CircledX => '⮾',
MiscellaneousSymbolsandArrows::CircledBoldX => '⮿',
MiscellaneousSymbolsandArrows::BlackSquareCentred => '⯀',
MiscellaneousSymbolsandArrows::BlackDiamondCentred => '⯁',
MiscellaneousSymbolsandArrows::TurnedBlackPentagon => '⯂',
MiscellaneousSymbolsandArrows::HorizontalBlackOctagon => '⯃',
MiscellaneousSymbolsandArrows::BlackOctagon => '⯄',
MiscellaneousSymbolsandArrows::BlackMediumUpDashPointingTriangleCentred => '⯅',
MiscellaneousSymbolsandArrows::BlackMediumDownDashPointingTriangleCentred => '⯆',
MiscellaneousSymbolsandArrows::BlackMediumLeftDashPointingTriangleCentred => '⯇',
MiscellaneousSymbolsandArrows::BlackMediumRightDashPointingTriangleCentred => '⯈',
MiscellaneousSymbolsandArrows::NeptuneFormTwo => '⯉',
MiscellaneousSymbolsandArrows::TopHalfBlackCircle => '⯊',
MiscellaneousSymbolsandArrows::BottomHalfBlackCircle => '⯋',
MiscellaneousSymbolsandArrows::LightFourPointedBlackCusp => '⯌',
MiscellaneousSymbolsandArrows::RotatedLightFourPointedBlackCusp => '⯍',
MiscellaneousSymbolsandArrows::WhiteFourPointedCusp => '⯎',
MiscellaneousSymbolsandArrows::RotatedWhiteFourPointedCusp => '⯏',
MiscellaneousSymbolsandArrows::SquarePositionIndicator => '⯐',
MiscellaneousSymbolsandArrows::UncertaintySign => '⯑',
MiscellaneousSymbolsandArrows::GroupMark => '⯒',
MiscellaneousSymbolsandArrows::PlutoFormTwo => '⯓',
MiscellaneousSymbolsandArrows::PlutoFormThree => '⯔',
MiscellaneousSymbolsandArrows::PlutoFormFour => '⯕',
MiscellaneousSymbolsandArrows::PlutoFormFive => '⯖',
MiscellaneousSymbolsandArrows::Transpluto => '⯗',
MiscellaneousSymbolsandArrows::Proserpina => '⯘',
MiscellaneousSymbolsandArrows::Astraea => '⯙',
MiscellaneousSymbolsandArrows::Hygiea => '⯚',
MiscellaneousSymbolsandArrows::Pholus => '⯛',
MiscellaneousSymbolsandArrows::Nessus => '⯜',
MiscellaneousSymbolsandArrows::WhiteMoonSelena => '⯝',
MiscellaneousSymbolsandArrows::BlackDiamondOnCross => '⯞',
MiscellaneousSymbolsandArrows::TrueLightMoonArta => '⯟',
MiscellaneousSymbolsandArrows::Cupido => '⯠',
MiscellaneousSymbolsandArrows::Hades => '⯡',
MiscellaneousSymbolsandArrows::Zeus => '⯢',
MiscellaneousSymbolsandArrows::Kronos => '⯣',
MiscellaneousSymbolsandArrows::Apollon => '⯤',
MiscellaneousSymbolsandArrows::Admetos => '⯥',
MiscellaneousSymbolsandArrows::Vulcanus => '⯦',
MiscellaneousSymbolsandArrows::Poseidon => '⯧',
MiscellaneousSymbolsandArrows::LeftHalfBlackStar => '⯨',
MiscellaneousSymbolsandArrows::RightHalfBlackStar => '⯩',
MiscellaneousSymbolsandArrows::StarWithLeftHalfBlack => '⯪',
MiscellaneousSymbolsandArrows::StarWithRightHalfBlack => '⯫',
MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTriangleArrowheads => '⯬',
MiscellaneousSymbolsandArrows::UpwardsTwoDashHeadedArrowWithTriangleArrowheads => '⯭',
MiscellaneousSymbolsandArrows::RightwardsTwoDashHeadedArrowWithTriangleArrowheads => '⯮',
MiscellaneousSymbolsandArrows::DownwardsTwoDashHeadedArrowWithTriangleArrowheads => '⯯',
MiscellaneousSymbolsandArrows::ErisFormOne => '⯰',
MiscellaneousSymbolsandArrows::ErisFormTwo => '⯱',
MiscellaneousSymbolsandArrows::Sedna => '⯲',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolVigintile => '⯳',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolNovile => '⯴',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolQuintile => '⯵',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolBinovile => '⯶',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolSentagon => '⯷',
MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolTredecile => '⯸',
MiscellaneousSymbolsandArrows::EqualsSignWithInfinityBelow => '⯹',
MiscellaneousSymbolsandArrows::UnitedSymbol => '⯺',
MiscellaneousSymbolsandArrows::SeparatedSymbol => '⯻',
MiscellaneousSymbolsandArrows::DoubledSymbol => '⯼',
MiscellaneousSymbolsandArrows::PassedSymbol => '⯽',
MiscellaneousSymbolsandArrows::ReversedRightAngle => '⯾',
}
}
}
impl std::convert::TryFrom<char> for MiscellaneousSymbolsandArrows {
type Error = ();
fn try_from(c: char) -> Result<Self, Self::Error> {
match c {
'⬀' => Ok(MiscellaneousSymbolsandArrows::NorthEastWhiteArrow),
'⬁' => Ok(MiscellaneousSymbolsandArrows::NorthWestWhiteArrow),
'⬂' => Ok(MiscellaneousSymbolsandArrows::SouthEastWhiteArrow),
'⬃' => Ok(MiscellaneousSymbolsandArrows::SouthWestWhiteArrow),
'⬄' => Ok(MiscellaneousSymbolsandArrows::LeftRightWhiteArrow),
'⬅' => Ok(MiscellaneousSymbolsandArrows::LeftwardsBlackArrow),
'⬆' => Ok(MiscellaneousSymbolsandArrows::UpwardsBlackArrow),
'⬇' => Ok(MiscellaneousSymbolsandArrows::DownwardsBlackArrow),
'⬈' => Ok(MiscellaneousSymbolsandArrows::NorthEastBlackArrow),
'⬉' => Ok(MiscellaneousSymbolsandArrows::NorthWestBlackArrow),
'⬊' => Ok(MiscellaneousSymbolsandArrows::SouthEastBlackArrow),
'⬋' => Ok(MiscellaneousSymbolsandArrows::SouthWestBlackArrow),
'⬌' => Ok(MiscellaneousSymbolsandArrows::LeftRightBlackArrow),
'⬍' => Ok(MiscellaneousSymbolsandArrows::UpDownBlackArrow),
'⬎' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowWithTipDownwards),
'⬏' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowWithTipUpwards),
'⬐' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowWithTipDownwards),
'⬑' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowWithTipUpwards),
'⬒' => Ok(MiscellaneousSymbolsandArrows::SquareWithTopHalfBlack),
'⬓' => Ok(MiscellaneousSymbolsandArrows::SquareWithBottomHalfBlack),
'⬔' => Ok(MiscellaneousSymbolsandArrows::SquareWithUpperRightDiagonalHalfBlack),
'⬕' => Ok(MiscellaneousSymbolsandArrows::SquareWithLowerLeftDiagonalHalfBlack),
'⬖' => Ok(MiscellaneousSymbolsandArrows::DiamondWithLeftHalfBlack),
'⬗' => Ok(MiscellaneousSymbolsandArrows::DiamondWithRightHalfBlack),
'⬘' => Ok(MiscellaneousSymbolsandArrows::DiamondWithTopHalfBlack),
'⬙' => Ok(MiscellaneousSymbolsandArrows::DiamondWithBottomHalfBlack),
'⬚' => Ok(MiscellaneousSymbolsandArrows::DottedSquare),
'⬛' => Ok(MiscellaneousSymbolsandArrows::BlackLargeSquare),
'⬜' => Ok(MiscellaneousSymbolsandArrows::WhiteLargeSquare),
'⬝' => Ok(MiscellaneousSymbolsandArrows::BlackVerySmallSquare),
'⬞' => Ok(MiscellaneousSymbolsandArrows::WhiteVerySmallSquare),
'⬟' => Ok(MiscellaneousSymbolsandArrows::BlackPentagon),
'⬠' => Ok(MiscellaneousSymbolsandArrows::WhitePentagon),
'⬡' => Ok(MiscellaneousSymbolsandArrows::WhiteHexagon),
'⬢' => Ok(MiscellaneousSymbolsandArrows::BlackHexagon),
'⬣' => Ok(MiscellaneousSymbolsandArrows::HorizontalBlackHexagon),
'⬤' => Ok(MiscellaneousSymbolsandArrows::BlackLargeCircle),
'⬥' => Ok(MiscellaneousSymbolsandArrows::BlackMediumDiamond),
'⬦' => Ok(MiscellaneousSymbolsandArrows::WhiteMediumDiamond),
'⬧' => Ok(MiscellaneousSymbolsandArrows::BlackMediumLozenge),
'⬨' => Ok(MiscellaneousSymbolsandArrows::WhiteMediumLozenge),
'⬩' => Ok(MiscellaneousSymbolsandArrows::BlackSmallDiamond),
'⬪' => Ok(MiscellaneousSymbolsandArrows::BlackSmallLozenge),
'⬫' => Ok(MiscellaneousSymbolsandArrows::WhiteSmallLozenge),
'⬬' => Ok(MiscellaneousSymbolsandArrows::BlackHorizontalEllipse),
'⬭' => Ok(MiscellaneousSymbolsandArrows::WhiteHorizontalEllipse),
'⬮' => Ok(MiscellaneousSymbolsandArrows::BlackVerticalEllipse),
'⬯' => Ok(MiscellaneousSymbolsandArrows::WhiteVerticalEllipse),
'⬰' => Ok(MiscellaneousSymbolsandArrows::LeftArrowWithSmallCircle),
'⬱' => Ok(MiscellaneousSymbolsandArrows::ThreeLeftwardsArrows),
'⬲' => Ok(MiscellaneousSymbolsandArrows::LeftArrowWithCircledPlus),
'⬳' => Ok(MiscellaneousSymbolsandArrows::LongLeftwardsSquiggleArrow),
'⬴' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithVerticalStroke),
'⬵' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithDoubleVerticalStroke),
'⬶' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowFromBar),
'⬷' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedTripleDashArrow),
'⬸' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowWithDottedStem),
'⬹' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowWithTailWithVerticalStroke),
'⬺' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowWithTailWithDoubleVerticalStroke),
'⬻' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTail),
'⬼' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTailWithVerticalStroke),
'⬽' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTailWithDoubleVerticalStroke),
'⬾' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowThroughX),
'⬿' => Ok(MiscellaneousSymbolsandArrows::WaveArrowPointingDirectlyLeft),
'⭀' => Ok(MiscellaneousSymbolsandArrows::EqualsSignAboveLeftwardsArrow),
'⭁' => Ok(MiscellaneousSymbolsandArrows::ReverseTildeOperatorAboveLeftwardsArrow),
'⭂' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowAboveReverseAlmostEqualTo),
'⭃' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowThroughGreaterDashThan),
'⭄' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowThroughSuperset),
'⭅' => Ok(MiscellaneousSymbolsandArrows::LeftwardsQuadrupleArrow),
'⭆' => Ok(MiscellaneousSymbolsandArrows::RightwardsQuadrupleArrow),
'⭇' => Ok(MiscellaneousSymbolsandArrows::ReverseTildeOperatorAboveRightwardsArrow),
'⭈' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowAboveReverseAlmostEqualTo),
'⭉' => Ok(MiscellaneousSymbolsandArrows::TildeOperatorAboveLeftwardsArrow),
'⭊' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowAboveAlmostEqualTo),
'⭋' => Ok(MiscellaneousSymbolsandArrows::LeftwardsArrowAboveReverseTildeOperator),
'⭌' => Ok(MiscellaneousSymbolsandArrows::RightwardsArrowAboveReverseTildeOperator),
'⭍' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedZigzagArrow),
'⭎' => Ok(MiscellaneousSymbolsandArrows::ShortSlantedNorthArrow),
'⭏' => Ok(MiscellaneousSymbolsandArrows::ShortBackslantedSouthArrow),
'⭐' => Ok(MiscellaneousSymbolsandArrows::WhiteMediumStar),
'⭑' => Ok(MiscellaneousSymbolsandArrows::BlackSmallStar),
'⭒' => Ok(MiscellaneousSymbolsandArrows::WhiteSmallStar),
'⭓' => Ok(MiscellaneousSymbolsandArrows::BlackRightDashPointingPentagon),
'⭔' => Ok(MiscellaneousSymbolsandArrows::WhiteRightDashPointingPentagon),
'⭕' => Ok(MiscellaneousSymbolsandArrows::HeavyLargeCircle),
'⭖' => Ok(MiscellaneousSymbolsandArrows::HeavyOvalWithOvalInside),
'⭗' => Ok(MiscellaneousSymbolsandArrows::HeavyCircleWithCircleInside),
'⭘' => Ok(MiscellaneousSymbolsandArrows::HeavyCircle),
'⭙' => Ok(MiscellaneousSymbolsandArrows::HeavyCircledSaltire),
'⭚' => Ok(MiscellaneousSymbolsandArrows::SlantedNorthArrowWithHookedHead),
'⭛' => Ok(MiscellaneousSymbolsandArrows::BackslantedSouthArrowWithHookedTail),
'⭜' => Ok(MiscellaneousSymbolsandArrows::SlantedNorthArrowWithHorizontalTail),
'⭝' => Ok(MiscellaneousSymbolsandArrows::BackslantedSouthArrowWithHorizontalTail),
'⭞' => Ok(MiscellaneousSymbolsandArrows::BentArrowPointingDownwardsThenNorthEast),
'⭟' => Ok(MiscellaneousSymbolsandArrows::ShortBentArrowPointingDownwardsThenNorthEast),
'⭠' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrow),
'⭡' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrow),
'⭢' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrow),
'⭣' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrow),
'⭤' => Ok(MiscellaneousSymbolsandArrows::LeftRightTriangleDashHeadedArrow),
'⭥' => Ok(MiscellaneousSymbolsandArrows::UpDownTriangleDashHeadedArrow),
'⭦' => Ok(MiscellaneousSymbolsandArrows::NorthWestTriangleDashHeadedArrow),
'⭧' => Ok(MiscellaneousSymbolsandArrows::NorthEastTriangleDashHeadedArrow),
'⭨' => Ok(MiscellaneousSymbolsandArrows::SouthEastTriangleDashHeadedArrow),
'⭩' => Ok(MiscellaneousSymbolsandArrows::SouthWestTriangleDashHeadedArrow),
'⭪' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedDashedArrow),
'⭫' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedDashedArrow),
'⭬' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedDashedArrow),
'⭭' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedDashedArrow),
'⭮' => Ok(MiscellaneousSymbolsandArrows::ClockwiseTriangleDashHeadedOpenCircleArrow),
'⭯' => Ok(MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedOpenCircleArrow),
'⭰' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowToBar),
'⭱' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowToBar),
'⭲' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowToBar),
'⭳' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowToBar),
'⭶' => Ok(MiscellaneousSymbolsandArrows::NorthWestTriangleDashHeadedArrowToBar),
'⭷' => Ok(MiscellaneousSymbolsandArrows::NorthEastTriangleDashHeadedArrowToBar),
'⭸' => Ok(MiscellaneousSymbolsandArrows::SouthEastTriangleDashHeadedArrowToBar),
'⭹' => Ok(MiscellaneousSymbolsandArrows::SouthWestTriangleDashHeadedArrowToBar),
'⭺' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke),
'⭻' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke),
'⭼' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke),
'⭽' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithDoubleHorizontalStroke),
'⭾' => Ok(MiscellaneousSymbolsandArrows::HorizontalTabKey),
'⭿' => Ok(MiscellaneousSymbolsandArrows::VerticalTabKey),
'⮀' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowOverRightwardsTriangleDashHeadedArrow),
'⮁' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowLeftwardsOfDownwardsTriangleDashHeadedArrow),
'⮂' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowOverLeftwardsTriangleDashHeadedArrow),
'⮃' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowLeftwardsOfUpwardsTriangleDashHeadedArrow),
'⮄' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedPairedArrows),
'⮅' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedPairedArrows),
'⮆' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedPairedArrows),
'⮇' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedPairedArrows),
'⮈' => Ok(MiscellaneousSymbolsandArrows::LeftwardsBlackCircledWhiteArrow),
'⮉' => Ok(MiscellaneousSymbolsandArrows::UpwardsBlackCircledWhiteArrow),
'⮊' => Ok(MiscellaneousSymbolsandArrows::RightwardsBlackCircledWhiteArrow),
'⮋' => Ok(MiscellaneousSymbolsandArrows::DownwardsBlackCircledWhiteArrow),
'⮌' => Ok(MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedRightUDashShapedArrow),
'⮍' => Ok(MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedBottomUDashShapedArrow),
'⮎' => Ok(MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedLeftUDashShapedArrow),
'⮏' => Ok(MiscellaneousSymbolsandArrows::AnticlockwiseTriangleDashHeadedTopUDashShapedArrow),
'⮐' => Ok(MiscellaneousSymbolsandArrows::ReturnLeft),
'⮑' => Ok(MiscellaneousSymbolsandArrows::ReturnRight),
'⮒' => Ok(MiscellaneousSymbolsandArrows::NewlineLeft),
'⮓' => Ok(MiscellaneousSymbolsandArrows::NewlineRight),
'⮔' => Ok(MiscellaneousSymbolsandArrows::FourCornerArrowsCirclingAnticlockwise),
'⮕' => Ok(MiscellaneousSymbolsandArrows::RightwardsBlackArrow),
'⮘' => Ok(MiscellaneousSymbolsandArrows::ThreeDashDTopDashLightedLeftwardsEquilateralArrowhead),
'⮙' => Ok(MiscellaneousSymbolsandArrows::ThreeDashDRightDashLightedUpwardsEquilateralArrowhead),
'⮚' => Ok(MiscellaneousSymbolsandArrows::ThreeDashDTopDashLightedRightwardsEquilateralArrowhead),
'⮛' => Ok(MiscellaneousSymbolsandArrows::ThreeDashDLeftDashLightedDownwardsEquilateralArrowhead),
'⮜' => Ok(MiscellaneousSymbolsandArrows::BlackLeftwardsEquilateralArrowhead),
'⮝' => Ok(MiscellaneousSymbolsandArrows::BlackUpwardsEquilateralArrowhead),
'⮞' => Ok(MiscellaneousSymbolsandArrows::BlackRightwardsEquilateralArrowhead),
'⮟' => Ok(MiscellaneousSymbolsandArrows::BlackDownwardsEquilateralArrowhead),
'⮠' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithLongTipLeftwards),
'⮡' => Ok(MiscellaneousSymbolsandArrows::DownwardsTriangleDashHeadedArrowWithLongTipRightwards),
'⮢' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithLongTipLeftwards),
'⮣' => Ok(MiscellaneousSymbolsandArrows::UpwardsTriangleDashHeadedArrowWithLongTipRightwards),
'⮤' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithLongTipUpwards),
'⮥' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithLongTipUpwards),
'⮦' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTriangleDashHeadedArrowWithLongTipDownwards),
'⮧' => Ok(MiscellaneousSymbolsandArrows::RightwardsTriangleDashHeadedArrowWithLongTipDownwards),
'⮨' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedDownwardsAndLeftwardsArrow),
'⮩' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedDownwardsAndRightwardsArrow),
'⮪' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedUpwardsAndLeftwardsArrow),
'⮫' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedUpwardsAndRightwardsArrow),
'⮬' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedLeftwardsAndUpwardsArrow),
'⮭' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedRightwardsAndUpwardsArrow),
'⮮' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedLeftwardsAndDownwardsArrow),
'⮯' => Ok(MiscellaneousSymbolsandArrows::BlackCurvedRightwardsAndDownwardsArrow),
'⮰' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowDownLeft),
'⮱' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowDownRight),
'⮲' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowUpLeft),
'⮳' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowUpRight),
'⮴' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowLeftUp),
'⮵' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowRightUp),
'⮶' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowLeftDown),
'⮷' => Ok(MiscellaneousSymbolsandArrows::RibbonArrowRightDown),
'⮸' => Ok(MiscellaneousSymbolsandArrows::UpwardsWhiteArrowFromBarWithHorizontalBar),
'⮹' => Ok(MiscellaneousSymbolsandArrows::UpArrowheadInARectangleBox),
'⮺' => Ok(MiscellaneousSymbolsandArrows::OverlappingWhiteSquares),
'⮻' => Ok(MiscellaneousSymbolsandArrows::OverlappingWhiteAndBlackSquares),
'⮼' => Ok(MiscellaneousSymbolsandArrows::OverlappingBlackSquares),
'⮽' => Ok(MiscellaneousSymbolsandArrows::BallotBoxWithLightX),
'⮾' => Ok(MiscellaneousSymbolsandArrows::CircledX),
'⮿' => Ok(MiscellaneousSymbolsandArrows::CircledBoldX),
'⯀' => Ok(MiscellaneousSymbolsandArrows::BlackSquareCentred),
'⯁' => Ok(MiscellaneousSymbolsandArrows::BlackDiamondCentred),
'⯂' => Ok(MiscellaneousSymbolsandArrows::TurnedBlackPentagon),
'⯃' => Ok(MiscellaneousSymbolsandArrows::HorizontalBlackOctagon),
'⯄' => Ok(MiscellaneousSymbolsandArrows::BlackOctagon),
'⯅' => Ok(MiscellaneousSymbolsandArrows::BlackMediumUpDashPointingTriangleCentred),
'⯆' => Ok(MiscellaneousSymbolsandArrows::BlackMediumDownDashPointingTriangleCentred),
'⯇' => Ok(MiscellaneousSymbolsandArrows::BlackMediumLeftDashPointingTriangleCentred),
'⯈' => Ok(MiscellaneousSymbolsandArrows::BlackMediumRightDashPointingTriangleCentred),
'⯉' => Ok(MiscellaneousSymbolsandArrows::NeptuneFormTwo),
'⯊' => Ok(MiscellaneousSymbolsandArrows::TopHalfBlackCircle),
'⯋' => Ok(MiscellaneousSymbolsandArrows::BottomHalfBlackCircle),
'⯌' => Ok(MiscellaneousSymbolsandArrows::LightFourPointedBlackCusp),
'⯍' => Ok(MiscellaneousSymbolsandArrows::RotatedLightFourPointedBlackCusp),
'⯎' => Ok(MiscellaneousSymbolsandArrows::WhiteFourPointedCusp),
'⯏' => Ok(MiscellaneousSymbolsandArrows::RotatedWhiteFourPointedCusp),
'⯐' => Ok(MiscellaneousSymbolsandArrows::SquarePositionIndicator),
'⯑' => Ok(MiscellaneousSymbolsandArrows::UncertaintySign),
'⯒' => Ok(MiscellaneousSymbolsandArrows::GroupMark),
'⯓' => Ok(MiscellaneousSymbolsandArrows::PlutoFormTwo),
'⯔' => Ok(MiscellaneousSymbolsandArrows::PlutoFormThree),
'⯕' => Ok(MiscellaneousSymbolsandArrows::PlutoFormFour),
'⯖' => Ok(MiscellaneousSymbolsandArrows::PlutoFormFive),
'⯗' => Ok(MiscellaneousSymbolsandArrows::Transpluto),
'⯘' => Ok(MiscellaneousSymbolsandArrows::Proserpina),
'⯙' => Ok(MiscellaneousSymbolsandArrows::Astraea),
'⯚' => Ok(MiscellaneousSymbolsandArrows::Hygiea),
'⯛' => Ok(MiscellaneousSymbolsandArrows::Pholus),
'⯜' => Ok(MiscellaneousSymbolsandArrows::Nessus),
'⯝' => Ok(MiscellaneousSymbolsandArrows::WhiteMoonSelena),
'⯞' => Ok(MiscellaneousSymbolsandArrows::BlackDiamondOnCross),
'⯟' => Ok(MiscellaneousSymbolsandArrows::TrueLightMoonArta),
'⯠' => Ok(MiscellaneousSymbolsandArrows::Cupido),
'⯡' => Ok(MiscellaneousSymbolsandArrows::Hades),
'⯢' => Ok(MiscellaneousSymbolsandArrows::Zeus),
'⯣' => Ok(MiscellaneousSymbolsandArrows::Kronos),
'⯤' => Ok(MiscellaneousSymbolsandArrows::Apollon),
'⯥' => Ok(MiscellaneousSymbolsandArrows::Admetos),
'⯦' => Ok(MiscellaneousSymbolsandArrows::Vulcanus),
'⯧' => Ok(MiscellaneousSymbolsandArrows::Poseidon),
'⯨' => Ok(MiscellaneousSymbolsandArrows::LeftHalfBlackStar),
'⯩' => Ok(MiscellaneousSymbolsandArrows::RightHalfBlackStar),
'⯪' => Ok(MiscellaneousSymbolsandArrows::StarWithLeftHalfBlack),
'⯫' => Ok(MiscellaneousSymbolsandArrows::StarWithRightHalfBlack),
'⯬' => Ok(MiscellaneousSymbolsandArrows::LeftwardsTwoDashHeadedArrowWithTriangleArrowheads),
'⯭' => Ok(MiscellaneousSymbolsandArrows::UpwardsTwoDashHeadedArrowWithTriangleArrowheads),
'⯮' => Ok(MiscellaneousSymbolsandArrows::RightwardsTwoDashHeadedArrowWithTriangleArrowheads),
'⯯' => Ok(MiscellaneousSymbolsandArrows::DownwardsTwoDashHeadedArrowWithTriangleArrowheads),
'⯰' => Ok(MiscellaneousSymbolsandArrows::ErisFormOne),
'⯱' => Ok(MiscellaneousSymbolsandArrows::ErisFormTwo),
'⯲' => Ok(MiscellaneousSymbolsandArrows::Sedna),
'⯳' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolVigintile),
'⯴' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolNovile),
'⯵' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolQuintile),
'⯶' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolBinovile),
'⯷' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolSentagon),
'⯸' => Ok(MiscellaneousSymbolsandArrows::RussianAstrologicalSymbolTredecile),
'⯹' => Ok(MiscellaneousSymbolsandArrows::EqualsSignWithInfinityBelow),
'⯺' => Ok(MiscellaneousSymbolsandArrows::UnitedSymbol),
'⯻' => Ok(MiscellaneousSymbolsandArrows::SeparatedSymbol),
'⯼' => Ok(MiscellaneousSymbolsandArrows::DoubledSymbol),
'⯽' => Ok(MiscellaneousSymbolsandArrows::PassedSymbol),
'⯾' => Ok(MiscellaneousSymbolsandArrows::ReversedRightAngle),
_ => Err(()),
}
}
}
impl Into<u32> for MiscellaneousSymbolsandArrows {
    /// Returns the Unicode code point of this character.
    fn into(self) -> u32 {
        // A `char` is a Unicode scalar value, so casting yields the code
        // point directly. The previous implementation round-tripped through
        // the `escape_unicode` string form (`\u{XXXX}`), stripped the
        // delimiters, and re-parsed the hex with `unwrap` — equivalent but
        // allocation-heavy and needlessly fallible.
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for MiscellaneousSymbolsandArrows {
    type Error = ();
    /// Attempts to map a raw code point onto a character of this block.
    ///
    /// Fails with `()` when `u` is not a valid Unicode scalar value or the
    /// resulting character does not belong to this block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        // Narrow the integer to a `char` first, then defer to the
        // char-based conversion; both failure modes collapse to `Err(())`.
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
impl Iterator for MiscellaneousSymbolsandArrows {
    type Item = Self;
    /// Yields the character at the next code point, ending the iteration
    /// once the code point falls outside this block.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let code: u32 = (*self).into();
        Self::try_from(code + 1).ok()
    }
}
impl MiscellaneousSymbolsandArrows {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::NorthEastWhiteArrow
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Prefix the variant's Debug name with the block name, then let
        // `string_morph` split the CamelCase run into a sentence.
        let tagged = format!("MiscellaneousSymbolsandArrows{:#?}", self);
        string_morph::to_sentence_case(&tagged)
    }
}
|
extern crate rsh;
use std::env;
/// Entry point: builds the shell state from the optional first CLI
/// argument (a script path) and hands control to `rsh::run`.
fn main() {
    // `nth(1)` skips argv[0] (the program name) in a single step.
    let state = match env::args().nth(1) {
        Some(path) => rsh::State::new(path),
        None => rsh::State::default(),
    };
    rsh::run(state)
}
|
//! Utility macros for code generation.
#![macro_use]
/// Applies `$fn` to a `VecCopy`, mapping valid numeric data types to corresponding generic
/// parameters. For example, passing a `VecCopy` containing data of type `u8` will cause this
/// macro to call `$fn` with type parameter `u8` like `$fn::<u8>(buffer)`.
///
/// If the buffer's element type is not one of the supported numeric types
/// (`u8`/`i8`/`u16`/`i16`/`u32`/`i32`/`u64`/`i64`/`f32`/`f64`), the fallback
/// block given after `or` is evaluated instead.
/// # Examples
/// ```rust
/// # #[macro_use] extern crate data_buffer as buf;
/// # use std::fmt;
/// # use std::any::Any;
/// # use buf::VecCopy;
/// // Implement pretty printing of a `VecCopy` derivative for numeric buffers.
/// struct MyBuffer(VecCopy);
/// impl fmt::Display for MyBuffer {
///     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
///         unsafe fn display_buf<T: Copy + Any + fmt::Display>(buf: &VecCopy, f: &mut fmt::Formatter) {
///             for item in buf.iter::<T>().unwrap() {
///                 write!(f, "{} ", item)
///                     .expect("Error occurred while writing MyBuffer.");
///             }
///         }
///         call_numeric_buffer_fn!( display_buf::<_>(&self.0, f) or {});
///         write!(f, "")
///     }
/// }
/// ```
#[macro_export]
macro_rules! call_numeric_buffer_fn {
    // Main free-function arm: dispatches on the buffer's runtime TypeId and
    // calls `$fn` with its first generic parameter bound to the matching
    // numeric type. NOTE(review): the call is wrapped in `unsafe` — callers
    // must uphold whatever contract `$fn` declares for that type parameter.
    ($fn:ident ::<_,$($params:ident),*>( $data:expr, $($args:expr),* ) or $err:block ) => {
        {
            let buf = $data;
            unsafe {
                match buf.element_type_id() {
                    x if x == ::std::any::TypeId::of::<u8>() => $fn::<u8,$($params),*> (buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<i8>() => $fn::<i8,$($params),*> (buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<u16>() => $fn::<u16,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<i16>() => $fn::<i16,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<u32>() => $fn::<u32,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<i32>() => $fn::<i32,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<u64>() => $fn::<u64,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<i64>() => $fn::<i64,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<f32>() => $fn::<f32,$($params),*>(buf, $($args),*),
                    x if x == ::std::any::TypeId::of::<f64>() => $fn::<f64,$($params),*>(buf, $($args),*),
                    _ => $err,
                }
            }
        }
    };
    // Same thing as above but with exactly one generic parameter (the `_`).
    ($fn:ident ::<_>( $($args:expr),* ) or $err:block ) => {
        call_numeric_buffer_fn!($fn ::<_,>( $($args),* ) or $err )
    };
    // Same thing as above but with a single function argument (the buffer).
    ($fn:ident ::<_,$($params:ident),*>( $data:expr ) or $err:block ) => {
        call_numeric_buffer_fn!($fn ::<_,$($params),*>( $data, ) or $err )
    };
    // Method-call arm: same TypeId dispatch, but invokes `$data.$fn::<T,...>`
    // as a member function instead of a free function.
    ($data:ident . $fn:ident ::<_,$($params:ident),*>( $($args:expr),* ) or $err:block ) => {
        {
            let buf = $data;
            unsafe {
                match buf.element_type_id() {
                    x if x == ::std::any::TypeId::of::<u8>() => buf.$fn::<u8,$($params),*> ($($args),*),
                    x if x == ::std::any::TypeId::of::<i8>() => buf.$fn::<i8,$($params),*> ($($args),*),
                    x if x == ::std::any::TypeId::of::<u16>() => buf.$fn::<u16,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<i16>() => buf.$fn::<i16,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<u32>() => buf.$fn::<u32,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<i32>() => buf.$fn::<i32,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<u64>() => buf.$fn::<u64,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<i64>() => buf.$fn::<i64,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<f32>() => buf.$fn::<f32,$($params),*>($($args),*),
                    x if x == ::std::any::TypeId::of::<f64>() => buf.$fn::<f64,$($params),*>($($args),*),
                    _ => $err,
                }
            }
        }
    };
    // Method-call arm with exactly one generic parameter.
    ($data:ident . $fn:ident ::<_>( $($args:expr),* ) or $err:block ) => {
        call_numeric_buffer_fn!($data . $fn ::<_,>( $($args),* ) or $err )
    };
}
|
use crate::{RcAny, Wrc};
use config::{Config, Environment, File};
use lazy_static::lazy_static;
use regex::Regex;
use std::any::{type_name, TypeId};
use std::collections::HashMap;
use std::env;
use std::env::args;
use std::marker::PhantomData;
pub mod profiles {
    //! Marker types used as the `P` parameter of `Container<P>` to select
    //! which configuration profile (`config/<profile>` file) is loaded.
    pub struct Default;
    pub struct Dev;
    pub struct Test;
}
/// A DI-managed component. The `__inexor_rgf_core_di_*` methods are hooks
/// — presumably implemented by a derive/proc macro rather than by hand;
/// confirm against the macro crate before relying on them directly.
pub trait Component {
    // Constructs the component, resolving its dependencies from `container`.
    fn __inexor_rgf_core_di_create<P>(container: &mut Container<P>) -> Self;
    // Second pass: injects dependencies that had to be deferred (e.g. cycles).
    fn __inexor_rgf_core_di_inject_deferred<P>(container: &mut Container<P>, component: &Self);
}
/// A container-backed source of `Impl` instances for the interface `T`.
pub trait Provider<T: ?Sized> {
    /// Concrete implementation type produced for `T`.
    type Impl;
    /// Returns the shared (container-cached) instance.
    fn get(&mut self) -> Wrc<Self::Impl>;
    /// Builds a brand-new instance on every call.
    fn create(&mut self) -> Self::Impl;
    /// Returns a plain reference to the shared instance.
    fn get_ref(&mut self) -> &Self::Impl {
        // Value under RC is still stored in container, so it can be safely returned as a reference
        // that has the same life as container reference
        // SAFETY: relies on the invariant stated above — the value behind the
        // `Wrc` returned by `get` is also retained inside the container, so it
        // outlives the temporary created here. NOTE(review): this is unsound
        // if the container can ever evict components; confirm it cannot.
        unsafe { Wrc::as_ptr(&Self::get(self)).as_ref().unwrap() }
    }
    /// Like [`Provider::create`], but boxed.
    fn create_boxed(&mut self) -> Box<Self::Impl> {
        Box::new(Self::create(self))
    }
}
/// Dependency-injection container for profile `P`.
pub struct Container<P> {
    // Zero-sized marker tying the container to its profile type.
    profile: PhantomData<P>,
    // Layered configuration (default file + profile file + environment).
    pub config: Config,
    // Instantiated components, keyed by their concrete TypeId.
    pub components: HashMap<TypeId, RcAny>,
}
impl<P> Container<P> {
    /// Builds a container for profile `P`, layering configuration from
    /// `config/default`, then `config/<profile>` (skipped for the default
    /// profile), then environment variables (highest precedence).
    ///
    /// # Panics
    /// Panics if any configuration source fails to merge.
    pub fn new() -> Container<P> {
        let mut config = Config::new();
        config
            .merge(File::with_name("config/default").required(false))
            .expect("Failed to read default config file");
        let profile = profile_name::<P>();
        // Compare against the literal directly — the original allocated a
        // fresh `String` ("default".to_string()) on every comparison.
        if profile != "default" {
            config
                .merge(File::with_name(&format!("config/{}", profile)).required(false))
                // `unwrap_or_else` only builds the panic message on the error
                // path; `.expect(format!(..).as_str())` formatted it eagerly
                // even on success (clippy: expect_fun_call).
                .unwrap_or_else(|_| panic!("Failed to read {} config file", profile));
        }
        config.merge(Environment::new()).expect("Failed to load environment");
        // config.merge(parse_args())
        //     .expect("Failed to parse args");
        Container {
            config,
            profile: PhantomData::<P>,
            components: HashMap::new(),
        }
    }
}
lazy_static! {
    /// Globally cached profile name, resolved once on first access from the
    /// CLI, environment, or config file (see `parse_profile`).
    pub static ref APP_PROFILE: String = parse_profile();
}
/// Resolves the active profile name with the precedence:
/// `--profile <name>` CLI flag, then the `PROFILE` environment variable,
/// then the `profile` key of `config/default`, then `"default"`.
///
/// # Panics
/// Panics if the default config file exists but cannot be merged.
fn parse_profile() -> String {
    let mut config = Config::new();
    config
        .merge(File::with_name("config/default").required(false))
        .expect("Failed to read default config file");
    let profile_arg = args().position(|arg| arg.as_str() == "--profile").and_then(|arg_pos| args().nth(arg_pos + 1));
    let parsed_profile = profile_arg
        // `or_else`/`unwrap_or_else` keep the fallbacks lazy: the env and
        // config lookups (and the default-String allocation) run only when
        // every higher-precedence source is absent (clippy: or_fun_call).
        .or_else(|| env::var("PROFILE").ok())
        .or_else(|| config.get_str("profile").ok())
        .unwrap_or_else(|| "default".to_string());
    log::info!("Using profile: {}", parsed_profile);
    parsed_profile
}
/// Parses `--key value` pairs and bare `--flag` switches from the command
/// line into a `Config`. A `--key` followed by another `--...` token (or by
/// nothing) is treated as a boolean flag set to `true`; otherwise the next
/// token is consumed as the key's value. Non-`--` tokens are ignored.
pub fn parse_args() -> Config {
    let mut config = Config::new();
    let mut args = args().peekable();
    while let Some(arg) = args.next() {
        if arg.starts_with("--") {
            let is_bool_flag = match args.peek() {
                Some(next) => next.starts_with("--"),
                None => true,
            };
            if is_bool_flag {
                config.set(&arg[2..], true).unwrap();
            } else {
                // BUG FIX: the original shadowed `arg` with the *value*
                // token, stripped two characters off the value to form the
                // key, and then consumed the following argument as the
                // value. The key must come from the `--`-prefixed flag
                // itself and the very next token is its value.
                config.set(&arg[2..], args.next().unwrap()).unwrap();
            }
        }
    }
    config
}
/// Derives a lowercase profile name from the type `T`: the last `::`
/// segment of its type name, lowercased (e.g. `profiles::Dev` -> "dev").
pub fn profile_name<T>() -> String {
    let profile_type_name = type_name::<T>().to_lowercase();
    // Keep only the final path segment. `rsplit("::").next()` is equivalent
    // to deleting the greedy `.*::` prefix, but avoids compiling a fresh
    // regex on every call (clippy: regex_creation_in_loops territory).
    // `rsplit` always yields at least one item, so `unwrap` cannot fail.
    profile_type_name.rsplit("::").next().unwrap().to_string()
}
|
use bytes::{ Bytes, Buf, BytesMut, BufMut };
#[derive(Debug, PartialEq, Clone)]
pub struct NodeIDWithCallbackRequest {
pub node_id : u8,
pub callback : u8,
}
impl NodeIDWithCallbackRequest {
pub fn encode(&self, dst: &mut BytesMut) {
dst.put_u8(self.node_id);
dst.put_u8(self.callback);
}
pub fn decode(src: &mut Bytes) -> NodeIDWithCallbackRequest {
let node_id = src.get_u8();
let callback = src.get_u8();
NodeIDWithCallbackRequest { node_id, callback }
}
} |
#[doc = "Register `HWCFGR2` reader"]
pub type R = crate::R<HWCFGR2_SPEC>;
#[doc = "Field `MASTERID1` reader - Hardware Configuration valid bus masters ID1"]
pub type MASTERID1_R = crate::FieldReader;
#[doc = "Field `MASTERID2` reader - Hardware Configuration valid bus masters ID2"]
pub type MASTERID2_R = crate::FieldReader;
#[doc = "Field `MASTERID3` reader - Hardware Configuration valid bus masters ID3"]
pub type MASTERID3_R = crate::FieldReader;
#[doc = "Field `MASTERID4` reader - Hardware Configuration valid bus masters ID4"]
pub type MASTERID4_R = crate::FieldReader;
impl R {
    // Each accessor shifts its 4-bit field down to bit 0 and masks it off.
    #[doc = "Bits 0:3 - Hardware Configuration valid bus masters ID1"]
    #[inline(always)]
    pub fn masterid1(&self) -> MASTERID1_R {
        MASTERID1_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - Hardware Configuration valid bus masters ID2"]
    #[inline(always)]
    pub fn masterid2(&self) -> MASTERID2_R {
        MASTERID2_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - Hardware Configuration valid bus masters ID3"]
    #[inline(always)]
    pub fn masterid3(&self) -> MASTERID3_R {
        MASTERID3_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - Hardware Configuration valid bus masters ID4"]
    #[inline(always)]
    pub fn masterid4(&self) -> MASTERID4_R {
        MASTERID4_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
}
#[doc = "Semaphore hardware configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr2::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HWCFGR2_SPEC;
impl crate::RegisterSpec for HWCFGR2_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`hwcfgr2::R`](R) reader structure"]
impl crate::Readable for HWCFGR2_SPEC {}
#[doc = "`reset()` method sets HWCFGR2 to value 0x84"]
impl crate::Resettable for HWCFGR2_SPEC {
const RESET_VALUE: Self::Ux = 0x84;
}
|
/**
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
*/
pub fn execute() {
    // Seed the prime list with 2, then test every candidate from 3 upward.
    let mut primes: Vec<u64> = vec![2];
    for candidate in 3..2_000_000u64 {
        let prime = check_already_found_primes(candidate, &primes);
        // A non-zero return is the candidate itself, confirmed prime.
        if prime != 0 {
            println!("Found new prime: {}", prime);
            primes.push(prime);
            println!("primes len = {}", primes.len());
        }
    }
    let total: u64 = primes.iter().sum();
    println!("The sum of all primes is = {}", total);
}
/// Primality test for `to_test` given a (sorted, ascending) list of already
/// discovered primes. Returns `to_test` if it is prime, `0` otherwise.
/// Even numbers — including 2 itself — report 0, matching the caller that
/// pre-seeds 2.
fn check_already_found_primes(to_test: u64, primes: &[u64]) -> u64 {
    if to_test < 2 {
        return 0;
    }
    if to_test % 2 == 0 {
        return 0;
    }
    // A composite n must have a divisor <= sqrt(n), so trial division can
    // stop there. The original continued testing EVERY integer up to
    // `to_test` itself, making each call O(n) and the overall sieve
    // quadratic; it also panicked on an empty `primes` list.
    let mut largest_checked = 1;
    for &prime in primes {
        if prime * prime > to_test {
            return to_test;
        }
        if to_test % prime == 0 {
            return 0;
        }
        largest_checked = prime;
    }
    // Fallback for callers whose prime list does not reach sqrt(to_test):
    // continue plain trial division from the last checked value.
    let mut divisor = largest_checked + 1;
    while divisor * divisor <= to_test {
        if to_test % divisor == 0 {
            return 0;
        }
        divisor += 1;
    }
    to_test
}
// Copyright 2016 FullContact, Inc
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::iter::Iterator;
use std::ops::{Deref, DerefMut};
use cursor::Cursor;
use error::Result;
use tx::ConstAccessor;
use traits::*;
/// A mutable value which is either owned or borrowed from an owning context.
///
/// Unlike `Cow`, the shared reference can be mutated through this wrapper,
/// but ownership can never be taken out of the `Borrowed` variant.
#[derive(Debug)]
#[allow(missing_docs)]
pub enum MaybeOwned<'a, T : 'a> {
    Owned(T),
    Borrowed(&'a mut T),
}
impl<'a, T : 'a> Deref for MaybeOwned<'a, T> {
    type Target = T;
    /// Immutably borrows the wrapped value, whichever variant holds it.
    fn deref(&self) -> &T {
        match *self {
            MaybeOwned::Borrowed(ref inner) => &**inner,
            MaybeOwned::Owned(ref inner) => inner,
        }
    }
}
impl<'a, T : 'a> DerefMut for MaybeOwned<'a, T> {
    /// Mutably borrows the wrapped value, whichever variant holds it.
    fn deref_mut(&mut self) -> &mut T {
        match *self {
            MaybeOwned::Borrowed(ref mut inner) => &mut **inner,
            MaybeOwned::Owned(ref mut inner) => inner,
        }
    }
}
/// An iterator over items returned by successive calls of some function on
/// `Cursor` until a `NOTFOUND` error is returned.
///
/// Special handling is afforded the first item in the iterator, since the
/// simple act of positioning the cursor produces the first item.
pub struct CursorIter<'a, 'access: 'a, 'txn: 'access, 'db: 'txn, T> {
    // The cursor being walked; may be owned here or borrowed from a caller.
    cursor: MaybeOwned<'a, Cursor<'txn,'db>>,
    // Accessor granting read access within the transaction.
    access: &'access ConstAccessor<'txn>,
    // First item, produced while positioning the cursor; handed out once.
    head: Option<T>,
    // Advance function (plain fn pointer so `T`'s producer need not appear
    // in this type) invoked for every item after the first.
    next: fn (&mut Cursor<'txn,'db>, &'access ConstAccessor<'txn>)
             -> Result<T>,
}
impl<'a, 'access: 'a, 'txn: 'access, 'db: 'txn, T>
CursorIter<'a, 'access, 'txn, 'db, T> {
    /// Creates a cursor iterator from the given cursor and accessor.
    ///
    /// `head` is invoked immediately on `cursor` and `accessor` to position
    /// the cursor. The value it returns (if any) will be used as the first
    /// value produced by the cursor.
    ///
    /// Beyond the first item, `next` will be invoked on `cursor` and
    /// `accessor` to produce further items. Note that this is a plain function
    /// pointer and not a function object so that the type does not need to be
    /// encoded in the type of this iterator.
    ///
    /// ## Example
    ///
    /// ```
    /// # include!(concat!(env!("CARGO_MANIFEST_DIR"),"/src/example_helpers.rs"));
    /// # fn main() {
    /// # let env = create_env();
    /// # let db = dupdb(&env);
    /// let txn = lmdb::WriteTransaction::new(&env).unwrap();
    /// {
    ///   let mut access = txn.access();
    ///   let f = lmdb::put::Flags::empty();
    ///   access.put(&db, "Fruit", "Apple", f).unwrap();
    ///   access.put(&db, "Fruit", "Orange", f).unwrap();
    ///   access.put(&db, "Animal", "Badger", f).unwrap();
    ///   access.put(&db, "Veggie", "Carrot", f).unwrap();
    ///
    ///   let mut cursor = txn.cursor(&db).unwrap();
    ///   let mut iter = lmdb::CursorIter::new(
    ///     lmdb::MaybeOwned::Borrowed(&mut cursor), &*access,
    ///     |c, a| c.first(a), lmdb::Cursor::next::<str,str>).unwrap();
    ///   assert_eq!(("Animal", "Badger"), iter.next().unwrap().unwrap());
    ///   assert_eq!(("Fruit", "Apple"), iter.next().unwrap().unwrap());
    ///   assert_eq!(("Fruit", "Orange"), iter.next().unwrap().unwrap());
    ///   assert_eq!(("Veggie", "Carrot"), iter.next().unwrap().unwrap());
    ///   assert!(iter.next().is_none());
    /// }
    /// txn.commit().unwrap();
    /// # }
    /// ```
    pub fn new<H : FnOnce(&mut Cursor<'txn,'db>,
                          &'access ConstAccessor<'txn>)
                          -> Result<T>>
        (mut cursor: MaybeOwned<'a, Cursor<'txn,'db>>,
         access: &'access ConstAccessor<'txn>,
         head: H,
         next: fn (&mut Cursor<'txn,'db>, &'access ConstAccessor<'txn>)
                  -> Result<T>)
         -> Result<Self>
    {
        // Position the cursor now; `to_opt()` maps NOTFOUND to Ok(None) so
        // an empty range simply yields an iterator with no head item.
        let head_val = try!(head(&mut*cursor, access).to_opt());
        Ok(CursorIter {
            cursor: cursor,
            access: access,
            head: head_val,
            next: next,
        })
    }
}
impl<'a, 'access: 'a, 'txn: 'access, 'db: 'txn, T> Iterator
for CursorIter<'a, 'access, 'txn, 'db, T> {
    type Item = Result<T>;
    /// Yields the head item captured during positioning exactly once, then
    /// repeatedly applies the stored advance function until it reports
    /// NOTFOUND (mapped to `None` by `to_opt`).
    fn next(&mut self) -> Option<Result<T>> {
        if let Some(first) = self.head.take() {
            return Some(Ok(first));
        }
        match (self.next)(&mut*self.cursor, self.access).to_opt() {
            Ok(Some(value)) => Some(Ok(value)),
            Ok(None) => None,
            Err(error) => Some(Err(error.into())),
        }
    }
}
|
use std::{io::Write, thread};
use std::{ops::Add, time};
/// Ticks once per second for two seconds, printing the running total after
/// each tick.
fn main() {
    let target = time::Duration::new(2, 0);
    let mut elapsed = time::Duration::new(0, 0);
    // One-second steps guarantee `elapsed` lands exactly on `target`.
    while elapsed < target {
        thread::sleep(time::Duration::from_secs(1));
        elapsed += time::Duration::from_secs(1);
        println!("{:?}", elapsed);
    }
}
|
use std::{sync::Arc, time::Duration, time::Instant};
use rodio::Sink;
/// Hash-map key identifying a fade by the sink it controls (by pointer
/// identity — see the `Hash`/`PartialEq` impls below).
pub(crate) struct FadeKey {
    pub sink: Arc<Sink>,
}
impl std::hash::Hash for FadeKey {
    /// Hashes the address of the `Sink` allocation, not its contents, so two
    /// keys collide exactly when they refer to the same sink instance.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        std::ptr::hash(&*self.sink, state);
    }
}
impl std::cmp::PartialEq for FadeKey {
    /// Pointer-identity equality, consistent with the `Hash` impl above:
    /// equal iff both keys wrap the very same `Arc<Sink>` allocation.
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.sink, &other.sink)
    }
}
// Pointer identity is reflexive, so `Eq` holds.
impl std::cmp::Eq for FadeKey {}
/// What a fade does once (and while) it runs.
pub(crate) enum FadeType {
    // Fade out, then pause the sink on completion.
    Pause,
    // Fade in after resuming playback.
    Resume,
    // Plain volume ramp with no playback state change.
    AlterVolume,
}
/// A linear volume ramp applied to a sink over a fixed duration.
pub(crate) struct Fade {
    sink: Arc<Sink>,
    // Volume captured when the fade was created.
    start_volume: f32,
    // Volume to reach when the fade completes.
    dest_volume: f32,
    start_time: Instant,
    duration: Duration,
    fade_type: FadeType,
}
impl Fade {
pub fn new(sink: Arc<Sink>, dest_volume: f32, duration: Duration, fade_type: FadeType) -> Self {
let start_volume = sink.volume();
let start_time = Instant::now();
Self {
sink,
start_volume,
dest_volume,
start_time,
duration,
fade_type,
}
}
pub fn merge_pair(old: Self, new: Self) -> Self {
use FadeType::*;
match (&old.fade_type, &new.fade_type) {
(AlterVolume, _) => new,
(Pause, _) => old,
(Resume, _) => old,
}
}
pub fn update(&self, now: Instant) -> bool {
let current_duration = now.duration_since(self.start_time);
let t = current_duration.as_secs_f32() / self.duration.as_secs_f32();
let new_volume = self.start_volume + ((self.dest_volume - self.start_volume) * t.min(1.0));
self.sink.set_volume(new_volume);
let finished = t >= 1.0;
if finished {
if let FadeType::Pause = &self.fade_type {
self.sink.pause();
}
}
!finished
}
pub fn key(&self) -> FadeKey {
FadeKey {
sink: self.sink.clone(),
}
}
}
|
/**********************************************
> File Name : circular_queue.rs
> Author : lunar
> Email : lunar_ubuntu@qq.com
> Created Time : Tue 14 Dec 2021 10:05:05 AM CST
> Location : Shanghai
> Copyright@ https://github.com/xiaoqixian
**********************************************/
/// Fixed-capacity ring buffer of `i32`s (LeetCode 622 API).
struct MyCircularQueue {
    // Backing storage, allocated once at the requested capacity.
    cq: Vec<i32>,
    // Index of the oldest element.
    head: usize,
    // Index one past the newest element (wraps around).
    tail: usize,
    // Number of elements currently stored.
    alloc: usize
}
/// All index arithmetic below wraps with `% capacity`; `alloc` alone
/// distinguishes the empty and full states (where head == tail).
impl MyCircularQueue {
    /// Creates an empty queue with capacity `k`.
    fn new(k: i32) -> Self {
        Self {
            cq: vec![0; k as usize],
            head: 0,
            tail: 0,
            alloc: 0
        }
    }
    /// Appends `value`; returns `false` when the queue is full.
    fn en_queue(&mut self, value: i32) -> bool {
        if self.is_full() {
            return false;
        }
        self.cq[self.tail] = value;
        self.tail = (self.tail + 1) % self.cq.len();
        self.alloc += 1;
        true
    }
    /// Drops the oldest element; returns `false` when the queue is empty.
    fn de_queue(&mut self) -> bool {
        if self.is_empty() {
            return false;
        }
        self.head = (self.head + 1) % self.cq.len();
        self.alloc -= 1;
        true
    }
    /// Oldest element, or -1 when empty.
    fn front(&self) -> i32 {
        if self.is_empty() {
            -1
        } else {
            self.cq[self.head]
        }
    }
    /// Newest element, or -1 when empty.
    fn rear(&self) -> i32 {
        if self.is_empty() {
            -1
        } else {
            // `tail` points one past the newest slot; step back with wraparound.
            self.cq[(self.tail + self.cq.len() - 1) % self.cq.len()]
        }
    }
    fn is_empty(&self) -> bool {
        self.alloc == 0
    }
    fn is_full(&self) -> bool {
        self.alloc == self.cq.len()
    }
}
/**
* Your MyCircularQueue object will be instantiated and called as such:
* let obj = MyCircularQueue::new(k);
* let ret_1: bool = obj.en_queue(value);
* let ret_2: bool = obj.de_queue();
* let ret_3: i32 = obj.front();
* let ret_4: i32 = obj.rear();
* let ret_5: bool = obj.is_empty();
* let ret_6: bool = obj.is_full();
*/
|
#![crate_name = "music_generator"]
pub mod representations;
pub use representations::note::Note;
pub use representations::interval::Interval;
/// There are 12 semitone notes in an octave.
// `const` (inlined compile-time value) is the idiomatic form for a plain
// numeric constant; `static` would allocate a single addressed location,
// which nothing here needs. Also fixes the "simi-tone" typo.
const NOTES_PER_OCTAVE: i32 = 12;
|
use ir::Value;
/// A function argument.
///
/// Newtype over [`Value`]; the `impl_subtype!` invocation below presumably
/// generates the `Argument` <-> `Value` conversions — see the macro's
/// definition for the exact items produced.
pub struct Argument<'ctx>(Value<'ctx>);
// Inherent methods for `Argument` (none yet).
impl<'ctx> Argument<'ctx> { }
impl_subtype!(Argument => Value);
|
use crate::{parser::*, val::Val};
use std::collections::BTreeMap;
type Error = &'static str;
/// Variable store for the interpreter: maps variable names to their values.
#[derive(Debug, Default)]
pub struct Environment {
    variables: BTreeMap<String, Val>,
    //functions : std::collections::HashMap<&str, _>
}
impl Environment {
    /// Creates an empty environment with no variables defined.
    pub fn new() -> Environment {
        Environment {
            variables: BTreeMap::new(),
        }
    }
    /// Defines `variable` (or overwrites an existing binding).
    pub fn insert(&mut self, variable: String, value: Val) {
        self.variables.insert(variable, value);
    }
    /// Mutable handle to an existing variable; errors if undeclared.
    pub fn get_mut_ref(&mut self, key: &str) -> Result<&mut Val, Error> {
        self.variables.get_mut(key).ok_or("Undeclared variable")
    }
    /// Shared handle to an existing variable; errors if undeclared.
    // NOTE(review): takes `&mut self` although it only reads — `&self`
    // would suffice; confirm no caller depends on the exclusive borrow.
    pub fn get_ref(&mut self, key: &str) -> Result<&Val, Error> {
        self.variables.get(key).ok_or("Undeclared variable")
    }
    /// Evaluates every element expression and collects them into a
    /// `Val::Vec`; the first failing element aborts the whole vector.
    fn execute_vec(&mut self, v: &[ParseExprNode]) -> Result<Val, Error> {
        Ok(Val::Vec(
            v.iter()
                .map(|n| self.evaluate(n))
                .collect::<Result<Vec<_>, _>>()?,
        ))
    }
    /// Resolves `name[i][j]...`: all index expressions are evaluated first
    /// (they need `&mut self`, which would conflict with holding the
    /// variable borrow), then the stored value is walked one index at a
    /// time and the final element is cloned out.
    fn execute_vec_access(&mut self, name: &str, index: &[ParseExprNode]) -> Result<Val, Error> {
        let computed_indexes: Result<Vec<_>, _> = index.iter().map(|n| self.evaluate(n)).collect();
        let mut a = self.get_mut_ref(name)?;
        for i in computed_indexes? {
            a = a.index(i)?
        }
        Ok(a.clone())
    }
    /// Recursively evaluates an expression tree down to a single `Val`.
    /// Binary nodes carry their operands as a two-element collection:
    /// `s[0]` is the left-hand side, `s[1]` the right-hand side.
    pub fn evaluate(&mut self, node: &ParseExprNode) -> Result<Val, Error> {
        match node {
            ParseExprNode::VarName(a) => Ok(self.get_mut_ref(a)?.clone()),
            ParseExprNode::Number(n) => Ok(n.clone()),
            ParseExprNode::String(s) => Ok(s.clone()),
            ParseExprNode::Bool(b) => Ok(b.clone()),
            ParseExprNode::Null => Ok(Val::Null),
            ParseExprNode::VecAccess(name, index) => self.execute_vec_access(name, index),
            ParseExprNode::Vector(v) => self.execute_vec(v),
            ParseExprNode::Neg(n) => Ok(self.evaluate(&n)?.minus()?),
            ParseExprNode::Mul(s) => self.evaluate(&s[0])?.mul(self.evaluate(&s[1])?),
            ParseExprNode::Div(s) => self.evaluate(&s[0])?.div(self.evaluate(&s[1])?),
            ParseExprNode::Rem(s) => self.evaluate(&s[0])?.rem(self.evaluate(&s[1])?),
            ParseExprNode::Add(s) => self.evaluate(&s[0])?.add(self.evaluate(&s[1])?),
            ParseExprNode::Sub(s) => self.evaluate(&s[0])?.sub(self.evaluate(&s[1])?),
            ParseExprNode::Eq(s) => Ok(Val::Bool(self.evaluate(&s[0])?.eq(&self.evaluate(&s[1])?))),
            ParseExprNode::NotEq(s) => {
                Ok(Val::Bool(self.evaluate(&s[0])?.ne(&self.evaluate(&s[1])?)))
            }
            ParseExprNode::And(s) => self.evaluate(&s[0])?.and(self.evaluate(&s[1])?),
            ParseExprNode::Or(s) => self.evaluate(&s[0])?.or(self.evaluate(&s[1])?),
            ParseExprNode::Not(b) => Ok(self.evaluate(&b)?.not()?),
            ParseExprNode::Gt(s) => Ok(Val::Bool(self.evaluate(&s[0])? > self.evaluate(&s[1])?)),
            ParseExprNode::Lt(s) => Ok(Val::Bool(self.evaluate(&s[0])? < self.evaluate(&s[1])?)),
            ParseExprNode::Gtoe(s) => Ok(Val::Bool(self.evaluate(&s[0])? >= self.evaluate(&s[1])?)),
            ParseExprNode::Ltoe(s) => Ok(Val::Bool(self.evaluate(&s[0])? <= self.evaluate(&s[1])?)),
        }
    }
}
|
use serde::Serialize;
use crate::rocket_contrib::json::Json;
/// JSON payload returned by the health-check endpoint.
#[derive(Serialize)]
pub struct ApiStatus {
    status: String
}
#[get("/")]
pub fn status() -> Json<ApiStatus> {
Json(ApiStatus{
status: String::from("Up and running!")
})
}
|
use std::error::Error;
use std::net::{Ipv4Addr, SocketAddrV4};
use tokio::prelude::*;
/// A mixnet peer addressed by its IPv4 socket address.
#[derive(Debug)]
pub struct MixPeer {
    connection: SocketAddrV4,
}
impl MixPeer {
    // note that very soon `next_hop_address` will be changed to `next_hop_metadata`
    /// Builds a peer from a 32-byte address blob: bytes 0-3 are the IPv4
    /// octets and bytes 4-5 the port in network (big-endian) byte order;
    /// the remaining bytes are currently unused.
    pub fn new(next_hop_address: [u8; 32]) -> MixPeer {
        let b = next_hop_address;
        let host = Ipv4Addr::new(b[0], b[1], b[2], b[3]);
        let port: u16 = u16::from_be_bytes([b[4], b[5]]);
        let socket_address = SocketAddrV4::new(host, port);
        MixPeer {
            connection: socket_address,
        }
    }
    /// Opens a fresh TCP connection to the peer and writes `bytes` to it.
    pub async fn send(&self, bytes: Vec<u8>) -> Result<(), Box<dyn Error>> {
        // `SocketAddrV4` is `Copy`, so the former `.clone()` was redundant
        // (clippy: clone_on_copy) — pass the address by value directly.
        let mut stream = tokio::net::TcpStream::connect(self.connection).await?;
        stream.write_all(&bytes).await?;
        Ok(())
    }
    /// "ip:port" rendering of the peer's address.
    // NOTE(review): an inherent `to_string` shadows the `Display`-derived
    // one; implementing `Display` for `MixPeer` would be more idiomatic.
    pub fn to_string(&self) -> String {
        self.connection.to_string()
    }
}
|
use std::sync::Arc;
use super::{MainIndex, SynonymsIndex, WordsIndex, DocsWordsIndex, DocumentsIndex, CustomSettings};
/// Bundle of the per-purpose column-family wrappers that make up one index.
#[derive(Clone)]
pub struct RawIndex {
    pub main: MainIndex,
    pub synonyms: SynonymsIndex,
    pub words: WordsIndex,
    pub docs_words: DocsWordsIndex,
    pub documents: DocumentsIndex,
    pub custom: CustomSettings,
}
impl RawIndex {
    /// Triggers a full-range compaction on every column family backing this
    /// index (each wrapper's `.0` is its `InnerRawIndex`).
    pub(crate) fn compact(&self) {
        self.main.0.compact_range(None::<&[u8]>, None::<&[u8]>);
        self.synonyms.0.compact_range(None::<&[u8]>, None::<&[u8]>);
        self.words.0.compact_range(None::<&[u8]>, None::<&[u8]>);
        self.docs_words.0.compact_range(None::<&[u8]>, None::<&[u8]>);
        self.documents.0.compact_range(None::<&[u8]>, None::<&[u8]>);
        self.custom.0.compact_range(None::<&[u8]>, None::<&[u8]>);
    }
}
/// Handle to one named column family inside a shared rocksdb database.
#[derive(Clone)]
pub struct InnerRawIndex {
    database: Arc<rocksdb::DB>,
    name: Arc<str>,
}
impl InnerRawIndex {
    /// Wraps a shared database handle together with the name of the column
    /// family all operations below will target.
    pub fn new(database: Arc<rocksdb::DB>, name: Arc<str>) -> InnerRawIndex {
        InnerRawIndex { database, name }
    }
    /// Fetches the value stored under `key`, if any.
    /// Panics if the column family is missing (a database-setup bug).
    pub fn get<K>(&self, key: K) -> Result<Option<rocksdb::DBVector>, rocksdb::Error>
    where K: AsRef<[u8]>,
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.get_cf(handle, key)
    }
    /// Like [`InnerRawIndex::get`], but returns a pinned (zero-copy) slice.
    pub fn get_pinned<K>(&self, key: K) -> Result<Option<rocksdb::DBPinnableSlice>, rocksdb::Error>
    where K: AsRef<[u8]>,
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.get_pinned_cf(handle, key)
    }
    /// Iterates over the column family starting from `from`.
    pub fn iterator(&self, from: rocksdb::IteratorMode) -> Result<rocksdb::DBIterator, rocksdb::Error> {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.iterator_cf(handle, from)
    }
    /// Stores `value` under `key`, overwriting any previous value.
    pub fn set<K, V>(&self, key: K, value: V) -> Result<(), rocksdb::Error>
    where K: AsRef<[u8]>,
          V: AsRef<[u8]>,
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.put_cf(handle, key, value)
    }
    /// Removes `key` if present.
    pub fn delete<K>(&self, key: K) -> Result<(), rocksdb::Error>
    where K: AsRef<[u8]>
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.delete_cf(handle, key)
    }
    /// Atomically deletes all keys in `[start, end)` via a write batch.
    pub fn delete_range<K>(&self, start: K, end: K) -> Result<(), rocksdb::Error>
    where K: AsRef<[u8]>,
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        let mut batch = rocksdb::WriteBatch::default();
        batch.delete_range_cf(handle, start, end)?;
        self.database.write(batch)
    }
    /// Compacts the given key range (`None` bounds mean unbounded).
    pub fn compact_range<S, E>(&self, start: Option<S>, end: Option<E>)
    where S: AsRef<[u8]>,
          E: AsRef<[u8]>,
    {
        let handle = self.database.cf_handle(&self.name).expect("cf not found");
        self.database.compact_range_cf(handle, start, end)
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod operations {
    //! Auto-generated (AutoRust) client for the provider's `operations` endpoint.
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// `GET /providers/Microsoft.TimeSeriesInsights/operations` — lists the
    /// REST operations this resource provider supports.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.TimeSeriesInsights/operations", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Bearer-token auth is optional: attached only when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            // Any non-200 status is surfaced with its raw body for diagnosis.
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    /// Error types for [`list`], one variant per failure stage.
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod environments {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// `GET .../environments/{environment_name}` — fetches a single Time Series
/// Insights environment by subscription, resource group, and name.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
) -> std::result::Result<EnvironmentResource, get::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut req_builder = client.get(uri_str);
    // Bearer-token auth is optional: attached only when a credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(get::BuildRequestError)?;
    let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: EnvironmentResource = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            Ok(rsp_value)
        }
        // Non-200 responses carry an ARM CloudError payload.
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: CloudError = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            get::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Error types for [`get`], one variant per failure stage.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Service returned a non-success status with a parsed CloudError.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// `PUT .../environments/{environment_name}` — creates the environment or
/// updates it in place; 200 means updated, 201 means newly created.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    parameters: &EnvironmentCreateOrUpdateParameters,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut req_builder = client.put(uri_str);
    // Bearer-token auth is optional: attached only when a credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(create_or_update::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(parameters);
    let req = req_builder.build().context(create_or_update::BuildRequestError)?;
    let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: EnvironmentResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            Ok(create_or_update::Response::Ok200(rsp_value))
        }
        StatusCode::CREATED => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: EnvironmentResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            Ok(create_or_update::Response::Created201(rsp_value))
        }
        // 404 carries no CloudError body for this operation.
        StatusCode::NOT_FOUND => create_or_update::NotFound404 {}.fail(),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
            let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
            create_or_update::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
/// Response and error types for [`create_or_update`].
pub mod create_or_update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    /// Success payloads: 200 (updated) vs 201 (created).
    #[derive(Debug)]
    pub enum Response {
        Ok200(EnvironmentResource),
        Created201(EnvironmentResource),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        NotFound404 {},
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        BuildRequestError {
            source: reqwest::Error,
        },
        ExecuteRequestError {
            source: reqwest::Error,
        },
        ResponseBytesError {
            source: reqwest::Error,
        },
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// PATCH an existing Time Series Insights environment with the supplied
/// update parameters and return the updated resource.
///
/// Any non-200 status is decoded as a `CloudError` and surfaced through
/// `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    environment_update_parameters: &EnvironmentUpdateParameters,
) -> std::result::Result<EnvironmentResource, update::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut request = http_client.patch(url.as_str());
    // Authenticate only when a credential is configured (anonymous calls are
    // permitted, e.g. against test doubles).
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(update::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .json(environment_update_parameters)
        .build()
        .context(update::BuildRequestError)?;
    let response = http_client.execute(request).await.context(update::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(update::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: EnvironmentResource =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        update::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the environments `update` operation.
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// DELETE a Time Series Insights environment.
///
/// The service answers 200 (deleted) or 204 (nothing to delete); anything
/// else is decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut request = http_client.delete(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(delete::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(delete::BuildRequestError)?;
    let response = http_client.execute(request).await.context(delete::ExecuteRequestError)?;
    match response.status() {
        StatusCode::OK => Ok(delete::Response::Ok200),
        StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        other => {
            // Only the error path consumes the body.
            let bytes: bytes::Bytes = response.bytes().await.context(delete::ResponseBytesError)?;
            let cloud_error: CloudError =
                serde_json::from_slice(&bytes).context(delete::DeserializeError { body: bytes })?;
            delete::DefaultResponse {
                status_code: other,
                value: cloud_error,
            }
            .fail()
        }
    }
}
/// Response and error types for the environments `delete` operation.
pub mod delete {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // 200 = resource deleted; 204 = no resource existed to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// GET all Time Series Insights environments in a resource group.
///
/// Returns the decoded `EnvironmentListResponse` on 200; any other status is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
) -> std::result::Result<EnvironmentListResponse, list_by_resource_group::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments",
        &operation_config.base_path, subscription_id, resource_group_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_resource_group::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(list_by_resource_group::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(list_by_resource_group::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(list_by_resource_group::ResponseBytesError)?;
    if status == StatusCode::OK {
        let listing: EnvironmentListResponse =
            serde_json::from_slice(&bytes).context(list_by_resource_group::DeserializeError { body: bytes })?;
        Ok(listing)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(list_by_resource_group::DeserializeError { body: bytes })?;
        list_by_resource_group::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the environments `list_by_resource_group` operation.
pub mod list_by_resource_group {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// GET all Time Series Insights environments in a subscription.
///
/// Returns the decoded `EnvironmentListResponse` on 200; any other status is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
) -> std::result::Result<EnvironmentListResponse, list_by_subscription::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/providers/Microsoft.TimeSeriesInsights/environments",
        &operation_config.base_path, subscription_id
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_subscription::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(list_by_subscription::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(list_by_subscription::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(list_by_subscription::ResponseBytesError)?;
    if status == StatusCode::OK {
        let listing: EnvironmentListResponse =
            serde_json::from_slice(&bytes).context(list_by_subscription::DeserializeError { body: bytes })?;
        Ok(listing)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(list_by_subscription::DeserializeError { body: bytes })?;
        list_by_subscription::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the environments `list_by_subscription` operation.
pub mod list_by_subscription {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
pub mod event_sources {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// GET a single event source belonging to an environment.
///
/// Returns the decoded `EventSourceResource` on 200; any other status is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    event_source_name: &str,
) -> std::result::Result<EventSourceResource, get::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/eventSources/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, event_source_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(get::BuildRequestError)?;
    let response = http_client.execute(request).await.context(get::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: EventSourceResource =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        get::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the event sources `get` operation.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
environment_name: &str,
event_source_name: &str,
parameters: &EventSourceCreateOrUpdateParameters,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/eventSources/{}",
&operation_config.base_path, subscription_id, resource_group_name, environment_name, event_source_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: EventSourceResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: EventSourceResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
create_or_update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
/// Response and error types for the event sources `create_or_update` operation.
pub mod create_or_update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // 200 = an existing event source was updated; 201 = a new one was created.
    #[derive(Debug)]
    pub enum Response {
        Ok200(EventSourceResource),
        Created201(EventSourceResource),
    }
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// PATCH an existing event source with the given update parameters and
/// return the updated `EventSourceResource`.
///
/// Any non-200 status is decoded as a `CloudError` and surfaced through
/// `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    event_source_name: &str,
    event_source_update_parameters: &EventSourceUpdateParameters,
) -> std::result::Result<EventSourceResource, update::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/eventSources/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, event_source_name
    );
    let mut request = http_client.patch(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(update::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .json(event_source_update_parameters)
        .build()
        .context(update::BuildRequestError)?;
    let response = http_client.execute(request).await.context(update::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(update::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: EventSourceResource =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        update::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the event sources `update` operation.
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// DELETE an event source from an environment.
///
/// The service answers 200 (deleted) or 204 (nothing to delete); anything
/// else is decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    event_source_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/eventSources/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, event_source_name
    );
    let mut request = http_client.delete(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(delete::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(delete::BuildRequestError)?;
    let response = http_client.execute(request).await.context(delete::ExecuteRequestError)?;
    match response.status() {
        StatusCode::OK => Ok(delete::Response::Ok200),
        StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        other => {
            // Only the error path consumes the body.
            let bytes: bytes::Bytes = response.bytes().await.context(delete::ResponseBytesError)?;
            let cloud_error: CloudError =
                serde_json::from_slice(&bytes).context(delete::DeserializeError { body: bytes })?;
            delete::DefaultResponse {
                status_code: other,
                value: cloud_error,
            }
            .fail()
        }
    }
}
/// Response and error types for the event sources `delete` operation.
pub mod delete {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // 200 = resource deleted; 204 = no resource existed to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// GET all event sources registered under an environment.
///
/// Returns the decoded `EventSourceListResponse` on 200; any other status is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn list_by_environment(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
) -> std::result::Result<EventSourceListResponse, list_by_environment::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/eventSources",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_environment::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(list_by_environment::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(list_by_environment::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(list_by_environment::ResponseBytesError)?;
    if status == StatusCode::OK {
        let listing: EventSourceListResponse =
            serde_json::from_slice(&bytes).context(list_by_environment::DeserializeError { body: bytes })?;
        Ok(listing)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(list_by_environment::DeserializeError { body: bytes })?;
        list_by_environment::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the event sources `list_by_environment` operation.
pub mod list_by_environment {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
pub mod reference_data_sets {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// GET a single reference data set belonging to an environment.
///
/// Returns the decoded `ReferenceDataSetResource` on 200; any other status
/// is decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    reference_data_set_name: &str,
) -> std::result::Result<ReferenceDataSetResource, get::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/referenceDataSets/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, reference_data_set_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(get::BuildRequestError)?;
    let response = http_client.execute(request).await.context(get::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: ReferenceDataSetResource =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        get::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the reference data sets `get` operation.
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// PUT (create or replace) a reference data set in an environment.
///
/// The service answers 200 (updated) or 201 (created); anything else is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn create_or_update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    reference_data_set_name: &str,
    parameters: &ReferenceDataSetCreateOrUpdateParameters,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/referenceDataSets/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, reference_data_set_name
    );
    let mut request = http_client.put(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(create_or_update::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .json(parameters)
        .build()
        .context(create_or_update::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(create_or_update::ExecuteRequestError)?;
    let status = response.status();
    // Every branch consumes the body, so read it once up front.
    let bytes: bytes::Bytes = response.bytes().await.context(create_or_update::ResponseBytesError)?;
    match status {
        StatusCode::OK => {
            let resource: ReferenceDataSetResource =
                serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
            Ok(create_or_update::Response::Ok200(resource))
        }
        StatusCode::CREATED => {
            let resource: ReferenceDataSetResource =
                serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
            Ok(create_or_update::Response::Created201(resource))
        }
        other => {
            let cloud_error: CloudError =
                serde_json::from_slice(&bytes).context(create_or_update::DeserializeError { body: bytes })?;
            create_or_update::DefaultResponse {
                status_code: other,
                value: cloud_error,
            }
            .fail()
        }
    }
}
/// Response and error types for the reference data sets `create_or_update` operation.
pub mod create_or_update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // 200 = an existing data set was updated; 201 = a new one was created.
    #[derive(Debug)]
    pub enum Response {
        Ok200(ReferenceDataSetResource),
        Created201(ReferenceDataSetResource),
    }
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// PATCH an existing reference data set with the given update parameters and
/// return the updated `ReferenceDataSetResource`.
///
/// Any non-200 status is decoded as a `CloudError` and surfaced through
/// `update::Error::DefaultResponse`.
pub async fn update(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    reference_data_set_name: &str,
    reference_data_set_update_parameters: &ReferenceDataSetUpdateParameters,
) -> std::result::Result<ReferenceDataSetResource, update::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/referenceDataSets/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, reference_data_set_name
    );
    let mut request = http_client.patch(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(update::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .json(reference_data_set_update_parameters)
        .build()
        .context(update::BuildRequestError)?;
    let response = http_client.execute(request).await.context(update::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(update::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: ReferenceDataSetResource =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(update::DeserializeError { body: bytes })?;
        update::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the reference data sets `update` operation.
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// DELETE a reference data set from an environment.
///
/// The service answers 200 (deleted) or 204 (nothing to delete); anything
/// else is decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    reference_data_set_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/referenceDataSets/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, reference_data_set_name
    );
    let mut request = http_client.delete(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(delete::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(delete::BuildRequestError)?;
    let response = http_client.execute(request).await.context(delete::ExecuteRequestError)?;
    match response.status() {
        StatusCode::OK => Ok(delete::Response::Ok200),
        StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        other => {
            // Only the error path consumes the body.
            let bytes: bytes::Bytes = response.bytes().await.context(delete::ResponseBytesError)?;
            let cloud_error: CloudError =
                serde_json::from_slice(&bytes).context(delete::DeserializeError { body: bytes })?;
            delete::DefaultResponse {
                status_code: other,
                value: cloud_error,
            }
            .fail()
        }
    }
}
/// Response and error types for the reference data sets `delete` operation.
pub mod delete {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // 200 = resource deleted; 204 = no resource existed to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        NoContent204,
    }
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
/// GET all reference data sets registered under an environment.
///
/// Returns the decoded `ReferenceDataSetListResponse` on 200; any other
/// status is decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn list_by_environment(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
) -> std::result::Result<ReferenceDataSetListResponse, list_by_environment::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/referenceDataSets",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_by_environment::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(list_by_environment::BuildRequestError)?;
    let response = http_client
        .execute(request)
        .await
        .context(list_by_environment::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(list_by_environment::ResponseBytesError)?;
    if status == StatusCode::OK {
        let listing: ReferenceDataSetListResponse =
            serde_json::from_slice(&bytes).context(list_by_environment::DeserializeError { body: bytes })?;
        Ok(listing)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(list_by_environment::DeserializeError { body: bytes })?;
        list_by_environment::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
/// Error type for the reference data sets `list_by_environment` operation.
pub mod list_by_environment {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // NOTE: `//` comments are used on variants because snafu interprets `///`
    // doc comments as Display messages, which would change runtime output.
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        // Non-success status; `value` is the decoded ARM error body.
        DefaultResponse {
            status_code: StatusCode,
            value: models::CloudError,
        },
        // The reqwest request could not be constructed.
        BuildRequestError {
            source: reqwest::Error,
        },
        // The HTTP round trip failed (connect, timeout, ...).
        ExecuteRequestError {
            source: reqwest::Error,
        },
        // Reading the response body failed.
        ResponseBytesError {
            source: reqwest::Error,
        },
        // The body could not be parsed; raw bytes are kept for diagnosis.
        DeserializeError {
            source: serde_json::Error,
            body: bytes::Bytes,
        },
        // Acquiring an AAD token from the configured credential failed.
        GetTokenError {
            source: azure_core::errors::AzureError,
        },
    }
}
}
pub mod access_policies {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
/// GET a single access policy belonging to an environment.
///
/// Returns the decoded `AccessPolicyResource` on 200; any other status is
/// decoded as a `CloudError` and reported as `DefaultResponse`.
pub async fn get(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    environment_name: &str,
    access_policy_name: &str,
) -> std::result::Result<AccessPolicyResource, get::Error> {
    let http_client = &operation_config.client;
    let url = format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/accessPolicies/{}",
        &operation_config.base_path, subscription_id, resource_group_name, environment_name, access_policy_name
    );
    let mut request = http_client.get(url.as_str());
    // Authenticate only when a credential is configured.
    if let Some(credential) = &operation_config.token_credential {
        let token = credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        request = request.bearer_auth(token.token.secret());
    }
    let request = request
        .query(&[("api-version", &operation_config.api_version)])
        .build()
        .context(get::BuildRequestError)?;
    let response = http_client.execute(request).await.context(get::ExecuteRequestError)?;
    let status = response.status();
    // Both the success and error paths need the raw body bytes.
    let bytes: bytes::Bytes = response.bytes().await.context(get::ResponseBytesError)?;
    if status == StatusCode::OK {
        let resource: AccessPolicyResource =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        Ok(resource)
    } else {
        let cloud_error: CloudError =
            serde_json::from_slice(&bytes).context(get::DeserializeError { body: bytes })?;
        get::DefaultResponse {
            status_code: status,
            value: cloud_error,
        }
        .fail()
    }
}
pub mod get {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::CloudError,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
environment_name: &str,
access_policy_name: &str,
parameters: &AccessPolicyCreateOrUpdateParameters,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/accessPolicies/{}",
&operation_config.base_path, subscription_id, resource_group_name, environment_name, access_policy_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(parameters);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: AccessPolicyResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: AccessPolicyResource = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
create_or_update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(AccessPolicyResource),
Created201(AccessPolicyResource),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::CloudError,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
environment_name: &str,
access_policy_name: &str,
access_policy_update_parameters: &AccessPolicyUpdateParameters,
) -> std::result::Result<AccessPolicyResource, update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/accessPolicies/{}",
&operation_config.base_path, subscription_id, resource_group_name, environment_name, access_policy_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(access_policy_update_parameters);
let req = req_builder.build().context(update::BuildRequestError)?;
let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: AccessPolicyResource = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
update::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::CloudError,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
environment_name: &str,
access_policy_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/accessPolicies/{}",
&operation_config.base_path, subscription_id, resource_group_name, environment_name, access_policy_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(delete::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(delete::BuildRequestError)?;
let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(delete::Response::Ok200),
StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
delete::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod delete {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::CloudError,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
pub async fn list_by_environment(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
environment_name: &str,
) -> std::result::Result<AccessPolicyListResponse, list_by_environment::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.TimeSeriesInsights/environments/{}/accessPolicies",
&operation_config.base_path, subscription_id, resource_group_name, environment_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_by_environment::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_environment::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_environment::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_environment::ResponseBytesError)?;
let rsp_value: AccessPolicyListResponse =
serde_json::from_slice(&body).context(list_by_environment::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_by_environment::ResponseBytesError)?;
let rsp_value: CloudError = serde_json::from_slice(&body).context(list_by_environment::DeserializeError { body })?;
list_by_environment::DefaultResponse {
status_code,
value: rsp_value,
}
.fail()
}
}
}
pub mod list_by_environment {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
DefaultResponse {
status_code: StatusCode,
value: models::CloudError,
},
BuildRequestError {
source: reqwest::Error,
},
ExecuteRequestError {
source: reqwest::Error,
},
ResponseBytesError {
source: reqwest::Error,
},
DeserializeError {
source: serde_json::Error,
body: bytes::Bytes,
},
GetTokenError {
source: azure_core::errors::AzureError,
},
}
}
}
|
/// A 2-D point in Cartesian coordinates.
struct Point {
    x: f32,
    y: f32,
}

impl Point {
    /// Distance of the point from the origin.
    ///
    /// Uses `f32::hypot`, which computes sqrt(x² + y²) without the
    /// intermediate overflow/underflow that `(x*x + y*y).sqrt()` can hit
    /// for very large or very small components.
    fn get_radius(&self) -> f32 {
        self.x.hypot(self.y)
    }
}
/// Demonstrates calling a method on a struct value.
fn struct_method() {
    println!("struct_method -------------------");
    let p = Point { x: 3.2, y: 1.2 };
    // Fixed output typo: "radisu" -> "radius".
    println!("radius = {}", p.get_radius());
}
/// Prints a greeting; exists so a plain function can be stored in a variable.
fn say_hi() {
    println!("Hi");
}
/// Demonstrates storing functions and closures in variables.
fn closure_func_var() {
    println!("closure_func_var -------------------");
    // A named function can be bound to a variable and called through it.
    let sh = say_hi;
    sh();
    let one = 1;
    // Explicitly-typed closure. It is never called — underscore prefix keeps
    // the demonstration while silencing the unused-variable warning.
    let _plus_one = |x: i32| -> i32 { x + 1 };
    // Implicitly-typed closure that captures `one` from the environment.
    let plus_one_1 = |x| x + one;
    println!("{} + 1 = {}", 3, plus_one_1(3));
    // `one` is captured by reference above, so it can still be borrowed here
    // (underscore prefix: the borrow itself is the demonstration).
    let _borrow_one = &one;
    println!("{} + 1 = {}", 4, plus_one_1(4));
}
/// Takes the tuple by value (no pattern, no reference) and destructures it
/// in the body instead of using field access.
fn print_tuple(t: (i32, i32)) {
    let (x, y) = t;
    println!("Current location: ({}, {})", x, y);
}
/// Destructures the tuple directly in the parameter list (pattern param, no
/// reference).
fn print_tuple_pat((x, y): (i32, i32)) {
    let message = format!("Current location: ({}, {})", x, y);
    println!("{}", message);
}
/// Takes a reference to the tuple (no param pattern, with reference) and
/// dereferences it once to copy the fields out.
fn print_tuple_ref(t: &(i32, i32)) {
    let (x, y) = *t;
    println!("Current location: ({}, {})", x, y);
}
// The `&t` pattern in the parameter dereferences the incoming reference, so
// `t` is the tuple itself (by copy) — reference in both the type and the value.
fn print_tuple_ref_val(&t: &(i32, i32)) {
    println!("Current location: ({}, {})", t.0, t.1);
}
// Combines both ideas: `&(x, y)` dereferences the reference AND destructures
// the tuple in a single parameter pattern.
fn print_tuple_ref_pat(&(x, y): &(i32, i32)) {
    println!("Current location: ({}, {})", x, y);
}
// Drives each parameter-pattern/reference variant with the same point so the
// identical output demonstrates their equivalence.
fn param_pattern_reference() {
    println!("param_pattern_reference -------------");
    print_tuple((1,2));
    print_tuple_pat((1,2));
    print_tuple_ref(&(1, 2));
    print_tuple_ref_val(&(1, 2));
    print_tuple_ref_pat(&(1,2));
}
/// Compares an imperative loop with the equivalent iterator chain: the sum of
/// even squares no greater than `limit`.
fn higher_order_function() {
    let limit = 200;
    // Imperative version.
    let mut sum = 0;
    for i in 0.. {
        let isq = i * i;
        if isq > limit {
            break;
        } else if isq % 2 == 0 {
            sum += isq;
        }
    }
    // Fixed: `sum` was computed and then silently discarded (dead work plus an
    // unused-assignment warning); report it so the two versions can be compared.
    println!("sum is {}", sum);
    // Declarative version. Fixed boundary: `x <= limit` now matches the loop's
    // `isq > limit` break condition (the old `x < limit` excluded a square
    // exactly equal to `limit`).
    let sum2 = (0..)
        .map(|x| x * x)
        .take_while(|&x| x <= limit)
        .filter(|&x| x % 2 == 0)
        .fold(0, |acc, x| acc + x);
    println!("sum2 is {}", sum2);
}
// Public entry point running every demo in this file; call order is the
// output contract. (A stray `|` artifact after the closing brace was removed.)
pub fn functions() {
    higher_order_function();
    param_pattern_reference();
    closure_func_var();
    struct_method();
}
use std::fs::File;
use std::io;
use std::io::Read;
use zip::read::ZipFile;
use log::*;
/// Trait for read types that can report their size.
pub trait LengthRead: Read {
    /// Returns the expected total size of the input, in bytes.
    fn input_size(&self) -> io::Result<u64>;
    /// Read all bytes from the input, using its reported size to pre-allocate
    /// capacity.
    ///
    /// The size is only a hint: `read_to_end` grows the buffer as needed, so
    /// a wrong hint costs extra allocation but never truncates the result.
    ///
    /// This replaces a hand-rolled loop that zero-filled the whole buffer up
    /// front and — because it tested `pos >= cap` before checking for EOF —
    /// logged a spurious "size was wrong" warning and grew the buffer by 16 KiB
    /// even when the size hint was exactly right.
    fn read_all_sized(&mut self) -> io::Result<Vec<u8>> {
        let cap = self.input_size()? as usize;
        // One spare byte so an exact-sized input can observe EOF without
        // forcing `read_to_end` to grow the buffer first.
        let mut out = Vec::with_capacity(cap + 1);
        self.read_to_end(&mut out)?;
        Ok(out)
    }
}
impl LengthRead for File {
    /// A file's size comes from its filesystem metadata.
    fn input_size(&self) -> io::Result<u64> {
        Ok(self.metadata()?.len())
    }
}
impl<'a> LengthRead for ZipFile<'a> {
    // A zip entry reports the size recorded in the archive metadata; this
    // lookup is infallible, hence the unconditional Ok.
    fn input_size(&self) -> io::Result<u64> {
        Ok(self.size())
    }
}
|
/* Multi-producer/single-consumer queue
* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//! http://www.1024cores.net/home/lock-free-algorithms/queues/intrusive-mpsc-node-based-queue
use std::unstable::sync::UnsafeArc;
use std::unstable::atomics::{AtomicPtr,Relaxed,Release,Acquire};
use std::ptr::{mut_null, to_mut_unsafe_ptr};
use std::cast;
// Intrusive singly-linked list node. `value` is None only for the stub node.
// (Pre-1.0 Rust: `~T` owned boxes, old atomics API.)
struct Node<T> {
    next: AtomicPtr<Node<T>>,
    value: Option<T>,
}
impl<T> Node<T> {
    // The valueless stub/sentinel node.
    fn empty() -> Node<T> {
        Node{next: AtomicPtr::new(mut_null()), value: None}
    }
    fn with_value(value: T) -> Node<T> {
        Node{next: AtomicPtr::new(mut_null()), value: Some(value)}
    }
}
// Shared queue state. The pad fields separate the hot fields — presumably to
// keep producer- and consumer-side data on different cache lines and avoid
// false sharing (64 bytes matches a common cache-line size; TODO confirm).
struct State<T> {
    pad0: [u8, ..64],
    // Producer end: most recently pushed node; updated by atomic swap in push_node.
    head: AtomicPtr<Node<T>>,
    pad1: [u8, ..64],
    // Permanent in-struct sentinel so the list is never empty.
    stub: Node<T>,
    pad2: [u8, ..64],
    // Consumer end: next node to pop; touched only by the single consumer.
    tail: *mut Node<T>,
    pad3: [u8, ..64],
}
// Handle to the shared state; cloning it creates another producer handle.
struct Queue<T> {
    priv state: UnsafeArc<State<T>>,
}
// Cheap clone: only the UnsafeArc refcount is bumped; all clones share one State.
impl<T: Send> Clone for Queue<T> {
    fn clone(&self) -> Queue<T> {
        Queue {
            state: self.state.clone()
        }
    }
}
impl<T: Send> State<T> {
    pub fn new() -> State<T> {
        State{
            pad0: [0, ..64],
            head: AtomicPtr::new(mut_null()),
            pad1: [0, ..64],
            stub: Node::<T>::empty(),
            pad2: [0, ..64],
            tail: mut_null(),
            pad3: [0, ..64],
        }
    }
    // Must run after the State has its final address: head and tail point at
    // the in-struct stub, so they cannot be filled in before placement.
    fn init(&mut self) {
        let stub = self.get_stub_unsafe();
        self.head.store(stub, Relaxed);
        self.tail = stub;
    }
    // Raw pointer to the embedded stub node.
    fn get_stub_unsafe(&mut self) -> *mut Node<T> {
        to_mut_unsafe_ptr(&mut self.stub)
    }
    fn push(&mut self, value: T) {
        unsafe {
            // Box the node and leak it to a raw pointer; ownership is
            // reclaimed by the matching transmute in pop().
            let node = cast::transmute(~Node::with_value(value));
            self.push_node(node);
        }
    }
    // Vyukov MPSC push: swap head to the new node, then link the old head to
    // it. Between the swap and the final store the chain is briefly broken,
    // which is why pop() can transiently report the queue empty.
    fn push_node(&mut self, node: *mut Node<T>) {
        unsafe {
            (*node).next.store(mut_null(), Release);
            let prev = self.head.swap(node, Relaxed);
            (*prev).next.store(node, Release);
        }
    }
    // Single-consumer pop. Returning None can mean "empty" OR "a producer is
    // mid-push" — callers must tolerate spurious None (see the test's retry loop).
    fn pop(&mut self) -> Option<T> {
        unsafe {
            let mut tail = self.tail;
            let mut next = (*tail).next.load(Acquire);
            let stub = self.get_stub_unsafe();
            // Step over the stub: it carries no value.
            if tail == stub {
                if mut_null() == next {
                    return None
                }
                self.tail = next;
                tail = next;
                next = (*next).next.load(Acquire);
            }
            if next != mut_null() {
                // Fast path: reclaim the popped node as an owned box (freeing
                // it when dropped) and advance tail.
                let tail: ~Node<T> = cast::transmute(tail);
                self.tail = next;
                return tail.value
            }
            let head = self.head.load(Relaxed);
            if tail != head {
                // A producer swapped head but hasn't linked next yet.
                return None
            }
            // tail is the last real node: re-insert the stub behind it so the
            // list never empties, then retry advancing past tail once.
            self.push_node(stub);
            next = (*tail).next.load(Acquire);
            if next != mut_null() {
                let tail: ~Node<T> = cast::transmute(tail);
                self.tail = next;
                return tail.value
            }
        }
        None
    }
}
impl<T: Send> Queue<T> {
    /// Creates a new, empty queue.
    pub fn new() -> Queue<T> {
        unsafe {
            let q = Queue{state: UnsafeArc::new(State::new())};
            // init() runs only after State is at its final heap address,
            // because head/tail point into the struct itself (the stub).
            (*q.state.get()).init();
            q
        }
    }
    /// Appends a value; the header documents this as multi-producer safe.
    pub fn push(&mut self, value: T) {
        unsafe { (*self.state.get()).push(value) }
    }
    /// Same as pop(); both may spuriously return None while a push is in flight.
    pub fn casual_pop(&mut self) -> Option<T> {
        unsafe { (*self.state.get()).pop() }
    }
    /// Removes the oldest value. Single consumer only.
    pub fn pop(&mut self) -> Option<T> {
        unsafe { (*self.state.get()).pop() }
    }
}
#[cfg(test)]
mod tests {
    use std::task;
    use std::comm;
    use super::Queue;
    // Pre-1.0 Rust test: `8u`/`1000u` are uint literals, `range(a, b)` the old
    // iterator form, and `do task::spawn_sched(..) { .. }` is do-syntax.
    #[test]
    fn test() {
        let nthreads = 8u;
        let nmsgs = 1000u;
        let mut q = Queue::new();
        assert_eq!(None, q.pop());
        // Hand each producer task its own clone of the queue over a channel.
        for _ in range(0, nthreads) {
            let (port, chan) = comm::stream();
            chan.send(q.clone());
            do task::spawn_sched(task::SingleThreaded) {
                let mut q = port.recv();
                for i in range(0, nmsgs) {
                    q.push(i);
                }
            }
        }
        // Busy-wait until every message is drained: pop() legitimately returns
        // None while producers are still mid-push, so None is not "done".
        let mut i = 0u;
        loop {
            match q.pop() {
                None => {},
                Some(_) => {
                    i += 1;
                    if i == nthreads*nmsgs { break }
                }
            }
        }
    }
}
|
/// The four cardinal movement directions.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Direction {
    Up,
    Right,
    Down,
    Left,
}

impl Direction {
    /// Returns the direction pointing the opposite way.
    ///
    /// This mapping is an involution: `d.opposite().opposite() == d`.
    pub fn opposite(&self) -> Self {
        match *self {
            Direction::Up => Direction::Down,
            Direction::Down => Direction::Up,
            Direction::Left => Direction::Right,
            Direction::Right => Direction::Left,
        }
    }
}
|
//! Data types used in packets.
use crate::adapters::*;
// Define trivial types
// Wire-format structs; field order is the serialization order. Plain `//`
// comments are stripped before macro expansion, so they are safe inside the
// invocation.
define_packet_data! {
    // One ground tile at grid position (x, y).
    GroundTileData {
        x: i16,
        y: i16,
        tile_type: u16,
    },
    // A single timestamped position sample.
    MoveRecord {
        time: u32,
        x: f32,
        y: f32,
    },
    ObjectData {
        object_type: u16,
        status: ObjectStatusData,
    },
    // Position plus a u16-length-prefixed list of stats for one object.
    ObjectStatusData {
        object_id: u32,
        pos: WorldPosData,
        stats: WithLen<u16, Vec<StatData>>,
    },
    // Identifies one inventory slot of one object.
    SlotObjectData {
        object_id: u32,
        slot_id: u8,
        object_type: u32,
    },
    TradeItem {
        item: u32,
        slot_type: u32,
        tradeable: bool,
        included: bool,
    },
    // A position in world (floating-point) coordinates.
    WorldPosData {
        x: f32,
        y: f32,
    },
}
// Stat identifiers as sent on the wire, each paired with the Rust type of its
// payload (i32 or String). IDs are NOT contiguous and keep the numbering the
// game protocol uses — never renumber or reorder the values.
define_stat_types! {
    MAX_HP_STAT:i32 = 0,
    HP_STAT: i32 = 1,
    SIZE_STAT: i32 = 2,
    MAX_MP_STAT: i32 = 3,
    MP_STAT: i32 = 4,
    NEXT_LEVEL_EXP_STAT: i32 = 5,
    EXP_STAT: i32 = 6,
    LEVEL_STAT: i32 = 7,
    ATTACK_STAT: i32 = 20,
    DEFENSE_STAT: i32 = 21,
    SPEED_STAT: i32 = 22,
    // Equipment/inventory slots occupy the contiguous range 8..=19.
    INVENTORY_0_STAT: i32 = 8,
    INVENTORY_1_STAT: i32 = 9,
    INVENTORY_2_STAT: i32 = 10,
    INVENTORY_3_STAT: i32 = 11,
    INVENTORY_4_STAT: i32 = 12,
    INVENTORY_5_STAT: i32 = 13,
    INVENTORY_6_STAT: i32 = 14,
    INVENTORY_7_STAT: i32 = 15,
    INVENTORY_8_STAT: i32 = 16,
    INVENTORY_9_STAT: i32 = 17,
    INVENTORY_10_STAT: i32 = 18,
    INVENTORY_11_STAT: i32 = 19,
    VITALITY_STAT: i32 = 26,
    WISDOM_STAT: i32 = 27,
    DEXTERITY_STAT: i32 = 28,
    CONDITION_STAT: i32 = 29,
    NUM_STARS_STAT: i32 = 30,
    NAME_STAT: String = 31,
    TEX1_STAT: i32 = 32,
    TEX2_STAT: i32 = 33,
    MERCHANDISE_TYPE_STAT: i32 = 34,
    CREDITS_STAT: i32 = 35,
    MERCHANDISE_PRICE_STAT: i32 = 36,
    ACTIVE_STAT: i32 = 37,
    ACCOUNT_ID_STAT: String = 38,
    FAME_STAT: i32 = 39,
    MERCHANDISE_CURRENCY_STAT: i32 = 40,
    CONNECT_STAT: i32 = 41,
    MERCHANDISE_COUNT_STAT: i32 = 42,
    MERCHANDISE_MINS_LEFT_STAT: i32 = 43,
    MERCHANDISE_DISCOUNT_STAT: i32 = 44,
    MERCHANDISE_RANK_REQ_STAT: i32 = 45,
    // Temporary bonuses applied on top of the base stats above.
    MAX_HP_BOOST_STAT: i32 = 46,
    MAX_MP_BOOST_STAT: i32 = 47,
    ATTACK_BOOST_STAT: i32 = 48,
    DEFENSE_BOOST_STAT: i32 = 49,
    SPEED_BOOST_STAT: i32 = 50,
    VITALITY_BOOST_STAT: i32 = 51,
    WISDOM_BOOST_STAT: i32 = 52,
    DEXTERITY_BOOST_STAT: i32 = 53,
    OWNER_ACCOUNT_ID_STAT: String = 54,
    RANK_REQUIRED_STAT: i32 = 55,
    NAME_CHOSEN_STAT: i32 = 56,
    CURR_FAME_STAT: i32 = 57,
    NEXT_CLASS_QUEST_FAME_STAT: i32 = 58,
    LEGENDARY_RANK_STAT: i32 = 59,
    SINK_LEVEL_STAT: i32 = 60,
    ALT_TEXTURE_STAT: i32 = 61,
    GUILD_NAME_STAT: String = 62,
    GUILD_RANK_STAT: i32 = 63,
    BREATH_STAT: i32 = 64,
    XP_BOOSTED_STAT: i32 = 65,
    XP_TIMER_STAT: i32 = 66,
    LD_TIMER_STAT: i32 = 67,
    LT_TIMER_STAT: i32 = 68,
    HEALTH_POTION_STACK_STAT: i32 = 69,
    MAGIC_POTION_STACK_STAT: i32 = 70,
    // Backpack slots 71..=78.
    BACKPACK_0_STAT: i32 = 71,
    BACKPACK_1_STAT: i32 = 72,
    BACKPACK_2_STAT: i32 = 73,
    BACKPACK_3_STAT: i32 = 74,
    BACKPACK_4_STAT: i32 = 75,
    BACKPACK_5_STAT: i32 = 76,
    BACKPACK_6_STAT: i32 = 77,
    BACKPACK_7_STAT: i32 = 78,
    HASBACKPACK_STAT: i32 = 79,
    TEXTURE_STAT: i32 = 80,
    // Pet-related stats 81..=95.
    PET_INSTANCEID_STAT: i32 = 81,
    PET_NAME_STAT: String = 82,
    PET_TYPE_STAT: i32 = 83,
    PET_RARITY_STAT: i32 = 84,
    PET_MAXABILITYPOWER_STAT: i32 = 85,
    PET_FAMILY_STAT: i32 = 86,
    PET_FIRSTABILITY_POINT_STAT: i32 = 87,
    PET_SECONDABILITY_POINT_STAT: i32 = 88,
    PET_THIRDABILITY_POINT_STAT: i32 = 89,
    PET_FIRSTABILITY_POWER_STAT: i32 = 90,
    PET_SECONDABILITY_POWER_STAT: i32 = 91,
    PET_THIRDABILITY_POWER_STAT: i32 = 92,
    PET_FIRSTABILITY_TYPE_STAT: i32 = 93,
    PET_SECONDABILITY_TYPE_STAT: i32 = 94,
    PET_THIRDABILITY_TYPE_STAT: i32 = 95,
    NEW_CON_STAT: i32 = 96,
    FORTUNE_TOKEN_STAT: i32 = 97,
    SUPPORTER_POINTS_STAT: i32 = 98,
    SUPPORTER_STAT: i32 = 99,
    CHALLENGER_STARBG_STAT: i32 = 100,
    // Note: id 101 is intentionally absent from this list.
    PROJECTILE_SPEED_MULT: i32 = 102,
    PROJECTILE_LIFE_MULT: i32 = 103,
}
/// ROTMG stat data - a `StatType` associated with either an integer (`i32`) or
/// a string (`String`).
///
/// Which payload applies is decided by `StatType::is_string()`; the enum and
/// the packet codec below must stay in sync.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum StatData {
    /// String stat data.
    String(StatType, String),
    /// Integer stat data.
    Integer(StatType, i32),
}
impl FromPacketBytes for StatData {
    type Output = StatData;

    /// Reads one stat: a type byte, then a payload whose wire format (length-
    /// prefixed string vs i32) is dictated by that stat type.
    fn from_packet(reader: &mut PacketReader) -> Result<Self::Output, Box<PacketFormatError>> {
        let raw_type = u8::from_packet(reader)?;
        // An unrecognised type byte is a hard format error.
        let stat_type = match StatType::from_byte(raw_type) {
            Some(t) => t,
            None => return Err(Box::new(PacketFormatError::UnknownStatType(raw_type))),
        };
        if stat_type.is_string() {
            let text = <WithLen<u16, String>>::from_packet(reader)?;
            Ok(StatData::String(stat_type, text))
        } else {
            let number = i32::from_packet(reader)?;
            Ok(StatData::Integer(stat_type, number))
        }
    }
}
impl<T: Into<StatData>> ToPacketBytes<T> for StatData {
    /// Writes the stat-type byte followed by the payload in its wire format
    /// (the mirror of `from_packet`).
    fn to_packet(value: T, packet: &mut Vec<u8>) -> Result<(), Box<PacketFormatError>> {
        match value.into() {
            StatData::Integer(stat_type, number) => {
                u8::to_packet(stat_type.to_byte(), packet)?;
                i32::to_packet(number, packet)
            }
            StatData::String(stat_type, text) => {
                u8::to_packet(stat_type.to_byte(), packet)?;
                <WithLen<u16, String>>::to_packet(text, packet)
            }
        }
    }
}
|
//! Electrum Client
use bitcoin::{Script, Txid};
use api::ElectrumApi;
use batch::Batch;
use raw_client::*;
use types::*;
/// Generalized Electrum client that supports multiple backends. This wraps
/// [`RawClient`](client/struct.RawClient.html) and provides a more user-friendly
/// constructor that can choose the right backend based on the url prefix.
///
/// **This is available only with the `default` features, or if `proxy` and one ssl implementation are enabled**
pub enum Client {
    // Variants are #[doc(hidden)]: callers are expected to construct via
    // `Client::new`, which chooses the backend from the url prefix.
    #[doc(hidden)]
    TCP(RawClient<ElectrumPlaintextStream>),
    #[doc(hidden)]
    SSL(RawClient<ElectrumSslStream>),
    #[doc(hidden)]
    Socks5(RawClient<ElectrumProxyStream>),
}
// Dispatches a method call to whichever backend variant `$self` currently
// holds; every ElectrumApi method below is a one-line use of this macro.
macro_rules! impl_inner_call {
    ( $self:expr, $name:ident $(, $args:expr)* ) => {
        match $self {
            Client::TCP(inner) => inner.$name( $($args, )* ),
            Client::SSL(inner) => inner.$name( $($args, )* ),
            Client::Socks5(inner) => inner.$name( $($args, )* ),
        }
    }
}
impl Client {
    /// Generic constructor that supports multiple backends and, optionally, a socks5 proxy.
    ///
    /// Supported prefixes are:
    /// - tcp:// for a TCP plaintext client.
    /// - ssl:// for an SSL-encrypted client. The server certificate will be verified.
    ///
    /// If no prefix is specified, then `tcp://` is assumed.
    ///
    /// The `socks5` argument can optionally be prefixed with `socks5://`.
    ///
    /// **NOTE**: SSL-over-socks5 is currently not supported and will generate a runtime error.
    pub fn new(url: &str, socks5: Option<&str>) -> Result<Self, Error> {
        // Normalize the proxy address: the scheme prefix is optional.
        let socks5 = socks5.map(|s| s.replacen("socks5://", "", 1));

        // Plaintext (optionally proxied) path first; SSL handled below.
        if !url.starts_with("ssl://") {
            let url = url.replacen("tcp://", "", 1);
            return Ok(match socks5 {
                Some(proxy) => Client::Socks5(RawClient::new_proxy(url.as_str(), proxy)?),
                None => Client::TCP(RawClient::new(url.as_str())?),
            });
        }

        // SSL path: tunnelling TLS through socks5 is unsupported.
        if socks5.is_some() {
            return Err(Error::SSLOverSocks5);
        }
        let url = url.replacen("ssl://", "", 1);
        Ok(Client::SSL(RawClient::new_ssl(url.as_str(), true)?))
    }
}
// Pure delegation: every method forwards to the active backend via
// `impl_inner_call!`; no logic lives here, so keep new methods to one line.
impl ElectrumApi for Client {
    #[inline]
    fn batch_call(&self, batch: Batch) -> Result<Vec<serde_json::Value>, Error> {
        impl_inner_call!(self, batch_call, batch)
    }
    #[inline]
    fn block_headers_subscribe_raw(&self) -> Result<RawHeaderNotification, Error> {
        impl_inner_call!(self, block_headers_subscribe_raw)
    }
    #[inline]
    fn block_headers_pop_raw(&self) -> Result<Option<RawHeaderNotification>, Error> {
        impl_inner_call!(self, block_headers_pop_raw)
    }
    #[inline]
    fn block_header_raw(&self, height: usize) -> Result<Vec<u8>, Error> {
        impl_inner_call!(self, block_header_raw, height)
    }
    #[inline]
    fn block_headers(&self, start_height: usize, count: usize) -> Result<GetHeadersRes, Error> {
        impl_inner_call!(self, block_headers, start_height, count)
    }
    #[inline]
    fn estimate_fee(&self, number: usize) -> Result<f64, Error> {
        impl_inner_call!(self, estimate_fee, number)
    }
    #[inline]
    fn relay_fee(&self) -> Result<f64, Error> {
        impl_inner_call!(self, relay_fee)
    }
    #[inline]
    fn script_subscribe(&self, script: &Script) -> Result<Option<ScriptStatus>, Error> {
        impl_inner_call!(self, script_subscribe, script)
    }
    #[inline]
    fn script_unsubscribe(&self, script: &Script) -> Result<bool, Error> {
        impl_inner_call!(self, script_unsubscribe, script)
    }
    #[inline]
    fn script_pop(&self, script: &Script) -> Result<Option<ScriptStatus>, Error> {
        impl_inner_call!(self, script_pop, script)
    }
    #[inline]
    fn script_get_balance(&self, script: &Script) -> Result<GetBalanceRes, Error> {
        impl_inner_call!(self, script_get_balance, script)
    }
    #[inline]
    fn batch_script_get_balance<'s, I>(&self, scripts: I) -> Result<Vec<GetBalanceRes>, Error>
    where
        I: IntoIterator<Item = &'s Script>,
    {
        impl_inner_call!(self, batch_script_get_balance, scripts)
    }
    #[inline]
    fn script_get_history(&self, script: &Script) -> Result<Vec<GetHistoryRes>, Error> {
        impl_inner_call!(self, script_get_history, script)
    }
    #[inline]
    fn batch_script_get_history<'s, I>(&self, scripts: I) -> Result<Vec<Vec<GetHistoryRes>>, Error>
    where
        I: IntoIterator<Item = &'s Script>,
    {
        impl_inner_call!(self, batch_script_get_history, scripts)
    }
    #[inline]
    fn script_list_unspent(&self, script: &Script) -> Result<Vec<ListUnspentRes>, Error> {
        impl_inner_call!(self, script_list_unspent, script)
    }
    #[inline]
    fn batch_script_list_unspent<'s, I>(
        &self,
        scripts: I,
    ) -> Result<Vec<Vec<ListUnspentRes>>, Error>
    where
        I: IntoIterator<Item = &'s Script>,
    {
        impl_inner_call!(self, batch_script_list_unspent, scripts)
    }
    #[inline]
    fn transaction_get_raw(&self, txid: &Txid) -> Result<Vec<u8>, Error> {
        impl_inner_call!(self, transaction_get_raw, txid)
    }
    #[inline]
    fn batch_transaction_get_raw<'t, I>(&self, txids: I) -> Result<Vec<Vec<u8>>, Error>
    where
        I: IntoIterator<Item = &'t Txid>,
    {
        impl_inner_call!(self, batch_transaction_get_raw, txids)
    }
    #[inline]
    fn batch_block_header_raw<'s, I>(&self, heights: I) -> Result<Vec<Vec<u8>>, Error>
    where
        I: IntoIterator<Item = u32>,
    {
        impl_inner_call!(self, batch_block_header_raw, heights)
    }
    #[inline]
    fn batch_estimate_fee<'s, I>(&self, numbers: I) -> Result<Vec<f64>, Error>
    where
        I: IntoIterator<Item = usize>,
    {
        impl_inner_call!(self, batch_estimate_fee, numbers)
    }
    #[inline]
    fn transaction_broadcast_raw(&self, raw_tx: &[u8]) -> Result<Txid, Error> {
        impl_inner_call!(self, transaction_broadcast_raw, raw_tx)
    }
    #[inline]
    fn transaction_get_merkle(&self, txid: &Txid, height: usize) -> Result<GetMerkleRes, Error> {
        impl_inner_call!(self, transaction_get_merkle, txid, height)
    }
    #[inline]
    fn server_features(&self) -> Result<ServerFeaturesRes, Error> {
        impl_inner_call!(self, server_features)
    }
    #[inline]
    fn ping(&self) -> Result<(), Error> {
        impl_inner_call!(self, ping)
    }
    // Only compiled when call-counting instrumentation is enabled.
    #[inline]
    #[cfg(feature = "debug-calls")]
    fn calls_made(&self) -> usize {
        impl_inner_call!(self, calls_made)
    }
}
|
use std::io;
use std::str;
use thiserror::Error;
use crate::{header, point, reader, vlr, writer, Transform, Version};
/// Crate-specific error enum.
#[derive(Error, Debug)]
pub enum Error {
    /// Feature is not supported by version.
    #[error("feature {feature} is not supported by version {version}")]
    #[allow(missing_docs)]
    Feature {
        /// The las version that lacks the feature.
        version: Version,
        /// Name of the unsupported feature.
        feature: &'static str,
    },
    /// A wrapper around `las::header::Error`.
    #[error(transparent)]
    Header(#[from] header::Error),
    /// The value can't have the inverse transform applied.
    #[error("the transform {transform} cannot be inversely applied to {n}")]
    #[allow(missing_docs)]
    InverseTransform {
        /// The value that could not be inversely transformed.
        n: f64,
        /// The transform that failed to apply.
        transform: Transform,
    },
    /// Wrapper around `std::io::Error`.
    #[error(transparent)]
    Io(#[from] io::Error),
    /// The las data is laszip compressed.
    #[error(
        "the las data is laszip compressed, but laszip compression is not supported by this build"
    )]
    Laszip,
    /// This string is not ASCII.
    #[error("this string is not ascii: {0}")]
    NotAscii(String),
    /// These bytes are not zero-filled.
    #[error("the bytes are not zero-filled: {0:?}")]
    NotZeroFilled(Vec<u8>),
    /// Wrapper around `las::point::Error`.
    #[error(transparent)]
    Point(#[from] point::Error),
    /// Wrapper around `las::reader::Error`.
    #[error(transparent)]
    Reader(#[from] reader::Error),
    /// This string is too long for the target slice.
    #[error("string is too long for a slice of length {len}: {string}")]
    #[allow(missing_docs)]
    StringTooLong {
        /// The string that did not fit.
        string: String,
        /// Length of the target slice.
        len: usize,
    },
    /// Wrapper around `std::str::Utf8Error`.
    #[error(transparent)]
    Utf8(#[from] str::Utf8Error),
    /// Wrapper around `las::writer::Error`.
    #[error(transparent)]
    Writer(#[from] writer::Error),
    /// Wrapper around `las::vlr::Error`.
    #[error(transparent)]
    Vlr(#[from] vlr::Error),
    /// Wrapper around `laz::LasZipError`
    #[cfg(feature = "laz")]
    #[error("laszip error: {0}")]
    LasZipError(laz::LasZipError),
    /// The Laszip vlr was not found, the points cannot be decompressed
    #[cfg(feature = "laz")]
    #[error("laszip vlr not found")]
    LasZipVlrNotFound,
}
#[cfg(feature = "laz")]
impl From<laz::LasZipError> for Error {
    // Manual impl (the variant has no #[from]); lets `?` convert laz errors.
    fn from(error: laz::LasZipError) -> Error {
        Error::LasZipError(error)
    }
}
|
use specs::prelude::*;
use super::{RunState, gamelog::GameLog, GameClock, SeedClock, Seed, IsSown};
/// System that advances sown seeds toward maturation and logs when they mature.
pub struct SeedSystem {}

impl<'a> System<'a> for SeedSystem {
    #[allow(clippy::type_complexity)]
    type SystemData = (
        Entities<'a>,
        WriteStorage<'a, Seed>,
        WriteStorage<'a, SeedClock>,
        ReadExpect<'a, RunState>,
        WriteExpect<'a, GameLog>,
        WriteStorage<'a, IsSown>
    );

    fn run(&mut self, data: Self::SystemData) {
        // Seeds and markers are only read here, so the bindings drop the
        // needless `mut` the old code carried (SystemData stays unchanged
        // for compatibility with the dispatcher registration).
        let (entities, seeds, mut clocks, runstate, mut log, is_sowns) = data;
        // Joining on IsSown restricts the tick to seeds that were planted.
        for (_entity, seed, clock, _is_sown) in (&entities, &seeds, &mut clocks, &is_sowns).join() {
            // Growth time only advances on the player's turn.
            if matches!(*runstate, RunState::PlayerTurn) {
                clock.duration += 1;
            }
            // Guard `time_to_maturation > 0`: a zero value would panic on `%`.
            if seed.time_to_maturation > 0 && clock.duration % seed.time_to_maturation == 0 {
                log.entries.push(format!(
                    "{} matured ({}, {})",
                    seed.name, clock.duration, seed.time_to_maturation
                ));
            }
        }
    }
}
|
use super::types;
use super::types::Type;
use crate::core::parse::ast::TypeID;
use std::fmt;
use std::fmt::Debug;
/// A runtime value in the interpreter; one variant per value kind.
#[derive(Debug, Clone, PartialEq)]
pub enum Value<'a> {
    /// Numeric value.
    Number(types::Number),
    /// Callable function; may borrow from the source, hence `'a`.
    Function(types::Function<'a>),
    /// String value.
    String(types::String),
    /// Boolean value.
    Bool(types::Bool),
    /// The unit (empty) value.
    Unit(types::Unit),
    /// Enum variant value.
    Enum(types::Enum<'a>),
}
impl<'a> Type for Value<'a> {
    /// Delegates to the wrapped value's own `type_id`, whatever the variant.
    fn type_id(&self) -> TypeID {
        match self {
            Value::Number(inner) => inner.type_id(),
            Value::Function(inner) => inner.type_id(),
            Value::String(inner) => inner.type_id(),
            Value::Bool(inner) => inner.type_id(),
            Value::Unit(inner) => inner.type_id(),
            Value::Enum(inner) => inner.type_id(),
        }
    }
}
impl<'a> fmt::Display for Value<'a> {
    /// Renders the value for user-facing output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Value::Number(num) => write!(f, "{}", num.value),
            Value::String(text) => write!(f, "{}", text.value),
            Value::Bool(flag) => write!(f, "{}", flag.value),
            Value::Unit(_) => write!(f, "()"),
            Value::Enum(variant) => write!(f, "{}", variant),
            Value::Function(func) => {
                // Functions render as `(fn  (arg)… body)`; the body falls
                // back to its Debug representation.
                write!(f, "(fn ")?;
                for arg in func.args.iter() {
                    write!(f, " ({})", arg)?;
                }
                write!(f, " {:?})", func.body)
            }
        }
    }
}
impl<'a> types::Function<'a> {
    /// Returns `true` when the function's first argument is literally named
    /// `self`, i.e. it was declared as a method.
    pub fn is_method(&self) -> bool {
        // Single lookup via `first()` instead of `get(0).is_some()` followed
        // by a second indexed access.
        self.args.first().map_or(false, |arg| arg == "self")
    }
}
|
// This file is part of lock-free-multi-producer-single-consumer-ring-buffer. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT. No part of lock-free-multi-producer-single-consumer-ring-buffer, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 - 2019 The developers of lock-free-multi-producer-single-consumer-ring-buffer. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/lock-free-multi-producer-single-consumer-ring-buffer/master/COPYRIGHT.
/// A ring buffer for sending lock-less bursts of messages.
///
/// Not particularly cheap to consume from (as it walks all producers) so try to use as few producers as possible and consume as much as possible with each call.
///
/// Multi-Producer, Single-Consumer (MP-SC).
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct RingBuffer<T: Sized>
{
    // Raw pointer to the shared, heap-allocated ring state; every producer
    // and the consumer alias the same allocation.
    ring_buffer_inner_non_null: NonNull<RingBufferInner<T>>,
    // Reference-counted drop handler: the inner allocation is freed when the
    // last clone of this handle is dropped.
    inner_drop_handle: Arc<RingBufferInnerDropHandler<T>>,
    // Expresses logical ownership of `T` values (drop check / variance)
    // without storing one.
    marker: PhantomData<T>,
}
impl<T: Sized> Clone for RingBuffer<T> {
    /// Cloning shares the same underlying ring: the raw pointer is copied and
    /// the drop handle's reference count is bumped — no buffer data is
    /// duplicated. (Manual impl: `#[derive(Clone)]` would demand `T: Clone`.)
    #[inline(always)]
    fn clone(&self) -> Self {
        let inner_drop_handle = self.inner_drop_handle.clone();
        Self {
            marker: PhantomData,
            inner_drop_handle,
            ring_buffer_inner_non_null: self.ring_buffer_inner_non_null,
        }
    }
}
impl<T: Sized> RingBuffer<T>
{
    /// Creates a new ring buffer and returns a consumer to it and producers for it.
    ///
    /// When the last consumer or producer is dropped, the ring buffer is freed.
    #[inline(always)]
    pub fn new(capacity: usize, number_of_producers: usize) -> (RingBufferConsumer<T>, Vec<RingBufferProducer<T>>)
    {
        // One heap allocation holds the ring storage plus per-producer slots.
        let ring_buffer_inner_non_null = RingBufferInner::allocate(capacity, number_of_producers);
        let ring_buffer = Self
        {
            ring_buffer_inner_non_null,
            // Shared drop handler: frees the allocation when the last
            // consumer/producer handle goes away.
            inner_drop_handle: Arc::new(RingBufferInnerDropHandler(ring_buffer_inner_non_null)),
            marker: PhantomData,
        };
        // Walk the per-producer slots laid out inside the inner allocation,
        // handing each producer its own slot pointer.
        let mut ring_buffer_producer_inner_non_null = ring_buffer.reference().first_ring_buffer_producer_inner_non_null();
        let mut producers = Vec::with_capacity(number_of_producers);
        for _ in 0 .. number_of_producers
        {
            producers.push
            (
                RingBufferProducer
                {
                    // Each producer holds its own clone, keeping the ring alive.
                    ring_buffer: ring_buffer.clone(),
                    ring_buffer_producer_inner_non_null,
                }
            );
            ring_buffer_producer_inner_non_null = RingBufferInner::<T>::next_ring_buffer_producer_inner_non_null(ring_buffer_producer_inner_non_null);
        }
        (RingBufferConsumer(ring_buffer), producers)
    }

    /// Borrows the shared inner ring state.
    #[inline(always)]
    pub(crate) fn reference(&self) -> &RingBufferInner<T>
    {
        // The pointer is non-null by construction (`allocate`) and remains
        // valid as long as any handle — and thus `inner_drop_handle` — lives.
        unsafe { & * self.ring_buffer_inner_non_null.as_ptr() }
    }
}
|
use libc::{c_void, c_int, size_t, ssize_t, off_t};
/// C-compatible scatter/gather buffer descriptor mirroring POSIX `struct iovec`.
/// Layout must match the system's C definition, hence `#[repr(C)]` and the
/// C-style lowercase name.
#[repr(C)]
pub struct iovec {
    /// Start of the buffer.
    pub iov_base: *mut c_void,
    /// Length of the buffer in bytes.
    pub iov_len: size_t
}
#[cfg(target_os = "linux")]
extern {
    /// Positioned vectored write (Linux-only binding): writes `iovcnt`
    /// buffers starting at file `offset` without moving the file cursor.
    /// Returns bytes written, or -1 on error (errno set).
    pub fn pwritev(fd: c_int, iov: *const iovec, iovcnt: c_int, offset: off_t) -> ssize_t;
}
#[cfg(any(target_os = "macos", target_os="linux"))]
extern {
    /// Vectored write at the current file offset: writes `iovcnt` buffers in
    /// order. Returns bytes written, or -1 on error (errno set).
    pub fn writev(fd: c_int, iov: *const iovec, iovcnt: c_int) -> ssize_t;
}
|
use crate::query_testing;
use anyhow::{Context, Result};
use std::{
fs,
io::{self, Write},
ops::Range,
path::Path,
time::Instant,
};
use tree_sitter::{Language, Parser, Point, Query, QueryCursor};
/// Runs a compiled tree-sitter query over each file in `paths`, printing the
/// matches/captures and optionally checking them against expected captures.
///
/// * `ordered_captures` — iterate captures in source order instead of
///   grouping them by match.
/// * `byte_range` / `point_range` — optionally restrict the query to a region.
/// * `should_test` — verify collected captures via `query_testing`.
/// * `quiet` — suppress per-capture output (results are still collected).
/// * `print_time` — print elapsed time per file.
pub fn query_files_at_paths(
    language: Language,
    paths: Vec<String>,
    query_path: &Path,
    ordered_captures: bool,
    byte_range: Option<Range<usize>>,
    point_range: Option<Range<Point>>,
    should_test: bool,
    quiet: bool,
    print_time: bool,
) -> Result<()> {
    // Lock stdout once; every writeln! below would otherwise re-lock.
    let stdout = io::stdout();
    let mut stdout = stdout.lock();
    let query_source = fs::read_to_string(query_path)
        .with_context(|| format!("Error reading query file {:?}", query_path))?;
    let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;
    let mut query_cursor = QueryCursor::new();
    // Optional region restriction on the cursor.
    if let Some(range) = byte_range {
        query_cursor.set_byte_range(range);
    }
    if let Some(range) = point_range {
        query_cursor.set_point_range(range);
    }
    let mut parser = Parser::new();
    parser.set_language(language)?;
    for path in paths {
        // Captures collected for the optional `should_test` verification.
        let mut results = Vec::new();
        writeln!(&mut stdout, "{}", path)?;
        let source_code =
            fs::read(&path).with_context(|| format!("Error reading source file {:?}", path))?;
        let tree = parser.parse(&source_code, None).unwrap();
        // Timed region covers query execution and printing for this file.
        let start = Instant::now();
        if ordered_captures {
            // Capture-ordered mode: one output line per capture, in the order
            // the captures appear in the source.
            for (mat, capture_index) in
                query_cursor.captures(&query, tree.root_node(), source_code.as_slice())
            {
                let capture = mat.captures[capture_index];
                let capture_name = &query.capture_names()[capture.index as usize];
                if !quiet {
                    writeln!(
                        &mut stdout,
                        " pattern: {:>2}, capture: {} - {}, start: {}, end: {}, text: `{}`",
                        mat.pattern_index,
                        capture.index,
                        capture_name,
                        capture.node.start_position(),
                        capture.node.end_position(),
                        capture.node.utf8_text(&source_code).unwrap_or("")
                    )?;
                }
                results.push(query_testing::CaptureInfo {
                    name: capture_name.to_string(),
                    start: capture.node.start_position(),
                    end: capture.node.end_position(),
                });
            }
        } else {
            // Match-ordered mode: group output by match, then by capture.
            for m in query_cursor.matches(&query, tree.root_node(), source_code.as_slice()) {
                if !quiet {
                    writeln!(&mut stdout, " pattern: {}", m.pattern_index)?;
                }
                for capture in m.captures {
                    let start = capture.node.start_position();
                    let end = capture.node.end_position();
                    let capture_name = &query.capture_names()[capture.index as usize];
                    if !quiet {
                        // Only single-line captures print their text.
                        if end.row == start.row {
                            writeln!(
                                &mut stdout,
                                " capture: {} - {}, start: {}, end: {}, text: `{}`",
                                capture.index,
                                capture_name,
                                start,
                                end,
                                capture.node.utf8_text(&source_code).unwrap_or("")
                            )?;
                        } else {
                            writeln!(
                                &mut stdout,
                                " capture: {}, start: {}, end: {}",
                                capture_name, start, end,
                            )?;
                        }
                    }
                    results.push(query_testing::CaptureInfo {
                        name: capture_name.to_string(),
                        start: capture.node.start_position(),
                        end: capture.node.end_position(),
                    });
                }
            }
        }
        if query_cursor.did_exceed_match_limit() {
            writeln!(
                &mut stdout,
                " WARNING: Query exceeded maximum number of in-progress captures!"
            )?;
        }
        if should_test {
            query_testing::assert_expected_captures(results, path, &mut parser, language)?
        }
        if print_time {
            writeln!(&mut stdout, "{:?}", start.elapsed())?;
        }
    }
    Ok(())
}
|
mod schip8;
use schip8::SChip8;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::CanvasRenderingContext2d;
use web_sys::HtmlCanvasElement;
#[wasm_bindgen(start)]
pub fn start() {
    // Entry point invoked automatically when the wasm module is loaded.
    let window = web_sys::window().unwrap();
    let canvas = window
        .document()
        .unwrap()
        .get_element_by_id("canvas")
        .unwrap()
        .dyn_into::<HtmlCanvasElement>()
        .unwrap();
    // NOTE(review): `canvas` and `context` are never passed to the emulator
    // below — presumably rendering is wired up elsewhere; confirm.
    let context = canvas
        .get_context("2d")
        .unwrap()
        .unwrap()
        .dyn_into::<CanvasRenderingContext2d>()
        .unwrap();
    // The TETRIS ROM is embedded into the wasm binary at compile time.
    let tetris = include_bytes!("../../roms/chip8/TETRIS").to_vec();
    let mut schip8 = SChip8::new(tetris);
    // NOTE(review): a synchronous loop here blocks the browser's main thread,
    // and `if !quit { break; }` exits when `quit` is *false*, which reads
    // inverted — confirm the return semantics of `run` before changing.
    loop {
        let quit = schip8.run(20);
        if !quit {
            break;
        }
    }
}
// Since the screen array must be private, use this for rendering
// NOTE(review): this function takes `&self` yet sits at top level, outside
// any `impl` block, and references `self.screen*`, `SCHIP8_SCREEN_WIDTH` and
// `JsValue`, none of which are in scope here. It appears to have been pasted
// out of the `schip8` module and will not compile in this position — it
// belongs inside `impl SChip8` where those private fields are visible.
pub fn render(
    &self,
    canvas: &web_sys::HtmlCanvasElement,
    context: &web_sys::CanvasRenderingContext2d,
) {
    // Clear pass: paints the whole canvas with the current fill style.
    context.fill_rect(0.0, 0.0, canvas.width() as f64, canvas.height() as f64);
    // Size (in canvas pixels) of one emulator pixel, integer division.
    let pixel_w = canvas.width() as usize / self.screen_width;
    let pixel_h = canvas.height() as usize / self.screen_height;
    for y in 0..self.screen_height {
        for x in 0..self.screen_width {
            // The screen buffer is a flat row-major array.
            let pixel = self.screen[y * SCHIP8_SCREEN_WIDTH + x];
            if pixel == 0 {
                context.set_fill_style(&JsValue::from_str("black"));
            } else {
                context.set_fill_style(&JsValue::from_str("white"));
            }
            context.fill_rect(
                (x * pixel_w) as f64,
                (y * pixel_h) as f64,
                pixel_w as f64,
                pixel_h as f64,
            );
            //log!("Running");
        }
    }
}
|
#[macro_use]
extern crate diesel;
use std::io::{Read, Write};
use std::net::TcpStream;
use std::str::from_utf8;
use byteorder::{ByteOrder, LittleEndian};
use diesel::prelude::*;
use diesel::mysql::MysqlConnection;
use dotenv::dotenv;
use std::env;
use std::time::Duration;
use device_query::{DeviceQuery, DeviceState, Keycode};
use std::time::Instant;
use std::f32::consts::PI;
use self::models::*;
pub mod schema;
pub mod models;
/// Row mapping for the `OtisData` telemetry table.
///
/// NOTE(review): the upper-case field and type names presumably mirror the
/// database column names; Diesel's `Queryable` maps by column order, so
/// renaming would be cosmetic — but as written this triggers naming-style
/// warnings without an `#[allow]`.
#[derive(Queryable)]
pub struct Otis_AK{
    /// Primary key.
    pub id: i32,
    /// Pitch angle (converted to degrees by `main` before saving).
    pub PITCH: f32,
    /// Yaw angle (converted to degrees by `main` before saving).
    pub YAW: f32,
    /// Controller output channel 1.
    pub OUTPUT1: f32,
    /// Controller output channel 2.
    pub OUTPUT2: f32,
}
/// Inserts one telemetry row into the `OtisData` table.
///
/// The insert is best-effort: a failure is reported to stderr rather than
/// panicking (the original silently discarded the `Result`, which also
/// produced an unused-`Result` warning).
pub fn save_data<'a>(conn: &MysqlConnection,
               PITCH: &'a f32,
               YAW: &'a f32,
               OUTPUT1: &'a f32,
               OUTPUT2: &'a f32,) {
    use schema::OtisData;
    let new_data = NewData{
        PITCH,
        YAW,
        OUTPUT1,
        OUTPUT2,
    };
    // alt: use get_result() to fetch the inserted row back.
    if let Err(e) = diesel::insert_into(OtisData::table)
        .values(&new_data)
        .execute(conn)
    {
        eprintln!("Error saving telemetry row: {}", e);
    }
}
/// Connects to the MySQL server named by the `DATABASE_URL` environment
/// variable (optionally loaded from a `.env` file).
///
/// # Panics
/// Panics if `DATABASE_URL` is unset or the connection fails.
pub fn establish_connection() -> MysqlConnection {
    // Best-effort .env load; absence of the file is fine.
    dotenv().ok();
    let database_url = env::var("DATABASE_URL")
        .expect("DATABASE_URL must be set");
    // unwrap_or_else avoids the original's `expect(&format!(..))`, which
    // allocated the message eagerly even on success (clippy: expect_fun_call).
    MysqlConnection::establish(&database_url)
        .unwrap_or_else(|e| panic!("Error connecting to {}: {}", database_url, e))
}
fn main () -> std::io::Result<()>{
let device_state = DeviceState::new();
// Initial connection with database
let connection = establish_connection();
// Clear main database table on start
connection.execute("TRUNCATE TABLE OtisData").unwrap();
// Initial TCP connection with Arduino
let mut stream = attempt_arduino_connection();
let mut data = [0 as u8; 8];
stream.read(&mut data);
// Read 1st msg
let text = from_utf8(&data).unwrap();
println!("{}", text);
let sent_time = Instant::now();
loop{
stream.read(&mut data);
if data == [0 as u8; 8]{
println!("Connection lost");
stream = attempt_arduino_connection();
}
let keys: Vec<Keycode> = device_state.get_keys();
if (sent_time.elapsed().as_millis() > 10){
if(keys.contains(&Keycode::I)){
stream.write(&[1]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::K)){
stream.write(&[2]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::O)){
stream.write(&[3]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::L)){
stream.write(&[4]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::J)){
stream.write(&[5]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::P)){
stream.write(&[6]);
let sent_time = Instant::now();
}
if(keys.contains(&Keycode::F)){
stream.write(&[7]);
let sent_time = Instant::now();
}
}
// decode data stream
let mut p = [data[0] as u8, data[1] as u8];
let p_float: f32 = ((LittleEndian::read_u16(&p) as f32) / 10436.381 - PI) * 180.0/(PI) ;
let mut y = [data[2] as u8, data[3] as u8];
let y_float: f32 = ((LittleEndian::read_u16(&y) as f32) / 10436.381 - PI) * 180.0/(PI) ;
let mut o = [data[4] as u8, data[5] as u8];
let o_float: f32 = (LittleEndian::read_u16(&o) as f32) / 33.0 - 1000.0 ;
let mut g = [data[6] as u8, data[7] as u8];
let g_float: f32 = (LittleEndian::read_u16(&g) as f32) / 33.0 - 1000.0 ;
println!("P: {}, Y: {}, O: {}, G: {}", p_float, y_float, o_float, g_float);
// save data to mysql server
save_data(&connection, &p_float, &y_float, &o_float, &g_float);
// reset data buffer
data = [0 as u8; 8];
}
}
/// Repeatedly tries to connect to the Arduino until it succeeds, then
/// configures a 5-second read timeout and primes the link with one command
/// byte.
///
/// NOTE: retries back-to-back with no delay (busy-wait).
pub fn attempt_arduino_connection() -> std::net::TcpStream {
    /* "192.168.50.45:80" for MKR on older twip */
    /* "192.168.50.181:80" for newer robot */
    let mut stream = loop {
        match TcpStream::connect("192.168.50.181:80") {
            Ok(s) => {
                println!("Connected to the server!");
                break s;
            }
            // println!("Couldn't connect to server...");
            Err(_) => {}
        }
    };
    // A read that stalls longer than 5 s returns an error, letting the caller
    // detect a dead link.
    stream.set_read_timeout(Some(Duration::new(5, 0))).expect("set_read_timeout failed");
    stream.write(&[3]).expect("Failed to send data to Arduino");
    stream
}
|
/// Benchmark driver: evaluates the same rule set one million times and prints
/// how many evaluations came back unconstrained (deterministically, all of
/// them or none).
fn main() {
    let features: &[u16] = &[1, 2];
    let families: &[u16] = &[0, 1];
    // Each constraint triple is [family, feature, polarity].
    let constraint_a: &[[u16; 3]] = &[[0, 1, 0], [1, 0, 1], [1, 2, 0]];
    let constraint_b: &[[u16; 3]] = &[[0, 1, 1], [1, 0, 1], [1, 2, 0]];
    let constraint_c: &[[u16; 3]] = &[[2, 3, 0], [2, 4, 1], [3, 5, 0], [1, 2, 0]];
    let constraint_d: &[[u16; 3]] = &[[2, 3, 1], [2, 4, 0], [3, 5, 1], [1, 2, 1]];
    let rule1: &[&[[u16; 3]]] = &[constraint_a, constraint_b];
    let rule2: &[&[[u16; 3]]] = &[constraint_c, constraint_d];
    let all_rules: &[&[&[[u16; 3]]]] = &[rule1, rule2];
    let loops: u32 = 1000000;
    let mut counter: u32 = 0;
    for _ in 0..loops {
        if check_unconstrained(all_rules, families, features) {
            counter += 1;
        }
    }
    println!("{}", counter)
}
/// True when every rule admits at least one satisfiable constraint path.
///
/// Replaces the original's manual head/tail recursion with the equivalent
/// (and short-circuiting) `iter().all()`.
fn check_unconstrained(all_rules: &[&[&[[u16; 3]]]], families: &[u16], features: &[u16]) -> bool {
    all_rules
        .iter()
        .all(|rule| path_through_constraints(rule, families, features))
}
/// True when at least one constraint alternative of a rule checks out
/// (an empty alternative list has no path).
///
/// Replaces the original's manual head/tail recursion with the equivalent
/// short-circuiting `iter().any()`.
fn path_through_constraints(constraints: &[&[[u16; 3]]], families: &[u16], features: &[u16]) -> bool {
    constraints
        .iter()
        .any(|constraint| constraint_check(constraint, families, features))
}
/// True when every feature triple in the constraint is satisfied
/// (vacuously true for an empty constraint).
///
/// Replaces the original's manual head/tail recursion with the equivalent
/// short-circuiting `iter().all()`.
fn constraint_check(constraint: &[[u16; 3]], families: &[u16], features: &[u16]) -> bool {
    constraint
        .iter()
        .all(|feature| check_this_feature(feature, families, features))
}
/// Checks one `[family, feature, polarity]` triple against the active
/// `families`/`features` sets.
///
/// A triple only constrains anything when its family is in scope; in that
/// case the feature's presence must match the polarity (`have`). The original
/// took needless `&bool` references and spelled the same truth table out as
/// three branches.
fn check_this_feature(this_feature: &[u16; 3], families: &[u16], features: &[u16]) -> bool {
    if !family_included(this_feature, families) {
        // Family out of scope: the triple imposes no constraint.
        return true;
    }
    // In scope: presence in `features` must agree with the polarity flag.
    features.contains(&this_feature[1]) == have(this_feature)
}
/// Polarity of a `[family, feature, polarity]` triple: the third element is
/// 0 when the feature is required ("have"), non-zero when it must be absent.
fn have(this_feature: &[u16; 3]) -> bool {
    // Expression form; the explicit `return …;` was unidiomatic.
    this_feature[2] == 0
}
/// True when the triple's family (first element) is among the active
/// `families`.
fn family_included(this_feature: &[u16; 3], families: &[u16]) -> bool {
    // Expression form; the explicit `return …;` was unidiomatic.
    families.contains(&this_feature[0])
}
// Compile command: rustc -o compare compare.rs; |
//! [ExtraData](https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-shllink/c41e062d-f764-4f13-bd4f-ea812ab9a4d1) related structs
mod tracker_data_block;
use std::io::{Result, Seek, Read, Cursor};
use byteorder::{LittleEndian, ReadBytesExt};
use serde::Serialize;
use tracker_data_block::TrackerDataBlock;
/// ExtraData types implemented
// Currently only the TrackerDataBlock (signature 0xa0000003) is parsed; all
// other extra-data block types are skipped by the reader.
#[derive(Debug, Serialize)]
pub enum ExtraDataTypes {
    Tracker(TrackerDataBlock)
}
/// ExtraData refers to a set of structures that convey additional information about a link target.
/// These optional structures can be present in an extra data section that is appended to the basic Shell Link Binary File Format.
#[derive(Debug, Serialize)]
pub struct ExtraData {
    /// All recognized blocks, in the order they appear in the file.
    pub extra_data_blocks: Vec<ExtraDataTypes>
}
impl ExtraData {
pub fn from_buffer(buf: &[u8]) -> Result<Self>{
Self::from_reader(&mut Cursor::new(buf))
}
pub fn from_reader<R: Read+Seek>(r: &mut R) -> Result<Self>{
let mut extra_data_blocks: Vec<ExtraDataTypes> = Vec::new();
loop {
let size = r.read_u32::<LittleEndian>()?;
if size == 0 {
break;
}
let signature = r.read_u32::<LittleEndian>()?;
let mut extra_data_bytes = vec![0;(size - 8) as usize];
r.read_exact(&mut extra_data_bytes)?;
match signature {
0xa0000003 => { extra_data_blocks.push(ExtraDataTypes::Tracker(TrackerDataBlock::from_buffer(&extra_data_bytes)?)); },
_ => {}
};
}
Ok(Self {
extra_data_blocks
})
}
} |
use std::mem;
/// Infinite Fibonacci iterator state: `curr` is the next value to yield,
/// `next` is the one after it.
struct Fibonacci {
    curr: u32,
    next: u32,
}
impl Iterator for Fibonacci {
type Item = u32;
fn next(&mut self) -> Option<u32> {
let new_next = self.curr + self.next;
let new_curr = mem::replace(&mut self.next, new_next);
Some(mem::replace(&mut self.curr, new_curr))
}
}
/// Starts the sequence at 1, 2, 3, 5, … (the Project Euler #2 convention,
/// not the usual 1, 1, 2 start).
fn fibs() -> Fibonacci {
    Fibonacci { curr: 1, next : 2 }
}
#[test]
fn problem2_generates_a_sequence() {
    // First ten terms under the 1, 2, 3, … starting convention.
    let actual = fibs().take(10).collect::<Vec<u32>>();
    assert_eq!(actual, [1, 2, 3, 5, 8, 13, 21, 34, 55, 89]);
}
#[test]
fn problem2_solves() {
    // Project Euler #2: sum of even Fibonacci terms not exceeding four
    // million. `sum()` replaces the hand-rolled `fold(0, |a, b| a + b)`.
    let actual: u32 = fibs()
        .filter(|&x| x % 2 == 0)
        .take_while(|&x| x <= 4000000)
        .sum();
    assert_eq!(actual, 4613732)
}
|
//! How client and server communicate. Usually a client will send the server
//! some action it wants to execute, and the server will send some other action,
//! possibly the very one the client sent, and possibly to other clients
//! other than the sender. It should send the action to all clients affected by
//! the execution of the action the server sends, so that all clients can update
//! their local state to be consistent with the server.
use super::client::Client;
use super::id::Id;
/// An action that comes directly from the client. This comes from over the
/// network.
// Wire format: serde internally-tagged JSON — a "type" field selects the
// variant, remaining fields are the variant's payload.
#[derive(Deserialize, Debug)]
#[serde(tag = "type")]
pub enum FromClient {
    /// The client wants to set their name.
    SetName { name: String },
}
/// An action that gets sent from various sources, like the `Handler`s and the
/// `Factory`, to the server. This is the only way to describe a change in the
/// state of the `Server`. Each variant contains an `Id`, which is the `Id` of
/// the `Client` that caused this `ToServer` to be sent.
// Internal only (no Serialize/Deserialize): these are constructed in-process
// and never cross the network themselves.
#[derive(Debug)]
pub enum ToServer {
    /// Make a `Client`. Should be sent when a client connects.
    MkClient(Id, Client),
    /// Remove a `Client`. Should be sent when a client disconnects, or somehow
    /// misbehaves, e.g. sends us an unintelligible message.
    RmClient(Id),
    /// A `Client` has sent us something intelligible. Note, however, that being
    /// 'intelligible' merely means 'we were able to parse it into a
    /// `FromClient`'. It does not mean that that `FromClient` describes a change
    /// to the `Server` state that would 'make sense'.
    FromClient(Id, FromClient),
}
/// An action that gets sent from the `Server` to the `Client`.
// Wire format mirrors `FromClient`: internally tagged by a "type" field.
#[derive(Serialize, Debug)]
#[serde(tag = "type")]
pub enum ToClient {
    /// Set the `Id` of a `Client`.
    SetId { id: Id },
    /// A `Client` was made.
    MkClient { id: Id, name: String },
    /// A `Client` was removed.
    RmClient { id: Id },
}
|
#![warn(missing_docs)]
#![feature(box_syntax, box_patterns)]
//! A Rust implementation of chapter 4 of Benjamin C. Pierce's "Types and Programming Languages"
//! `arith` language.
//!
//! c.f. https://www.cis.upenn.edu/~bcpierce/tapl/checkers/arith/core.ml for the sample OCaml
//! implementation.
//!
//! # Example
//!
//! This library exposes an `eval` function which takes any `Term` and repeatedly evaluates it
//! according to our evaluation rules until no rule applies.
//!
//! ```rust
//! #![feature(box_syntax)]
//! use arith::eval;
//! use arith::Term::{Succ, Pred, Zero, IsZero, True};
//!
//! let succ_zero = Succ(box Zero);
//! let pred_succ_zero = Pred(box succ_zero);
//! let iszero_pred_succ_zero = IsZero(box pred_succ_zero);
//!
//! assert_eq!(True, eval(&iszero_pred_succ_zero));
//! ```
pub use syntax::Term;
pub use evaluation::eval;
pub use grammar::parse_Term as parse;
mod syntax;
mod evaluation;
mod grammar;
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke-tests the generated parser over every `arith` construct:
    /// the boolean/zero literals, the three unary forms, the conditional,
    /// and one nested application.
    #[test]
    fn arith() {
        assert_eq!(Ok(Term::True), parse("true"));
        assert_eq!(Ok(Term::False), parse("false"));
        assert_eq!(Ok(Term::Zero), parse("0"));
        assert_eq!(Ok(Term::Pred(Box::new(Term::Zero))), parse("pred 0"));
        assert_eq!(Ok(Term::Succ(Box::new(Term::Zero))), parse("succ 0"));
        assert_eq!(Ok(Term::IsZero(Box::new(Term::Zero))), parse("iszero 0"));
        assert_eq!(Ok(Term::If(Box::new(Term::True),
                               Box::new(Term::Zero),
                               Box::new(Term::Succ(Box::new(Term::Zero))))),
                   parse("if true then 0 else succ 0"));
        // Prefix operators nest without parentheses.
        assert_eq!(Ok(Term::Succ(Box::new(Term::Pred(Box::new(Term::Zero))))),
                   parse("succ pred 0"));
    }
}
|
//! Deserializer implementation for ShopSite `.aa` files.
//!
//! # Parsing Is Not Strict
//!
//! Because there is no public specification for the format of `.aa` files, and all format details are inferred from the `.aa` files that ShopSite itself generates, this parser is not strict about what it will accept as valid. In particular, this parser will:
//!
//! * Skip over lines containing only whitespace characters
//! * Allow comments to begin after any number of whitespace characters
//! * Understand `:` delimiters that are not followed by a space character
//!
//! ShopSite itself may or may not be so forgiving. This parser is not designed to be used as a validator.
//!
//! In other words, just because this parser doesn't reject or misunderstand a `.aa` file doesn't mean ShopSite won't reject or misunderstand it!
use serde::de::Deserialize;
use std::{
fs::File,
io::{self, BufRead, BufReader},
path::Path,
rc::Rc
};
mod position;
pub use position::*;
mod error;
pub use error::*;
mod parser_io;
use parser_io::*;
mod deser_toplevel;
mod deser_value;
use deser_value::*;
/// Streaming deserializer for ShopSite `.aa` files.
pub struct Deserializer<R: BufRead> {
    /// Source of input bytes.
    reader: R,
    /// Buffer of bytes read from the input source for the current line.
    ///
    /// Parsing occurs at the byte level, since this format is always Windows-1252 and it's faster and simpler to parse byte-by-byte without dealing with UTF-8's variable-width characters.
    buf_b: Vec<u8>,
    /// Buffer of decoded text from the input source.
    ///
    /// Note that this doesn't contain the entire line decoded. Rather, individual chunks of text are taken from `buf_b`, decoded, and then slices of this string are passed to the deserialize routines. This string is then cleared on every new line.
    buf_s: String,
    /// Where in the file the parser is currently looking.
    pos: Position,
    /// The last byte that was read. Initialized to 0 before any read.
    last_byte: u8,
    /// The next byte that will be read.
    ///
    /// This is set to `Some` when `peek_byte` is called. When `read_byte` is called, it will first return this byte before reading any more from the reader.
    peeked_byte: Option<u8>,
    /// Initially `false`. Set to true upon reaching end-of-file.
    reached_eof: bool
}
impl<R: BufRead> Deserializer<R> {
    /// Builds a deserializer over `reader`, starting at line 1, column 1.
    /// `file` is only used to label positions in error messages.
    pub fn new(reader: R, file: Option<Rc<Path>>) -> Deserializer<R> {
        let start = Position {
            file: file.into(),
            line: 1,
            column: 1
        };
        Deserializer {
            reader,
            // Both scratch buffers start with a generous capacity to avoid
            // regrowing on typical lines.
            buf_b: Vec::with_capacity(4096),
            buf_s: String::with_capacity(4096),
            pos: start,
            last_byte: 0,
            peeked_byte: None,
            reached_eof: false
        }
    }
}
/// Deserializes a `T` from any buffered reader; `path` is used only to label
/// error positions.
pub fn from_reader<'de, T: Deserialize<'de>, R: BufRead>(reader: R, path: Option<Rc<Path>>) -> Result<T> {
    let mut de = Deserializer::new(reader, path);
    let value = T::deserialize(&mut de)?;
    Ok(value)
}
/// Deserializes a `T` from an in-memory byte slice by wrapping it in a
/// `Cursor` (which implements `BufRead`).
pub fn from_bytes<'de, T: Deserialize<'de>>(bytes: &[u8], file: Option<Rc<Path>>) -> Result<T> {
    from_reader(io::Cursor::new(bytes), file)
}
/// Opens `file` and deserializes a `T` from its buffered contents, attaching
/// the path to any I/O error.
pub fn from_file<'de, T: Deserialize<'de>>(file: Rc<Path>) -> Result<T> {
    // NOTE(review): this `.into()` looks like an identity conversion on
    // `Rc<Path>` — confirm it isn't converting to some other path type
    // before removing it.
    let file = file.into();
    match File::open(&file) {
        Ok(fh) => from_reader(BufReader::new(fh), Some(file)),
        Err(error) => Err(Error::Io { error, file: Some(file) })
    }
}
|
#[doc = "Register `CDCFGR2` reader"]
pub type R = crate::R<CDCFGR2_SPEC>;
#[doc = "Register `CDCFGR2` writer"]
pub type W = crate::W<CDCFGR2_SPEC>;
#[doc = "Field `CDPPRE1` reader - CPU domain APB1 prescaler Set and reset by software to control the CPU domain APB1 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE1 write. 0xx: rcc_pclk1 = rcc_hclk1 (default after reset)"]
pub type CDPPRE1_R = crate::FieldReader<CDPPRE1_A>;
#[doc = "CPU domain APB1 prescaler Set and reset by software to control the CPU domain APB1 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE1 write. 0xx: rcc_pclk1 = rcc_hclk1 (default after reset)\n\nValue on reset: 0"]
// svd2rust-generated enum; discriminants are the raw 3-bit field values.
// Values 1–3 are reserved ("0xx" all mean divide-by-1), hence the gap.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CDPPRE1_A {
    #[doc = "0: rcc_hclk not divided"]
    Div1 = 0,
    #[doc = "4: rcc_hclk divided by 2"]
    Div2 = 4,
    #[doc = "5: rcc_hclk divided by 4"]
    Div4 = 5,
    #[doc = "6: rcc_hclk divided by 8"]
    Div8 = 6,
    #[doc = "7: rcc_hclk divided by 16"]
    Div16 = 7,
}
// Converts the enum back to its raw field value (the repr(u8) discriminant).
impl From<CDPPRE1_A> for u8 {
    #[inline(always)]
    fn from(variant: CDPPRE1_A) -> Self {
        variant as _
    }
}
// Declares the field's underlying storage type for the svd2rust generic API.
impl crate::FieldSpec for CDPPRE1_A {
    type Ux = u8;
}
// svd2rust-generated reader accessors for the 3-bit CDPPRE1 field.
impl CDPPRE1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CDPPRE1_A> {
        // Returns None for the reserved encodings 1–3.
        match self.bits {
            0 => Some(CDPPRE1_A::Div1),
            4 => Some(CDPPRE1_A::Div2),
            5 => Some(CDPPRE1_A::Div4),
            6 => Some(CDPPRE1_A::Div8),
            7 => Some(CDPPRE1_A::Div16),
            _ => None,
        }
    }
    #[doc = "rcc_hclk not divided"]
    #[inline(always)]
    pub fn is_div1(&self) -> bool {
        *self == CDPPRE1_A::Div1
    }
    #[doc = "rcc_hclk divided by 2"]
    #[inline(always)]
    pub fn is_div2(&self) -> bool {
        *self == CDPPRE1_A::Div2
    }
    #[doc = "rcc_hclk divided by 4"]
    #[inline(always)]
    pub fn is_div4(&self) -> bool {
        *self == CDPPRE1_A::Div4
    }
    #[doc = "rcc_hclk divided by 8"]
    #[inline(always)]
    pub fn is_div8(&self) -> bool {
        *self == CDPPRE1_A::Div8
    }
    #[doc = "rcc_hclk divided by 16"]
    #[inline(always)]
    pub fn is_div16(&self) -> bool {
        *self == CDPPRE1_A::Div16
    }
}
#[doc = "Field `CDPPRE1` writer - CPU domain APB1 prescaler Set and reset by software to control the CPU domain APB1 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE1 write. 0xx: rcc_pclk1 = rcc_hclk1 (default after reset)"]
pub type CDPPRE1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, CDPPRE1_A>;
// svd2rust-generated writer: each method stages one enumerated value into the
// in-progress register write (`O` is the field's bit offset).
impl<'a, REG, const O: u8> CDPPRE1_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "rcc_hclk not divided"]
    #[inline(always)]
    pub fn div1(self) -> &'a mut crate::W<REG> {
        self.variant(CDPPRE1_A::Div1)
    }
    #[doc = "rcc_hclk divided by 2"]
    #[inline(always)]
    pub fn div2(self) -> &'a mut crate::W<REG> {
        self.variant(CDPPRE1_A::Div2)
    }
    #[doc = "rcc_hclk divided by 4"]
    #[inline(always)]
    pub fn div4(self) -> &'a mut crate::W<REG> {
        self.variant(CDPPRE1_A::Div4)
    }
    #[doc = "rcc_hclk divided by 8"]
    #[inline(always)]
    pub fn div8(self) -> &'a mut crate::W<REG> {
        self.variant(CDPPRE1_A::Div8)
    }
    #[doc = "rcc_hclk divided by 16"]
    #[inline(always)]
    pub fn div16(self) -> &'a mut crate::W<REG> {
        self.variant(CDPPRE1_A::Div16)
    }
}
#[doc = "Field `CDPPRE2` reader - CPU domain APB2 prescaler Set and reset by software to control the CPU domain APB2 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE2 write. 0xx: rcc_pclk2 = rcc_hclk1 (default after reset)"]
pub use CDPPRE1_R as CDPPRE2_R;
#[doc = "Field `CDPPRE2` writer - CPU domain APB2 prescaler Set and reset by software to control the CPU domain APB2 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE2 write. 0xx: rcc_pclk2 = rcc_hclk1 (default after reset)"]
pub use CDPPRE1_W as CDPPRE2_W;
// svd2rust-generated field readers: mask-and-shift the raw register value.
impl R {
    #[doc = "Bits 4:6 - CPU domain APB1 prescaler Set and reset by software to control the CPU domain APB1 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE1 write. 0xx: rcc_pclk1 = rcc_hclk1 (default after reset)"]
    #[inline(always)]
    pub fn cdppre1(&self) -> CDPPRE1_R {
        CDPPRE1_R::new(((self.bits >> 4) & 7) as u8)
    }
    #[doc = "Bits 8:10 - CPU domain APB2 prescaler Set and reset by software to control the CPU domain APB2 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE2 write. 0xx: rcc_pclk2 = rcc_hclk1 (default after reset)"]
    #[inline(always)]
    pub fn cdppre2(&self) -> CDPPRE2_R {
        CDPPRE2_R::new(((self.bits >> 8) & 7) as u8)
    }
}
// svd2rust-generated field writers; the const generic encodes each field's
// bit offset within the register.
impl W {
    #[doc = "Bits 4:6 - CPU domain APB1 prescaler Set and reset by software to control the CPU domain APB1 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE1 write. 0xx: rcc_pclk1 = rcc_hclk1 (default after reset)"]
    #[inline(always)]
    #[must_use]
    pub fn cdppre1(&mut self) -> CDPPRE1_W<CDCFGR2_SPEC, 4> {
        CDPPRE1_W::new(self)
    }
    #[doc = "Bits 8:10 - CPU domain APB2 prescaler Set and reset by software to control the CPU domain APB2 clock division factor. The clock is divided by the new prescaler factor from 1 to 16 cycles of rcc_hclk1 after CDPPRE2 write. 0xx: rcc_pclk2 = rcc_hclk1 (default after reset)"]
    #[inline(always)]
    #[must_use]
    pub fn cdppre2(&mut self) -> CDPPRE2_W<CDCFGR2_SPEC, 8> {
        CDPPRE2_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY contract (caller's): the raw value must be a valid register
        // state; no per-field checking is performed here.
        self.bits = bits;
        self
    }
}
#[doc = "\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cdcfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cdcfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type tying the generic read/write/reset machinery to this register.
pub struct CDCFGR2_SPEC;
impl crate::RegisterSpec for CDCFGR2_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cdcfgr2::R`](R) reader structure"]
impl crate::Readable for CDCFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cdcfgr2::W`](W) writer structure"]
impl crate::Writable for CDCFGR2_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CDCFGR2 to value 0"]
impl crate::Resettable for CDCFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate build_probe_mpi;
extern crate bindgen;
use std::env;
use std::path::PathBuf;
/// Build script: locates PETSc and the system MPI, emits Cargo linker
/// directives for them, and generates Rust bindings for the PETSc headers.
fn main() {
    // Allow user to set PETSc paths from environment variables.
    // Each variable is read once and failure produces a clear message
    // (the original read PETSC_DIR twice and used bare `unwrap()`).
    let petsc_dir = env::var("PETSC_DIR").expect("PETSC_DIR must be set");
    let petsc_arch = env::var("PETSC_ARCH").expect("PETSC_ARCH must be set");
    let petsc_lib_dir = PathBuf::from(env::var("PETSC_LIB").expect("PETSC_LIB must be set"));
    let petsc_include_dir: PathBuf = [petsc_dir.as_str(), "include"].iter().collect();
    let petsc_arch_include_dir: PathBuf =
        [petsc_dir.as_str(), petsc_arch.as_str(), "include"].iter().collect();
    // Tell Cargo to link the PETSc, LAPACK, and BLAS libraries.
    println!("cargo:rustc-link-search={}", petsc_lib_dir.display());
    println!("cargo:rustc-link-lib=petsc");
    println!("cargo:rustc-link-lib=flapack");
    println!("cargo:rustc-link-lib=fblas");
    println!("cargo:rustc-link-lib=gfortran");
    // Find the system MPI library and headers,
    // in the same way as rsmpi/build.rs.
    let mpi_lib = match build_probe_mpi::probe() {
        Ok(mpi_lib) => mpi_lib,
        Err(errs) => {
            println!("Could not find MPI library for various reasons:\n");
            for (i, err) in errs.iter().enumerate() {
                println!("Reason #{}:\n{}\n", i, err);
            }
            panic!();
        }
    };
    // Tell Cargo to link the MPI libraries, as in rsmpi/build.rs.
    // TODO - some of the MPI library linking here and MPI library path-setting
    // below may be unnecessary.
    for dir in &mpi_lib.lib_paths {
        println!("cargo:rustc-link-search=native={}", dir.display());
    }
    for lib in &mpi_lib.libs {
        println!("cargo:rustc-link-lib={}", lib);
    }
    // Set up builder with MPI and PETSc library and include paths.
    // NOTE(review): `link()` and `blacklist_type()` are the pre-0.58 bindgen
    // names (newer releases use `blocklist_type`); kept as-is to match the
    // pinned bindgen version — confirm before upgrading.
    let mut builder = bindgen::Builder::default();
    for lib in &mpi_lib.libs {
        builder = builder.link(lib.clone());
    }
    for dir in &mpi_lib.lib_paths {
        builder = builder.clang_arg(format!("-L{}", dir.display()));
    }
    for dir in &mpi_lib.include_paths {
        builder = builder.clang_arg(format!("-I{}", dir.display()));
    }
    builder = builder.link("petsc");
    builder = builder.clang_arg(format!("-L{}", petsc_lib_dir.display()));
    builder = builder.clang_arg(format!("-I{}", petsc_include_dir.display()));
    builder = builder.clang_arg(format!("-I{}", petsc_arch_include_dir.display()));
    // Generate PETSc bindings.
    // Hide types which generate duplicate definitions:
    // https://stackoverflow.com/a/34379937
    let bindings = builder
        .header("wrapper.h")
        .blacklist_type("FP_NAN")
        .blacklist_type("FP_INFINITE")
        .blacklist_type("FP_ZERO")
        .blacklist_type("FP_SUBNORMAL")
        .blacklist_type("FP_NORMAL")
        .generate()
        .expect("Unable to generate PETSc bindings");
    // Write out PETSc bindings into Cargo's build output directory.
    let out_path = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR must be set by Cargo"));
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write PETSc bindings");
}
|
use super::hitable::HitRecord;
use super::ray::Ray;
use super::vec3::Vec3;
fn random_in_unit_sphere() -> Vec3 {
let mut p: Vec3;
loop {
p =
2.0 * Vec3::new(
rand::random::<f32>(),
rand::random::<f32>(),
rand::random::<f32>(),
) - Vec3::ones();
if p.squared_length() < 1.0 {
break;
}
}
return p;
}
/// Common interface for surface materials in the ray tracer.
pub trait Material {
    /// Computes how an incoming `ray` interacts with the surface described by
    /// `hit`. Returns `Some((attenuation, scattered_ray))` when the ray
    /// scatters, or `None` when it is absorbed.
    fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<(Vec3, Ray)>;
}
/// Diffuse (matte) material: scatters incoming rays in a random direction
/// around the surface normal and attenuates them by `albedo`.
pub struct Lambertian {
    albedo: Vec3,
}

impl Lambertian {
    /// Creates a diffuse material with the given reflectance color.
    pub fn new(albedo: Vec3) -> Lambertian {
        Lambertian { albedo }
    }
}

impl Material for Lambertian {
    fn scatter(&self, _ray: &Ray, hit: &HitRecord) -> Option<(Vec3, Ray)> {
        // Bounce toward a random point in the unit sphere centered on the
        // tip of the surface normal; the new ray starts at the hit point.
        let bounce_target = hit.p + hit.normal + random_in_unit_sphere();
        let scattered = Ray::new(hit.p, bounce_target - hit.p);
        Some((self.albedo, scattered))
    }
}
/// Reflective material: mirrors rays about the normal, with an optional
/// `fuzz` factor that perturbs the reflection for a brushed-metal look.
pub struct Metal {
    albedo: Vec3,
    fuzz: f32,
}

impl Metal {
    /// Creates a metal material; `fuzz` is clamped to at most 1.0 so the
    /// perturbation sphere never exceeds the reflection vector.
    pub fn new(albedo: Vec3, fuzz: f32) -> Metal {
        Metal {
            albedo,
            fuzz: fuzz.min(1.0),
        }
    }
}

impl Material for Metal {
    fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<(Vec3, Ray)> {
        // Mirror the (normalized) incoming direction about the normal, then
        // jitter it by a random offset scaled by the fuzz factor.
        let mirror = ray.direction().unit_vector().reflect(hit.normal);
        let scattered = Ray::new(hit.p, mirror + self.fuzz * random_in_unit_sphere());
        // Only rays leaving the surface scatter; anything else is absorbed.
        if scattered.direction().dot(hit.normal) > 0.0 {
            return Some((self.albedo, scattered));
        }
        None
    }
}
/// Transparent material (glass, water) that both reflects and refracts
/// according to its index of refraction.
pub struct Dielectric {
    refraction_index: f32,
}

impl Dielectric {
    /// Creates a dielectric with the given index of refraction.
    pub fn new(refraction_index: f32) -> Dielectric {
        Dielectric { refraction_index }
    }

    /// Schlick's polynomial approximation of the Fresnel reflectance for the
    /// given cosine of the incidence angle.
    fn schlick(&self, cosine: f32) -> f32 {
        let r0 = ((1.0 - self.refraction_index) / (1.0 + self.refraction_index)).powf(2.0);
        r0 + (1.0 - r0) * (1.0 - cosine).powf(5.0)
    }
}

impl Material for Dielectric {
    fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<(Vec3, Ray)> {
        let reflected = ray.direction().reflect(hit.normal);
        // Dielectrics absorb nothing: attenuation is always white.
        let attenuation = Vec3::ones();
        // Orient the normal and index ratio depending on whether the ray is
        // exiting (dot > 0) or entering the surface.
        let exiting = ray.direction().dot(hit.normal) > 0.0;
        let (outward_normal, ni_over_nt, cosine) = if exiting {
            (
                -1.0 * hit.normal,
                self.refraction_index,
                self.refraction_index * ray.direction().dot(hit.normal) / ray.direction().length(),
            )
        } else {
            (
                hit.normal,
                1.0 / self.refraction_index,
                -1.0 * ray.direction().dot(hit.normal) / ray.direction().length(),
            )
        };
        match ray.direction().refract(outward_normal, ni_over_nt) {
            Some(refracted) => {
                // Refraction is possible: reflect with probability given by
                // the Fresnel term, otherwise transmit.
                let reflect_prob = self.schlick(cosine);
                if rand::random::<f32>() < reflect_prob {
                    Some((attenuation, Ray::new(hit.p, reflected)))
                } else {
                    Some((attenuation, Ray::new(hit.p, refracted)))
                }
            }
            // Total internal reflection: no refracted ray exists.
            None => Some((attenuation, Ray::new(hit.p, reflected))),
        }
    }
}
|
use crate::errors::*;
use crate::request::{FilterOptions, ModelRequest, RequestDetails, RequestParameters, ZohoRequest};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
pub mod comment;
/// Builds the Zoho Projects API path for the forums of one project:
/// `portal/{portal}/projects/{project}/forums/`.
pub(crate) fn model_path(
    portal: impl std::fmt::Display,
    project: impl std::fmt::Display,
) -> String {
    // Concatenate the fixed segments with the stringified identifiers.
    [
        "portal/".to_string(),
        portal.to_string(),
        "/projects/".to_string(),
        project.to_string(),
        "/forums/".to_string(),
    ]
    .concat()
}
/// Request handle for the Zoho Projects forum endpoints, wrapping the shared
/// `RequestDetails` (access token, endpoint path, optional record id).
#[derive(Clone, Debug)]
pub struct ForumRequest(RequestDetails);
impl ForumRequest {
    /// Builds a request for the forum endpoint at `model_path`, optionally
    /// scoped to a single forum by `id`.
    pub fn new(access_token: &str, model_path: &str, id: Option<i64>) -> Self {
        ForumRequest(RequestDetails::new(access_token, model_path, id))
    }
    /// Consumes the request and returns an iterator that pages through all
    /// forums matching it.
    pub fn iter_get(self) -> ForumIterator {
        ForumIterator::new(self)
    }
}
impl ModelRequest for ForumRequest {
    /// Full URI of the forum endpoint, delegated to the inner details.
    fn uri(&self) -> String {
        self.0.uri()
    }
    /// Query parameters accumulated on the inner details, if any.
    fn params(&self) -> Option<HashMap<String, String>> {
        self.0.params()
    }
    /// OAuth access token used to authorize the request.
    fn access_token(&self) -> String {
        self.0.access_token()
    }
    /// Intentionally a no-op: the parameter is ignored and `self` is returned
    /// unchanged. NOTE(review): presumably the forums endpoint accepts no
    /// filter parameters — confirm against the Zoho API.
    fn filter(self, _param: (impl FilterOptions + std::fmt::Display)) -> Self {
        self
    }
}
/// Wires `ForumRequest` into the generic request machinery: GETs deserialize
/// into `ZohoForums`, and creation POSTs serialize a `NewForum`.
impl RequestParameters for ForumRequest {
    type ModelCollection = ZohoForums;
    type NewModel = NewForum;
}
impl ForumRequest {
    /// Marks the addressed forum as followed by the authenticated user.
    ///
    /// # Errors
    /// Propagates any transport or deserialization error from the request.
    pub fn follow(&self) -> Result<Option<Response>> {
        self.post_action("follow")
    }

    /// Removes the authenticated user's follow from the addressed forum.
    ///
    /// # Errors
    /// Propagates any transport or deserialization error from the request.
    pub fn unfollow(&self) -> Result<Option<Response>> {
        self.post_action("unfollow")
    }

    // Shared implementation for follow/unfollow: POST to `{uri}{action}`
    // with no request body. Extracted to avoid duplicating the URL-building
    // and send logic in both public methods.
    fn post_action(&self, action: &str) -> Result<Option<Response>> {
        let mut url = self.uri();
        url.push_str(action);
        ZohoRequest::<NewForum>::new(Method::POST, &url, None, self.access_token(), self.params())
            .send()
    }
}
/// Wrapper matching the JSON envelope the Zoho API uses for forum lists.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct ZohoForums {
    pub forums: Vec<Forum>,
}

/// A single forum post as returned by the Zoho Projects API.
///
/// Redundant `#[serde(rename)]` attributes whose value matched the field
/// name have been removed; serialization output is unchanged.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct Forum {
    pub id: i64,
    pub name: String,
    pub content: String,
    pub is_sticky_post: bool,
    pub is_announcement_post: bool,
    pub posted_by: String,
    pub posted_person: String,
    pub post_date: String,
    pub post_date_long: i64,
    pub link: Link,
}

/// Container for the navigation links attached to a forum.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct Link {
    // "self" is a Rust keyword, so the JSON key must be mapped to a
    // different field name here.
    #[serde(rename = "self")]
    pub self_link: SelfLink,
}

/// A single URL link object.
#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct SelfLink {
    pub url: String,
}

/// Payload for creating a new forum post.
#[derive(Clone, Debug, Serialize)]
pub struct NewForum {
    pub name: String,
    pub content: String,
    // These two fields use shorter Rust names than their JSON keys.
    #[serde(rename = "is_sticky_post")]
    pub sticky: bool,
    #[serde(rename = "is_announcement_post")]
    pub announcement: bool,
    pub category_id: i64,
    //TODO(Xymist): notify, flag, type, file
}

/// Minimal body returned by the follow/unfollow actions.
#[allow(dead_code)]
#[derive(Clone, Debug, Deserialize, Default)]
pub struct Response {
    response: String,
}
/// Lazily pages through forums fetched from the Zoho API, 100 at a time.
#[derive(Debug, Clone)]
pub struct ForumIterator {
    /// Items remaining from the most recently fetched page.
    pub items: <Vec<Forum> as IntoIterator>::IntoIter,
    /// Whether the last fetch returned a full page; when false the server
    /// has been exhausted and no further requests are made.
    pub last_full: bool,
    /// Request cloned for each page fetch.
    pub request: ForumRequest,
}
impl ForumIterator {
    /// Creates an iterator that lazily pages through the forums returned by
    /// `request`.
    pub fn new(request: ForumRequest) -> ForumIterator {
        ForumIterator {
            items: Vec::new().into_iter(),
            // Assume a full page is available until a fetch proves otherwise.
            last_full: true,
            request,
        }
    }

    /// Returns the next forum, fetching another page from Zoho when the
    /// local cache is exhausted.
    ///
    /// # Errors
    /// Propagates any error from the underlying API request.
    pub fn try_next(&mut self) -> Result<Option<Forum>> {
        // If there are still items in the local cache from the last request, use the next one of those.
        if let Some(forum) = self.items.next() {
            return Ok(Some(forum));
        }
        // If we didn't get a full 100 (the default number to retrieve) the last time, then we must have
        // run out in Zoho; don't request any more.
        if !self.last_full {
            return Ok(None);
        }
        let returned_forums = self.request.clone().get()?;
        if let Some(forum_list) = returned_forums {
            // Compare lengths as usize: the previous `len() as i8` cast would
            // wrap for page sizes above 127 and falsely end pagination.
            self.last_full = forum_list.forums.len() == 100;
            self.items = forum_list.forums.into_iter();
            Ok(self.items.next())
        } else {
            Ok(None)
        }
    }
}
impl Iterator for ForumIterator {
    type Item = Result<Forum>;

    /// Adapts `try_next`'s `Result<Option<_>>` into the `Option<Result<_>>`
    /// shape `Iterator` requires; `transpose` performs exactly that mapping
    /// (`Ok(None)` ends iteration, errors are yielded as items).
    fn next(&mut self) -> Option<Self::Item> {
        self.try_next().transpose()
    }
}
|
// NOTE: svd2rust-generated register definition for EXTI_HWCFGR5; the code is
// kept byte-identical to the generator output. Regenerate rather than editing.
#[doc = "Register `EXTI_HWCFGR5` reader"]
pub type R = crate::R<EXTI_HWCFGR5_SPEC>;
#[doc = "Field `CPUEVENT` reader - CPUEVENT"]
pub type CPUEVENT_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - CPUEVENT"]
    #[inline(always)]
    pub fn cpuevent(&self) -> CPUEVENT_R {
        // The field spans the full 32-bit register, so the raw bits are
        // passed through unchanged.
        CPUEVENT_R::new(self.bits)
    }
}
#[doc = "EXTI hardware configuration register 5\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exti_hwcfgr5::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EXTI_HWCFGR5_SPEC;
impl crate::RegisterSpec for EXTI_HWCFGR5_SPEC {
    // All EXTI registers are 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`exti_hwcfgr5::R`](R) reader structure"]
impl crate::Readable for EXTI_HWCFGR5_SPEC {}
#[doc = "`reset()` method sets EXTI_HWCFGR5 to value 0x000e_ffff"]
impl crate::Resettable for EXTI_HWCFGR5_SPEC {
    const RESET_VALUE: Self::Ux = 0x000e_ffff;
}
|
pub mod external_command;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.