text stringlengths 8 4.13M |
|---|
use crate::zgroup::GroupEvent::{LeaseExpired, NewGroupView};
use async_std::sync::{Arc, Condvar, Mutex};
use async_std::task::JoinHandle;
use flume::{Receiver, Sender};
use futures::prelude::*;
use futures::select;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::ops::Add;
use std::time::{Duration, Instant};
use zenoh::net::queryable::EVAL;
use zenoh::net::{
CongestionControl, ConsolidationMode, QueryConsolidation, QueryTarget, Reliability, ResKey,
Sample, Session, SubInfo, SubMode,
};
use zenoh_util::sync::Condition;
const GROUP_PREFIX: &str = "/zenoh/ext/net/group";
const EVENT_POSTFIX: &str = "evt";
const MAX_START_LOOKOUT_DELAY: usize = 2;
const VIEW_REFRESH_LEASE_RATIO: f32 = 0.75f32;
const DEFAULT_QUERY_TIMEOUT: Duration = Duration::from_secs(1);
const DEFAULT_LEASE: Duration = Duration::from_secs(18);
/// Event: a new member has joined the group.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct JoinEvent {
    // The member that joined, including its lease and liveliness mode.
    member: Member,
}
/// Event: a member's lease expired without receiving a keep-alive.
#[derive(Serialize, Deserialize, Debug)]
pub struct LeaseExpiredEvent {
    // Identifier of the expired member.
    mid: String,
}
/// Event: a member explicitly left the group.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LeaveEvent {
    mid: String,
}
/// Event carrying a full snapshot of the group view as seen by `source`.
#[derive(Serialize, Deserialize, Debug)]
pub struct NewGroupViewEvent {
    // Member id of the peer that advertised this view.
    source: String,
    members: Vec<Member>,
}
/// Event: a new leader was elected.
#[derive(Serialize, Deserialize, Debug)]
pub struct NewLeaderEvent {
    mid: String,
}
/// Internal liveliness heart-beat periodically published by each member.
#[derive(Serialize, Deserialize, Debug)]
struct KeepAliveEvent {
    mid: String,
}
/// Events exchanged on the wire between group members.
/// Not exposed to users — see `GroupEvent` for the user-facing set.
#[derive(Serialize, Deserialize, Debug)]
enum GroupNetEvent {
    Join(JoinEvent),
    Leave(LeaveEvent),
    KeepAlive(KeepAliveEvent),
    NewGroupView(NewGroupViewEvent),
}
/// Events exposed to the user to be informed for relevant
/// changes in the group.
#[derive(Serialize, Deserialize, Debug)]
pub enum GroupEvent {
    Join(JoinEvent),
    Leave(LeaveEvent),
    LeaseExpired(LeaseExpiredEvent),
    NewLeader(NewLeaderEvent),
    NewGroupView(NewGroupViewEvent),
}
/// How the member's liveliness is asserted: automatically by a background
/// keep-alive task, or manually by the user.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum MemberLiveliness {
    Auto,
    Manual,
}
/// A participant in a group.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Member {
    // Unique member identifier within the group.
    mid: String,
    // Optional free-form information attached to the member.
    info: Option<String>,
    liveliness: MemberLiveliness,
    // How long peers consider this member alive without a keep-alive.
    lease: Duration,
}
impl Member {
    /// Creates a member with the given id, the default lease and
    /// automatic liveliness.
    pub fn new(mid: &str) -> Member {
        Member {
            mid: mid.to_owned(),
            info: None,
            liveliness: MemberLiveliness::Auto,
            lease: DEFAULT_LEASE,
        }
    }
    /// Attaches free-form information to this member (builder style).
    pub fn info(&mut self, i: &str) -> &mut Self {
        self.info = Some(i.to_owned());
        self
    }
    /// Overrides the lease duration (builder style).
    pub fn lease(&mut self, d: Duration) -> &mut Self {
        self.lease = d;
        self
    }
    /// Overrides the liveliness mode (builder style).
    pub fn liveliness(&mut self, l: MemberLiveliness) -> &mut Self {
        self.liveliness = l;
        self
    }
}
/// Shared, reference-counted state backing a `Group` and its background tasks.
struct GroupState {
    // Group identifier.
    gid: String,
    local_member: Member,
    // Known remote members, each with the instant until which it is
    // considered alive.
    members: Mutex<HashMap<String, (Member, Instant)>>,
    // Full path of the group resource ("<GROUP_PREFIX>/<gid>").
    group_resource: String,
    group_resource_id: u64,
    // Resource on which group events are published/subscribed.
    event_resource: ResKey,
    // Sender side of the user's subscription, if any (see `Group::subscribe`).
    user_events_tx: Mutex<Option<Sender<GroupEvent>>>,
    // Notified whenever the member table grows (see `wait_for_view_size`).
    cond: Condition,
}
/// Handle to a joined group; dropping it does not leave the group explicitly.
pub struct Group {
    state: Arc<GroupState>,
    // Present only when liveliness is `Auto`.
    keep_alive_task: Option<JoinHandle<()>>,
    net_evt_task: JoinHandle<()>,
}
/// Periodically re-asserts the local member's liveliness by publishing a
/// `KeepAlive` event every `lease * VIEW_REFRESH_LEASE_RATIO`, i.e. well
/// before the lease expires so peers never see the member as stale.
async fn keep_alive_task(z: Arc<Session>, state: Arc<GroupState>) {
    let mid = state.local_member.mid.clone();
    let evt = GroupNetEvent::KeepAlive(KeepAliveEvent { mid });
    let buf = bincode::serialize(&evt).unwrap();
    let period = state.local_member.lease.mul_f32(VIEW_REFRESH_LEASE_RATIO);
    loop {
        async_std::task::sleep(period).await;
        log::debug!("Sending Keep Alive for: {}", &state.local_member.mid);
        // BUG FIX: the write future is lazy — without `.await` it was never
        // polled and the keep-alive was never actually published
        // (cf. `Group::join`, which awaits its `write`). Delivery is
        // best-effort (CongestionControl::Drop), so errors are ignored.
        let _ = z
            .write_ext(
                &state.event_resource,
                (buf.clone()).into(),
                0,
                0,
                CongestionControl::Drop,
            )
            .await;
    }
}
fn spawn_watchdog(s: Arc<GroupState>, period: Duration) -> JoinHandle<()> {
let watch_dog = async move {
loop {
async_std::task::sleep(period).await;
let now = Instant::now();
let mut ms = s.members.lock().await;
let expired_members: Vec<String> = ms
.iter()
.filter(|e| e.1 .1 < now)
.map(|e| String::from(e.0))
.collect();
for e in &expired_members {
ms.remove(e);
}
drop(ms);
let u_evt = &*s.user_events_tx.lock().await;
for e in expired_members {
if let Some(tx) = u_evt {
tx.send(LeaseExpired(LeaseExpiredEvent { mid: e })).unwrap()
}
}
}
};
async_std::task::spawn(watch_dog)
}
/// Serves remote queries for the local member's information: declares an
/// EVAL queryable on "<GROUP_PREFIX>/<gid>/<mid>" and replies with the
/// bincode-serialized local `Member`. Used by peers that receive a
/// keep-alive from a member they do not know yet.
async fn query_handler(z: Arc<Session>, state: Arc<GroupState>) {
    let qres = format!(
        "{}/{}/{}",
        GROUP_PREFIX, &state.gid, &state.local_member.mid
    );
    log::debug!("Started query handler for: {}", &qres);
    // The local member is immutable after join, so serialize it once.
    let buf = bincode::serialize(&state.local_member).unwrap();
    let mut queryable = z
        .declare_queryable(&qres.clone().into(), EVAL)
        .await
        .unwrap();
    while let Some(query) = queryable.receiver().next().await {
        log::debug!("Serving query for: {}", &qres);
        query.reply(Sample {
            res_name: qres.clone(),
            payload: buf.clone().into(),
            data_info: None,
        })
    }
}
/// Advertises the current group view, but only when the local member has the
/// lexicographically smallest id — so exactly one member (the leader)
/// publishes a view per change.
async fn advertise_view(z: &Arc<Session>, state: &Arc<GroupState>) {
    log::debug!("Maybe Advertising NewGroupView....");
    let sid = &state.local_member.mid;
    let mut min: String = sid.clone();
    let mut members: Vec<Member> = state
        .members
        .lock()
        .await
        .iter()
        .map(|e| {
            // BUG FIX: track the running minimum. The original compared each
            // id against `sid` (not `min`), which happened to work for the
            // final equality test but did not actually compute the minimum.
            if *e.0 < min {
                min = e.0.clone()
            };
            e.1 .0.clone()
        })
        .collect();
    members.push(state.local_member.clone());
    // Only advertise when we are the smallest-id member (the leader).
    if min == *sid {
        let evt = GroupNetEvent::NewGroupView(NewGroupViewEvent {
            source: sid.clone(),
            members,
        });
        log::debug!("Advertising NewGroupView: {:?}", &evt);
        let buf = bincode::serialize(&evt).unwrap();
        let res = format!("{}/{}/{}", GROUP_PREFIX, &state.gid, EVENT_POSTFIX);
        // BUG FIX: `write` returns a lazy future — without `.await` the view
        // was never actually published (cf. `Group::join`, which awaits).
        let _ = z.write(&res.into(), buf.into()).await;
    }
}
/// Subscribes to the group's event resource and dispatches incoming
/// `GroupNetEvent`s: maintains the member table, refreshes leases on
/// keep-alives, queries unknown members for their info, and forwards
/// relevant events to the user channel (if subscribed).
async fn net_event_handler(z: Arc<Session>, state: Arc<GroupState>) {
    let sub_info = SubInfo {
        period: None,
        mode: SubMode::Push,
        reliability: Reliability::Reliable,
    };
    let mut sub = z
        .declare_subscriber(&state.event_resource, &sub_info)
        .await
        .unwrap();
    let mut stream = sub.receiver();
    while let Some(s) = stream.next().await {
        log::debug!("Handling Network Event...");
        match bincode::deserialize::<GroupNetEvent>(&(s.payload.to_vec())) {
            Ok(evt) => match evt {
                GroupNetEvent::Join(je) => {
                    // A join can change leadership; (re-)advertise the view
                    // before recording the new member.
                    advertise_view(&z, &state).await;
                    log::debug!("Member joining the group:\n{:?}", &je.member);
                    // The joiner is considered alive for one full lease.
                    let alive_till = Instant::now().add(je.member.lease.clone());
                    let mut ms = state.members.lock().await;
                    ms.insert(je.member.mid.clone(), (je.member.clone(), alive_till));
                    // Wake tasks blocked in `wait_for_view_size`.
                    state.cond.notify_all();
                    drop(ms);
                    let u_evt = &*state.user_events_tx.lock().await;
                    if let Some(tx) = u_evt {
                        tx.send(GroupEvent::Join(je)).unwrap()
                    }
                }
                GroupNetEvent::Leave(le) => {
                    log::debug!("Member leaving:\n{:?}", &le.mid);
                    state.members.lock().await.remove(&le.mid);
                    let u_evt = &*state.user_events_tx.lock().await;
                    if let Some(tx) = u_evt {
                        tx.send(GroupEvent::Leave(le)).unwrap()
                    }
                }
                GroupNetEvent::KeepAlive(kae) => {
                    log::debug!(
                        "KeepAlive for {} != {} -> {}",
                        &kae.mid,
                        state.local_member.mid,
                        kae.mid.ne(&state.local_member.mid)
                    );
                    // NOTE(review): the subscriber presumably also receives
                    // locally-published keep-alives, hence the self-check.
                    if kae.mid.ne(&state.local_member.mid) {
                        let mut mm = state.members.lock().await;
                        log::debug!("Members: \n{:?}", &mm);
                        let v = mm.remove(&kae.mid);
                        match v {
                            Some((m, h)) => {
                                // Known member: extend its lease from "now".
                                log::debug!("Updating leasefor: \n{:?}", &kae.mid);
                                let alive_till = Instant::now().add(m.lease.clone());
                                mm.insert(m.mid.clone(), (m, alive_till));
                            }
                            None => {
                                // Unknown member (e.g. its Join was missed):
                                // fetch its info via a query to the member's
                                // own query handler.
                                log::debug!(
                                    "Received Keep Alive from unknown member: {}",
                                    &kae.mid
                                );
                                let qres = format!("{}/{}/{}", GROUP_PREFIX, &state.gid, kae.mid);
                                // @TODO: we could also send this member info
                                let qc = QueryConsolidation {
                                    first_routers: ConsolidationMode::None,
                                    last_router: ConsolidationMode::None,
                                    reception: ConsolidationMode::None,
                                };
                                log::debug!("Issuing Query for {}", &qres);
                                let mut receiver = z
                                    .query(&qres.into(), "", QueryTarget::default(), qc)
                                    .await
                                    .unwrap();
                                // NOTE: the members lock `mm` is held across
                                // these awaits — other tasks touching the
                                // member table will stall until replies end.
                                while let Some(sample) = receiver.next().await {
                                    match bincode::deserialize::<Member>(
                                        &sample.data.payload.to_vec(),
                                    ) {
                                        Ok(m) => {
                                            let mut expiry = Instant::now();
                                            expiry = expiry.add(m.lease);
                                            log::debug!("Received member information: {:?}", &m);
                                            mm.insert(kae.mid.clone(), (m, expiry));
                                        }
                                        Err(e) => {
                                            log::debug!(
                                                "Unable to deserialize the Member info received:\n {}", e);
                                        }
                                    }
                                }
                                // A member may have been discovered: wake
                                // `wait_for_view_size` waiters.
                                state.cond.notify_all();
                            }
                        }
                    } else {
                        log::debug!("KeepAlive from Local Participant -- Ignoring");
                    }
                }
                GroupNetEvent::NewGroupView(ngve) => {
                    // Merge the advertised view: only add members we do not
                    // already track; existing lease state is preserved.
                    let mut ms = state.members.lock().await;
                    for m in ngve.members {
                        if let None = ms.get(&m.mid) {
                            let alive_till = Instant::now().add(m.lease.clone());
                            ms.insert(m.mid.clone(), (m, alive_till));
                        }
                    }
                }
            },
            Err(e) => {
                log::warn!("Failed decoding net-event due to:\n{:?}", e);
            }
        }
    }
}
impl Group {
    /// Joins the group `group`, announcing the local member `with`.
    ///
    /// Spawns the background machinery: a keep-alive publisher (only when
    /// liveliness is `Auto`), the network-event handler, the member-info
    /// query handler, and the lease watchdog.
    pub async fn join(z: Arc<Session>, group: &str, with: &Member) -> Group {
        let group_resource = format!("{}/{}", GROUP_PREFIX, group);
        let rid = z
            .declare_resource(&(group_resource.clone()).into())
            .await
            .unwrap();
        let event_resource = ResKey::RIdWithSuffix(rid, EVENT_POSTFIX.into());
        let state = Arc::new(GroupState {
            gid: String::from(group),
            local_member: with.clone(),
            members: Mutex::new(Default::default()),
            group_resource,
            group_resource_id: rid,
            event_resource: event_resource.clone(),
            user_events_tx: Mutex::new(Default::default()),
            cond: Condition::new(),
        });
        let is_auto_liveliness = matches!(with.liveliness, MemberLiveliness::Auto);
        // Announce the local member to the rest of the group.
        log::debug!("Sending Join Message for local member:\n{:?}", &with);
        let join_evt = GroupNetEvent::Join(JoinEvent {
            member: with.clone(),
        });
        let buf = bincode::serialize(&join_evt).unwrap();
        let _ = z.write(&event_resource, buf.into()).await;
        // If the liveliness is manual it is the user who has to assert it.
        let kah = if is_auto_liveliness {
            Some(async_std::task::spawn(keep_alive_task(
                z.clone(),
                state.clone(),
            )))
        } else {
            None
        };
        let net_evt_task = async_std::task::spawn(net_event_handler(z.clone(), state.clone()));
        // These handles are intentionally dropped: async-std tasks keep
        // running when their JoinHandle is dropped (cancellation is explicit).
        let _query_handler = async_std::task::spawn(query_handler(z.clone(), state.clone()));
        let _watchdog = spawn_watchdog(state.clone(), Duration::from_secs(1));
        Group {
            state,
            keep_alive_task: kah,
            net_evt_task,
        }
    }
    /// Returns a receiver that will allow to receive notifications for group events.
    /// Notice that there can be a single subscription at the time, each call to subscribe
    /// will cancel the previous subscription.
    pub async fn subscribe(&self) -> Receiver<GroupEvent> {
        let (tx, rx) = flume::unbounded();
        *self.state.user_events_tx.lock().await = Some(tx);
        rx
    }
    /// Returns the group identifier.
    pub fn group_id(&self) -> &str {
        &self.state.gid
    }
    /// Returns this member identifier.
    pub fn local_member_id(&self) -> &str {
        &self.state.local_member.mid
    }
    /// Returns the current group view, in other terms the list
    /// of group members (remote members plus the local one).
    pub async fn view(&self) -> Vec<Member> {
        let mut ms: Vec<Member> = self
            .state
            .members
            .lock()
            .await
            .iter()
            .map(|e| e.1 .0.clone())
            .collect();
        ms.push(self.state.local_member.clone());
        ms
    }
    /// Wait for a view size to be established or times out. The resulting predicate
    /// indicates whether the desired view size has been established.
    pub async fn wait_for_view_size(&self, size: usize, timeout: Duration) -> bool {
        // Fast path: the view is already big enough (+1 is the local member).
        if self.state.members.lock().await.len() + 1 >= size {
            true
        } else {
            // Block on the condition variable until the table grows enough,
            // racing against the timeout.
            let f = async {
                loop {
                    let ms = self.state.members.lock().await;
                    if ms.len() + 1 >= size {
                        return true;
                    } else {
                        self.state.cond.wait(ms).await;
                    }
                }
            };
            let r: bool = select! {
                p = f.fuse() => { p },
                _ = async_std::task::sleep(timeout).fuse() => { false },
            };
            r
        }
    }
    /// Returns the current group size.
    pub async fn size(&self) -> usize {
        let ms = self.state.members.lock().await;
        ms.len() + 1 // with +1 being the local member
    }
}
|
#[doc = r" Value read from the register"]
// svd2rust-style reader: a snapshot of the register's 32 bits.
pub struct R {
    bits: u32,
}
#[doc = r" Value to write to the register"]
// svd2rust-style writer: the 32 bits to be committed to the register.
pub struct W {
    bits: u32,
}
impl super::CTRUP {
    #[doc = r" Modifies the contents of the register"]
    // Read-modify-write: reads the current bits, lets `f` edit a writer
    // seeded with them, then stores the result back.
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    // Unlike `modify`, the writer starts from the reset value, so fields
    // not touched by `f` are written as their reset defaults.
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = "Possible values of the field `CTERR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CTERRR {
    #[doc = "No read error occurred value."]
    NOERR,
    #[doc = "Read error occurred value."]
    RDERR,
}
impl CTERRR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(*self, CTERRR::RDERR)
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CTERRR {
        if value {
            CTERRR::RDERR
        } else {
            CTERRR::NOERR
        }
    }
    #[doc = "Checks if the value of the field is `NOERR`"]
    #[inline]
    pub fn is_noerr(&self) -> bool {
        matches!(*self, CTERRR::NOERR)
    }
    #[doc = "Checks if the value of the field is `RDERR`"]
    #[inline]
    pub fn is_rderr(&self) -> bool {
        matches!(*self, CTERRR::RDERR)
    }
}
#[doc = "Possible values of the field `CEB`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CEBR {
    #[doc = "Disable the Century bit from changing value."]
    DIS,
    #[doc = "Enable the Century bit to change value."]
    EN,
}
impl CEBR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(*self, CEBR::EN)
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CEBR {
        if value {
            CEBR::EN
        } else {
            CEBR::DIS
        }
    }
    #[doc = "Checks if the value of the field is `DIS`"]
    #[inline]
    pub fn is_dis(&self) -> bool {
        matches!(*self, CEBR::DIS)
    }
    #[doc = "Checks if the value of the field is `EN`"]
    #[inline]
    pub fn is_en(&self) -> bool {
        matches!(*self, CEBR::EN)
    }
}
#[doc = "Possible values of the field `CB`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CBR {
    #[doc = "Century is 2000s value."]
    _2000,
    #[doc = "Century is 1900s/2100s value."]
    _1900_2100,
}
impl CBR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        matches!(*self, CBR::_1900_2100)
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> CBR {
        if value {
            CBR::_1900_2100
        } else {
            CBR::_2000
        }
    }
    #[doc = "Checks if the value of the field is `_2000`"]
    #[inline]
    pub fn is_2000(&self) -> bool {
        matches!(*self, CBR::_2000)
    }
    #[doc = "Checks if the value of the field is `_1900_2100`"]
    #[inline]
    pub fn is_1900_2100(&self) -> bool {
        matches!(*self, CBR::_1900_2100)
    }
}
#[doc = r" Value of the field"]
// Raw 3-bit weekday counter value (CTRUP bits 24:26, see `R::ctrwkdy`).
pub struct CTRWKDYR {
    bits: u8,
}
impl CTRWKDYR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
// Raw 8-bit years counter value (CTRUP bits 16:23, see `R::ctryr`).
pub struct CTRYRR {
    bits: u8,
}
impl CTRYRR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
// Raw 5-bit months counter value (CTRUP bits 8:12, see `R::ctrmo`).
pub struct CTRMOR {
    bits: u8,
}
impl CTRMOR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
// Raw 6-bit date counter value (CTRUP bits 0:5, see `R::ctrdate`).
pub struct CTRDATER {
    bits: u8,
}
impl CTRDATER {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = "Values that can be written to the field `CTERR`"]
pub enum CTERRW {
    #[doc = "No read error occurred value."]
    NOERR,
    #[doc = "Read error occurred value."]
    RDERR,
}
impl CTERRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, CTERRW::RDERR)
    }
}
#[doc = r" Proxy"]
pub struct _CTERRW<'a> {
    w: &'a mut W,
}
impl<'a> _CTERRW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 31;
        // Clear the field, then OR in the masked value — one expression.
        let field = (MASK as u32) << OFFSET;
        self.w.bits = (self.w.bits & !field) | (((value & MASK) as u32) << OFFSET);
        self.w
    }
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CTERRW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "No read error occurred value."]
    #[inline]
    pub fn noerr(self) -> &'a mut W {
        self.variant(CTERRW::NOERR)
    }
    #[doc = "Read error occurred value."]
    #[inline]
    pub fn rderr(self) -> &'a mut W {
        self.variant(CTERRW::RDERR)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Values that can be written to the field `CEB`"]
pub enum CEBW {
    #[doc = "Disable the Century bit from changing value."]
    DIS,
    #[doc = "Enable the Century bit to change value."]
    EN,
}
impl CEBW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, CEBW::EN)
    }
}
#[doc = r" Proxy"]
pub struct _CEBW<'a> {
    w: &'a mut W,
}
impl<'a> _CEBW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 28;
        // Clear the field, then OR in the masked value — one expression.
        let field = (MASK as u32) << OFFSET;
        self.w.bits = (self.w.bits & !field) | (((value & MASK) as u32) << OFFSET);
        self.w
    }
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CEBW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Disable the Century bit from changing value."]
    #[inline]
    pub fn dis(self) -> &'a mut W {
        self.variant(CEBW::DIS)
    }
    #[doc = "Enable the Century bit to change value."]
    #[inline]
    pub fn en(self) -> &'a mut W {
        self.variant(CEBW::EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Values that can be written to the field `CB`"]
pub enum CBW {
    #[doc = "Century is 2000s value."]
    _2000,
    #[doc = "Century is 1900s/2100s value."]
    _1900_2100,
}
impl CBW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        matches!(*self, CBW::_1900_2100)
    }
}
#[doc = r" Proxy"]
pub struct _CBW<'a> {
    w: &'a mut W,
}
impl<'a> _CBW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 27;
        // Clear the field, then OR in the masked value — one expression.
        let field = (MASK as u32) << OFFSET;
        self.w.bits = (self.w.bits & !field) | (((value & MASK) as u32) << OFFSET);
        self.w
    }
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: CBW) -> &'a mut W {
        self.bit(variant._bits())
    }
    #[doc = "Century is 2000s value."]
    #[inline]
    pub fn _2000(self) -> &'a mut W {
        self.variant(CBW::_2000)
    }
    #[doc = "Century is 1900s/2100s value."]
    #[inline]
    pub fn _1900_2100(self) -> &'a mut W {
        self.variant(CBW::_1900_2100)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = r" Proxy"]
// Writer proxy for the 3-bit weekday field (bits 24:26).
pub struct _CTRWKDYW<'a> {
    w: &'a mut W,
}
impl<'a> _CTRWKDYW<'a> {
    #[doc = r" Writes raw bits to the field"]
    /// Marked `unsafe` by the code generator: the caller must guarantee
    /// `value` is valid for this field — out-of-range bits are silently
    /// truncated by the 3-bit mask.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 7;
        const OFFSET: u8 = 24;
        // Clear the field, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
// Writer proxy for the 8-bit years field (bits 16:23).
pub struct _CTRYRW<'a> {
    w: &'a mut W,
}
impl<'a> _CTRYRW<'a> {
    #[doc = r" Writes raw bits to the field"]
    /// Marked `unsafe` by the code generator: the caller must guarantee
    /// `value` is valid for this field.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 255;
        const OFFSET: u8 = 16;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
// Writer proxy for the 5-bit months field (bits 8:12).
pub struct _CTRMOW<'a> {
    w: &'a mut W,
}
impl<'a> _CTRMOW<'a> {
    #[doc = r" Writes raw bits to the field"]
    /// Marked `unsafe` by the code generator: the caller must guarantee
    /// `value` is valid for this field — the 5-bit mask truncates silently.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 31;
        const OFFSET: u8 = 8;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
// Writer proxy for the 6-bit date field (bits 0:5).
pub struct _CTRDATEW<'a> {
    w: &'a mut W,
}
impl<'a> _CTRDATEW<'a> {
    #[doc = r" Writes raw bits to the field"]
    /// Marked `unsafe` by the code generator: the caller must guarantee
    /// `value` is valid for this field — the 6-bit mask truncates silently.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 63;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Field accessors on the register snapshot: each extracts its field by
// shifting the raw 32-bit value right by the field offset and masking.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 31 - Counter read error status. Error is triggered when software reads the lower word of the counters, and fails to read the upper counter within 1/100 second. This is because when the lower counter is read, the upper counter is held off from incrementing until it is read so that the full time stamp can be read."]
    #[inline]
    pub fn cterr(&self) -> CTERRR {
        CTERRR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 31;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 28 - Century enable"]
    #[inline]
    pub fn ceb(&self) -> CEBR {
        CEBR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 28;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bit 27 - Century"]
    #[inline]
    pub fn cb(&self) -> CBR {
        CBR::_from({
            const MASK: bool = true;
            const OFFSET: u8 = 27;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        })
    }
    #[doc = "Bits 24:26 - Weekdays Counter"]
    #[inline]
    pub fn ctrwkdy(&self) -> CTRWKDYR {
        let bits = {
            const MASK: u8 = 7;
            const OFFSET: u8 = 24;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        CTRWKDYR { bits }
    }
    #[doc = "Bits 16:23 - Years Counter"]
    #[inline]
    pub fn ctryr(&self) -> CTRYRR {
        let bits = {
            const MASK: u8 = 255;
            const OFFSET: u8 = 16;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        CTRYRR { bits }
    }
    #[doc = "Bits 8:12 - Months Counter"]
    #[inline]
    pub fn ctrmo(&self) -> CTRMOR {
        let bits = {
            const MASK: u8 = 31;
            const OFFSET: u8 = 8;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        CTRMOR { bits }
    }
    #[doc = "Bits 0:5 - Date Counter"]
    #[inline]
    pub fn ctrdate(&self) -> CTRDATER {
        let bits = {
            const MASK: u8 = 63;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        CTRDATER { bits }
    }
}
// Field writers: each method returns a proxy that mutates the pending
// 32-bit value; `reset_value` seeds it (all-zero for this register).
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 31 - Counter read error status. Error is triggered when software reads the lower word of the counters, and fails to read the upper counter within 1/100 second. This is because when the lower counter is read, the upper counter is held off from incrementing until it is read so that the full time stamp can be read."]
    #[inline]
    pub fn cterr(&mut self) -> _CTERRW {
        _CTERRW { w: self }
    }
    #[doc = "Bit 28 - Century enable"]
    #[inline]
    pub fn ceb(&mut self) -> _CEBW {
        _CEBW { w: self }
    }
    #[doc = "Bit 27 - Century"]
    #[inline]
    pub fn cb(&mut self) -> _CBW {
        _CBW { w: self }
    }
    #[doc = "Bits 24:26 - Weekdays Counter"]
    #[inline]
    pub fn ctrwkdy(&mut self) -> _CTRWKDYW {
        _CTRWKDYW { w: self }
    }
    #[doc = "Bits 16:23 - Years Counter"]
    #[inline]
    pub fn ctryr(&mut self) -> _CTRYRW {
        _CTRYRW { w: self }
    }
    #[doc = "Bits 8:12 - Months Counter"]
    #[inline]
    pub fn ctrmo(&mut self) -> _CTRMOW {
        _CTRMOW { w: self }
    }
    #[doc = "Bits 0:5 - Date Counter"]
    #[inline]
    pub fn ctrdate(&mut self) -> _CTRDATEW {
        _CTRDATEW { w: self }
    }
}
|
use regex::Regex;
lazy_static! {
    /// Bitcoin Regex Pattern
    static ref BTC: Regex = Regex::new(r"(?i)^[13][a-km-zA-HJ-NP-Z1-9]{25,34}$").unwrap();
    /// Bitcoin Cash Regex Pattern
    static ref BCH: Regex = Regex::new(r"(?i)^((bitcoincash|bchreg|bchtest):)?(q|p)[a-z0-9]{41}$").unwrap();
    /// Ethereum Regex Pattern
    static ref ETH: Regex = Regex::new(r"(?i)^0x[a-fA-F0-9]{40}$").unwrap();
    /// Litecoin Regex Pattern
    static ref LTC: Regex = Regex::new(r"(?i)^[LM3][a-km-zA-HJ-NP-Z1-9]{26,33}$").unwrap();
    /// Dodge Coin Regex Pattern
    static ref DODGE: Regex = Regex::new(r"(?i)^D{1}[5-9A-HJ-NP-U]{1}[1-9A-HJ-NP-Za-km-z]{32}$").unwrap();
    /// Dash Regex Pattern
    static ref DASH: Regex = Regex::new(r"^(?i)X[1-9A-HJ-NP-Za-km-z]{33}$").unwrap();
    /// Monero Regex Pattern
    static ref XMR: Regex = Regex::new(r"(?i)^4[0-9AB][1-9A-HJ-NP-Za-km-z]{93}$").unwrap();
    /// Neo Regex Pattern
    static ref NEO: Regex = Regex::new(r"(?i)^A[0-9a-zA-Z]{33}$").unwrap();
    /// Ripple Regex Pattern
    // BUG FIX: alternation binds looser than anchors, so `^r|X...$` parsed as
    // `(^r)|(X...$)` and matched ANY string starting with "r". The alternation
    // must be grouped so both branches are anchored and length-checked.
    static ref XRP: Regex = Regex::new(r"(?i)^(r|X)[0-9a-zA-Z]{33,47}$").unwrap();
}
/// Supported cryptocurrencies; each variant maps to a name and an
/// address-validation pattern (see `impl Type`).
enum Type {
    Bitcoin,
    BitcoinCash,
    Ethereum,
    Litecoin,
    Dodge,
    Dash,
    Monero,
    Neo,
    Ripple,
}
impl Type {
fn name<'a>(&self) -> &'a str {
match *self {
Type::Bitcoin => "Bitcoin",
Type::BitcoinCash => "Bitcoin Cash",
Type::Ethereum => "Ethereum",
Type::Litecoin => "Litecoin",
Type::Dodge => "Dodgecoin",
Type::Dash => "Dash",
Type::Monero => "Monero",
Type::Neo => "Neo",
Type::Ripple => "Ripple",
}
}
fn pattern<'a>(&self) -> &'a Regex {
match *self {
Type::Bitcoin => &BTC,
Type::BitcoinCash => &BCH,
Type::Ethereum => Ð,
Type::Litecoin => <C,
Type::Dodge => &DODGE,
Type::Dash => &DASH,
Type::Monero => &XMR,
Type::Neo => &NEO,
Type::Ripple => &XRP,
}
}
fn all() -> Vec<Type> {
vec![
Type::Bitcoin,
Type::BitcoinCash,
Type::Ethereum,
Type::Litecoin,
Type::Dodge,
Type::Dash,
Type::Monero,
Type::Neo,
Type::Ripple,
]
}
}
/// Evaluate CryptoCurrency & Validate: true if the address matches ANY
/// supported currency's pattern.
fn validate(value: &str) -> bool {
    Type::all()
        .into_iter()
        .any(|c| c.pattern().is_match(value))
}
pub fn is_bitcoin(value: &str) -> bool {
//! Check if the given crypto address is Bitcoin.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_bitcoin;
//! fn main() {
//! assert_eq!(is_bitcoin("<bitcoin address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_bitcoin_cash(value: &str) -> bool {
//! Check if the given crypto address is Bitcoin Cash.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_bitcoin_cash;
//! fn main() {
//! assert_eq!(is_bitcoin_cash("<bitcoin cash address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_ethereum(value: &str) -> bool {
//! Check if the given crypto address is Ethereum.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_ethereum;
//! fn main() {
//! assert_eq!(is_ethereum("<ethereum address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_litecoin(value: &str) -> bool {
//! Check if the given crypto address is Litecoin.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_litecoin;
//! fn main() {
//! assert_eq!(is_litecoin("<litecoin address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_dogecoin(value: &str) -> bool {
//! Check if the given crypto address is Dodgecoin.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_dogecoin;
//! fn main() {
//! assert_eq!(is_dogecoin("<dodgecoin address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_dash(value: &str) -> bool {
//! Check if the given crypto address is Dash.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_dash;
//! fn main() {
//! assert_eq!(is_dash("<dash address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_monero(value: &str) -> bool {
//! Check if the given crypto address is Monero.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_monero;
//! fn main() {
//! assert_eq!(is_monero("<monero address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_neo(value: &str) -> bool {
//! Check if the given crypto address is Neo.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_neo;
//! fn main() {
//! assert_eq!(is_neo("<neo address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_ripple(value: &str) -> bool {
//! Check if the given crypto address is Ripple.
//!
//! ## Example Usage
//! ```rust
//! use validaten::crypto::is_ripple;
//! fn main() {
//! assert_eq!(is_ripple("<ripple address>"), false);
//! }
//! ```
validate(value)
}
pub fn is_cryptocurrency_any(value: &str) -> bool {
    //! Check if the given string is a Crypto Currency.
    //!
    //! ## Example Usage
    //! ```rust
    //! use validaten::crypto::is_cryptocurrency_any;
    //! fn main() {
    //!     assert_eq!(is_cryptocurrency_any("<cryptocurrency address>"), false);
    //! }
    //! ```
    // Any-currency check: this is exactly what `validate` implements.
    validate(value)
}
pub fn which_cryptocurrency(value: &str) -> Option<&str> {
    //! Output the Crypto Currency Name given the cryptocurrencty address.
    //!
    //! ## Example Usage
    //! ```rust
    //! use validaten::crypto::which_cryptocurrency;
    //! fn main() {
    //!     assert_eq!(which_cryptocurrency("<cryptocurrency address>"), None);
    //! }
    //! ```
    // Return the name of the first currency whose pattern matches, if any.
    Type::all()
        .into_iter()
        .find(|c| c.pattern().is_match(value))
        .map(|c| c.name())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_which_cryptocurrency() {
// Bitcoin
assert_eq!(which_cryptocurrency("1GiWxH6PzSSmbdcK72XfGpqhjSb6nae6h9"), Some("Bitcoin"));
// Bitcoin Cash
assert_eq!(which_cryptocurrency("qppjlghjlwg6tgxv7ffhvs43rlul0kpp4c0shk4dr6"), Some("Bitcoin Cash"));
// Ethereum
assert_eq!(which_cryptocurrency("0xaae47eae4ddd4877e0ae0bc780cfaee3cc3b52cb"), Some("Ethereum"));
// Litecoin
assert_eq!(which_cryptocurrency("LQ4i7FLNhfCC9GXw682mS1NzvVKbtJAFZq"), Some("Litecoin"));
// Dodgecoin
assert_eq!(which_cryptocurrency("D6K2nqqQKycTucCSFSHhpiig4yQ6NPQRf9"), Some("Dodgecoin"));
// Dash
assert_eq!(which_cryptocurrency("XqLYPDTADW6EYuQmTcEAx81o8EHTKwqTK8"), Some("Dash"));
// Monero
assert_eq!(which_cryptocurrency("41gYNjXMeXaTmZFVv645A1HRVoA637cXFGbDdLV8Gn5hLvfxfRLKigUTvm2HVZhBzDVPeGpDy71qxASTpRFgepDwLexA8Ti"), Some("Monero"));
// Neo
assert_eq!(which_cryptocurrency("AeHauBkGkHPTxh4PEUhNr7WRgivmcdCRnR"), Some("Neo"));
// Ripple
assert_eq!(which_cryptocurrency("rUocf1ixKzTuEe34kmVhRvGqNCofY1NJzV"), Some("Ripple"));
// No coin identified
assert_eq!(which_cryptocurrency("LQ4i7FLNbtJAFZq"), None);
}
#[test]
fn test_is_cryptocurrency_any() {
assert!(is_cryptocurrency_any("D6K2nqqQKycTucCSFSHhpiig4yQ6NPQRf9"));
assert!(!is_cryptocurrency_any("LQ4i7FLNbtJAFZq"));
}
#[test]
fn test_is_bitcoin() {
assert!(is_bitcoin("1GiWxH6PzSSmbdcK72XfGpqhjSb6nae6h9"))
}
#[test]
fn test_is_bitcoin_cash() {
assert!(is_bitcoin_cash("qppjlghjlwg6tgxv7ffhvs43rlul0kpp4c0shk4dr6"))
}
#[test]
fn test_is_ethereum() {
assert!(is_ethereum("0xaae47eae4ddd4877e0ae0bc780cfaee3cc3b52cb"))
}
#[test]
fn test_is_litecoin() {
assert!(is_litecoin("LQ4i7FLNhfCC9GXw682mS1NzvVKbtJAFZq"))
}
#[test]
fn test_is_dogecoin() {
assert!(is_dogecoin("D6K2nqqQKycTucCSFSHhpiig4yQ6NPQRf9"))
}
#[test]
fn test_is_dash() {
assert!(is_dash("XqLYPDTADW6EYuQmTcEAx81o8EHTKwqTK8"))
}
#[test]
fn test_is_monero() {
assert!(is_monero("41gYNjXMeXaTmZFVv645A1HRVoA637cXFGbDdLV8Gn5hLvfxfRLKigUTvm2HVZhBzDVPeGpDy71qxASTpRFgepDwLexA8Ti"))
}
#[test]
fn test_is_neo() {
assert!(is_neo("AeHauBkGkHPTxh4PEUhNr7WRgivmcdCRnR"))
}
#[test]
fn test_is_ripple() {
assert!(is_ripple("rUocf1ixKzTuEe34kmVhRvGqNCofY1NJzV"))
}
} |
use sdl2::rect::Rect as SdlRect;
/// Axis-aligned rectangle in floating-point coordinates.
pub struct Rectangle {
    pub x: f64,
    pub y: f64,
    pub width: f64,
    pub height: f64,
}
impl Rectangle {
    /// Converts to an SDL `Rect`, truncating the `f64` coordinates.
    ///
    /// # Panics
    /// Panics if `width` or `height` is negative (a NaN dimension also
    /// fails the assertion, since NaN comparisons are false).
    pub fn to_sdl(&self) -> SdlRect {
        // NOTE(review): `as` casts drop the fractional part; confirm callers
        // expect truncation rather than rounding.
        assert!(self.width >= 0f64 && self.height >= 0f64);
        SdlRect::new(self.x as i32, self.y as i32, self.width as u32, self.height as u32)
    }
} |
// cargo run --release -- ~/p/covid_county.json ./out
use anyhow::{Context, Result};
use chrono::{TimeZone, Utc};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::fs;
use std::path::PathBuf;
use structopt::StructOpt;
// Timestamps are serialized as RFC 3339 strings.
type Rfc3339 = String;
/// Command-line options: input JSON path and output directory.
#[derive(Debug, StructOpt)]
#[structopt(name = "example", about = "An example of StructOpt usage.")]
struct Opt {
    /// Input file containing covid API response
    // https://www.knowi.com/coronavirus-dashboards/covid-19-api/
    //
    // per county level:
    // curl https://knowi.com/api/data/ipE4xJhLBkn8H8jisFisAdHKvepFR5I4bGzRySZ2aaXlJgie\?entityName\=Raw%20County%20level%20Data\&exportFormat\=json
    #[structopt(parse(from_os_str))]
    input: PathBuf,
    // Directory where the generated output is written.
    #[structopt(parse(from_os_str))]
    output_dir: PathBuf,
}
/// One row of the raw covid API response (per county, per day, per metric).
#[derive(Deserialize)]
struct CovidCountyRawDataEntry {
    // Unix timestamp in milliseconds (divided by 1000 when grouped).
    #[serde(rename(deserialize = "Date"))]
    pub date: i64,
    #[serde(rename(deserialize = "County"))]
    pub county: String,
    #[serde(rename(deserialize = "State"))]
    pub state: String,
    // NOTE(review): unlike the other fields this one has no serde rename —
    // assumes the JSON key is lowercase "values"; confirm against the API.
    pub values: i64,
    // Metric kind; observed values are "Confirmed" and "Deaths".
    #[serde(rename(deserialize = "Type"))]
    pub entry_type: String,
}
/// Accumulated per-county totals for a single day.
#[derive(Debug)]
struct CountyEntry {
    // County name (without the state prefix used in the map key).
    name: String,
    state: String,
    confirmed: i64,
    deaths: i64,
}
/// One graph node (either a county or a state aggregate) in the output JSON.
#[derive(Serialize, Debug, Default)]
struct Node {
    name: String,
    // Metric name -> accumulated value (e.g. "confirmed", "deaths").
    metrics: BTreeMap<&'static str, i64>,
    // Names of nodes this node points to (state -> its counties).
    edges_directed: BTreeSet<String>,
    // Free-form string attributes (e.g. "display_name").
    extra_fields: BTreeMap<&'static str, String>,
}
impl Node {
    /// Adds `v` to the metric named `m`, initializing it to 0 first if the
    /// metric was not present yet.
    pub fn add_metric(&mut self, m: &'static str, v: i64) {
        // Single-lookup upsert via the entry API; replaces the previous
        // contains_key + insert + get_mut double lookup (clippy: map_entry).
        *self.metrics.entry(m).or_insert(0) += v;
    }
}
/// The full graph for one day: the timestamp plus all county and state nodes.
#[derive(Serialize, Debug, Default)]
struct Graph {
    // RFC 3339 date string; also used as the output file name.
    timestamp: Rfc3339,
    nodes: Vec<Node>,
}
/// Pipeline: read the raw covid JSON dump, parse it, group rows by
/// (date, county), add per-state aggregate nodes, then write one graph
/// JSON file per day into `output_dir`.
fn main() -> Result<()> {
    // NOTE(review): `ll` is a project-local logging crate; `event` appears to
    // time a closure and attach key/value data to the log record — confirm.
    let l = ll::Logger::stdout();
    let Opt { input, output_dir } = Opt::from_args();
    let raw_data = l.event("read_file", |e| {
        let data = fs::read(input).context("Failed to read raw covid JSON data")?;
        e.add_data("size MB", data.capacity() / 1000000);
        Ok(data)
    })?;
    let data = l.event("parse", |e| {
        let result = serde_json::from_slice::<Vec<CovidCountyRawDataEntry>>(&raw_data[..])
            .context("Failed to parse JSON")?;
        e.add_data("entries", result.len());
        Ok(result)
    })?;
    // Parallel group-by: fold builds one HashMap<date, HashMap<county_key,
    // CountyEntry>> per rayon worker, then reduce merges the per-worker maps.
    let grouped = l.event("group by", |_| {
        let result =
            data.into_par_iter()
                .fold(
                    || HashMap::new(),
                    |mut result, entry| {
                        // raw data is in milliseconds
                        let date = Utc.timestamp(entry.date / 1000, 0);
                        let date_entry = result.entry(date).or_insert_with(HashMap::new);
                        let state = entry.state;
                        let name = entry.county;
                        // Make sure we namespace by state in case there are similar county names
                        let county_key = format!("{} - {}", &state, &name);
                        let county_entry =
                            date_entry.entry(county_key).or_insert_with(|| CountyEntry {
                                name,
                                state,
                                confirmed: 0,
                                deaths: 0,
                            });
                        // Each raw row carries exactly one metric, selected by Type.
                        match entry.entry_type.as_str() {
                            "Confirmed" => county_entry.confirmed += entry.values,
                            "Deaths" => county_entry.deaths += entry.values,
                            _ => (),
                        }
                        result
                    },
                )
                .reduce(
                    || HashMap::new(),
                    |from, mut into| {
                        // Merge worker-local maps by summing the per-county counters.
                        for (date, from_county_entries) in from {
                            let into_date_entry = into.entry(date).or_insert_with(HashMap::new);
                            for (county, from_county_entry) in from_county_entries {
                                let from_confirmed = from_county_entry.confirmed;
                                let from_deaths = from_county_entry.deaths;
                                let into_county_entry = into_date_entry
                                    .entry(county)
                                    .or_insert_with(|| CountyEntry {
                                        name: from_county_entry.name,
                                        state: from_county_entry.state,
                                        confirmed: 0,
                                        deaths: 0,
                                    });
                                into_county_entry.confirmed += from_confirmed;
                                into_county_entry.deaths += from_deaths;
                            }
                        }
                        into
                    },
                );
        Ok(result)
    })?;
    // For each day, build county nodes plus one aggregate node per state that
    // sums its counties' metrics and points at them via edges_directed.
    let with_state_nodes = l.event("add state nodes", |_| {
        let nodes_by_date = grouped
            .into_par_iter()
            .map(|(date, entries)| {
                let mut states = HashMap::new();
                let mut all_nodes: Vec<Node> = Vec::new();
                for (key, county_entry) in entries {
                    if !states.contains_key(&county_entry.state) {
                        states.insert(
                            county_entry.state.clone(),
                            Node {
                                name: county_entry.state.clone(),
                                ..Default::default()
                            },
                        );
                    }
                    let state_entry = states
                        .get_mut(&county_entry.state)
                        .expect("state must be there");
                    state_entry.add_metric("confirmed", county_entry.confirmed);
                    state_entry.add_metric("deaths", county_entry.deaths);
                    state_entry.edges_directed.insert(key.clone());
                    all_nodes.push(Node {
                        // key is the "State - County" namespaced name; the plain
                        // county name goes into display_name.
                        name: key,
                        metrics: vec![
                            ("confirmed", county_entry.confirmed),
                            ("deaths", county_entry.deaths),
                        ]
                        .into_iter()
                        .collect(),
                        extra_fields: vec![("display_name", county_entry.name)]
                            .into_iter()
                            .collect(),
                        edges_directed: BTreeSet::new(),
                    })
                }
                for (_, state) in states {
                    all_nodes.push(state);
                }
                Graph {
                    timestamp: date.to_rfc3339(),
                    nodes: all_nodes,
                }
            })
            .collect::<Vec<Graph>>();
        Ok(nodes_by_date)
    })?;
    // Write one pretty-printed JSON file per day, named by its RFC 3339
    // timestamp. NOTE(review): such names contain ':' characters, which are
    // invalid in Windows file names — confirm this only targets Unix.
    l.event("write_files", |e| {
        e.add_data("output_dir", output_dir.display().to_string());
        e.add_data("num_files", with_state_nodes.len());
        with_state_nodes
            .into_par_iter()
            .map(|graph| {
                let mut filepath = output_dir.clone();
                filepath.push(&graph.timestamp);
                let json = serde_json::to_string_pretty(&graph)?;
                fs::write(filepath, json)?;
                Ok(())
            })
            .collect::<Result<()>>()?;
        Ok(())
    })?;
    Ok(())
}
|
use analyser::interface::*;
use ops::prelude::*;
// Submodules implementing the individual TensorFlow NN ops.
pub mod conv2d;
pub mod local_patch;
pub mod pools;
pub mod space_to_batch;
/// Registers every NN op in this module under its TensorFlow op name,
/// mapping each name to the constructor that builds it from a NodeDef.
pub fn register_all_ops(reg: &mut OpRegister) {
    reg.insert("AvgPool", pools::pool::<pools::AvgPooler>);
    reg.insert("Conv2D", conv2d::conv2d);
    reg.insert("MaxPool", pools::pool::<pools::MaxPooler>);
    reg.insert("Relu", relu);
    reg.insert("Sigmoid", sigmoid);
    reg.insert("Softmax", Softmax::build);
    reg.insert("SpaceToBatchND", space_to_batch::space_to_batch_nd);
    reg.insert("BatchToSpaceND", space_to_batch::batch_to_space_nd);
}
// Relu: element-wise max(x, 0) over signed element types — negatives become zero.
element_map_signed!(Relu, relu, |x| if x.is_negative() { T::zero() } else { x });
// Sigmoid: element-wise logistic function 1 / (1 + e^-x) over float element types.
element_map_float!(Sigmoid, sigmoid, |x| T::one() / (T::one() + x.neg().exp()));
/// Softmax operator: exponentiates its f32 input and divides by the global
/// sum, turning the tensor into a probability distribution over all elements.
#[derive(Debug, Clone)]
pub struct Softmax {}
impl Softmax {
    /// Builds a `Softmax` op from a TensorFlow `NodeDef`. The node carries
    /// no attributes this op needs, so the argument is ignored.
    pub fn build(_pb: &::tfpb::node_def::NodeDef) -> Result<Box<Op>> {
        let op = Softmax {};
        Ok(Box::new(op))
    }
}
impl Op for Softmax {
    /// Evaluates the operation given the input tensors.
    ///
    /// Computes softmax over *all* elements of the single f32 input:
    /// each element becomes exp(x - max) / sum(exp(x - max)).
    fn eval(&self, mut inputs: Vec<TensorView>) -> Result<Vec<TensorView>> {
        let m_input = args_1!(inputs);
        let mut input = m_input
            .into_tensor()
            .take_f32s()
            .ok_or("Expect input #0 to be f32")?;
        // Subtract the max before exponentiating for numerical stability:
        // softmax(x) == softmax(x - c) mathematically, but exp of a large
        // input overflows f32 to +inf, which the previous code did not guard.
        let max = input.iter().cloned().fold(::std::f32::NEG_INFINITY, f32::max);
        input.map_inplace(|a| *a = (*a - max).exp());
        let norm: f32 = input.iter().sum();
        input.map_inplace(|a| *a /= norm);
        let result = Tensor::from(input);
        Ok(vec![result.into()])
    }
    /// Returns the attributes of the operation and their values.
    /// Softmax has no attributes.
    fn get_attributes(&self) -> HashMap<&'static str, Attr> {
        hashmap!{}
    }
}
impl InferenceRulesOp for Softmax {
    /// Registers the inference rules of the operator:
    /// exactly one input and one output, with identical datatype and shape
    /// (softmax is an element-count-preserving normalization).
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        solver: &mut Solver<'r>,
        inputs: &'p TensorsProxy,
        outputs: &'p TensorsProxy,
    ) {
        solver
            .equals(&inputs.len, 1)
            .equals(&outputs.len, 1)
            .equals(&inputs[0].datatype, &outputs[0].datatype)
            .equals(&inputs[0].shape, &outputs[0].shape);
    }
}
/// Builds a 4-D ndarray from a slice of nested fixed-size initializers
/// (e.g. `&[[[[f32; a]; b]; c]]`), reinterpreting the nested arrays as a
/// flat element buffer without copying the elements.
pub fn arr4<A, V, U, T>(xs: &[V]) -> ::ndarray::Array4<A>
where
    V: ::ndarray::FixedInitializer<Elem = U> + Clone,
    U: ::ndarray::FixedInitializer<Elem = T> + Clone,
    T: ::ndarray::FixedInitializer<Elem = A> + Clone,
    A: Clone,
{
    use ndarray::*;
    let mut xs = xs.to_vec();
    let dim = Ix4(xs.len(), V::len(), U::len(), T::len());
    let ptr = xs.as_mut_ptr();
    let len = xs.len();
    let cap = xs.capacity();
    let expand_len = len * V::len() * U::len() * T::len();
    // The Vec's buffer is adopted below via from_raw_parts; forget it so the
    // original Vec does not free it on scope exit (no double free).
    ::std::mem::forget(xs);
    unsafe {
        let v = if ::std::mem::size_of::<A>() == 0 {
            // SAFETY(review): zero-sized elements never touch the buffer, so
            // capacity accounting does not matter; len == cap is conventional.
            Vec::from_raw_parts(ptr as *mut A, expand_len, expand_len)
        } else if V::len() == 0 || U::len() == 0 || T::len() == 0 {
            // Any empty inner dimension means zero elements overall; drop the
            // (already-forgotten) buffer by replacing it with an empty Vec.
            // NOTE(review): this leaks the original allocation when len > 0 —
            // confirm that is acceptable for these call sites.
            Vec::new()
        } else {
            // SAFETY(review): assumes `[V; n]` is layout-identical to the
            // flattened `[A; n * V::len() * …]`, which holds for nested arrays
            // (repr of arrays is contiguous), so the same allocation can be
            // reinterpreted element-wise with scaled len and capacity.
            let expand_cap = cap * V::len() * U::len() * T::len();
            Vec::from_raw_parts(ptr as *mut A, expand_len, expand_cap)
        };
        // Dimensions were computed from the same lengths used to build `v`,
        // so the shape/element-count invariant of _unchecked holds.
        ArrayBase::from_shape_vec_unchecked(dim, v)
    }
}
|
//! Measure each Sightglass phase using VTune. When using this [Measure] and running a benchmark
//! inside VTune, VTune will mark the phases in the timeline and allow for filtering based on the
//! Sightglass phase. For example:
//!
//! ```text
//! vtune -collect hotspots sightglass-cli benchmark <path to>/benchmark.wasm --measure vtune
//! ```
use super::{Measure, Measurements};
use ittapi::{Domain, Task};
use lazy_static::lazy_static;
use sightglass_data::Phase;
lazy_static! {
    // Single ITT domain under which all Sightglass phase tasks are reported.
    static ref DOMAIN: Domain = Domain::new("sightglass");
}
/// Marks Sightglass phases as VTune tasks. Holds the currently-running task,
/// if any; `start` opens one and `end` closes it.
pub struct VTuneMeasure(Option<Task<'static>>);
impl VTuneMeasure {
    /// Creates a measure with no VTune task currently running.
    pub fn new() -> Self {
        Self(None)
    }
}
/// `Default` delegates to `new()`; added so the type follows the conventional
/// `new`/`Default` pairing (clippy: `new_without_default`).
impl Default for VTuneMeasure {
    fn default() -> Self {
        Self::new()
    }
}
impl Measure for VTuneMeasure {
    /// Opens a VTune task named after the phase. Exactly one task may be
    /// active at a time; starting while one is running is a bug.
    fn start(&mut self, phase: Phase) {
        let name = phase.to_string();
        let previous = self.0.replace(Task::begin(&DOMAIN, name.as_str()));
        assert!(previous.is_none(), "no other VTune task should be running");
    }
    /// Closes the task opened by the matching `start` call.
    fn end(&mut self, _phase: Phase, _measurements: &mut Measurements) {
        self.0
            .take()
            .expect("the VTune Task to be started by Measure::start")
            .end();
    }
}
|
use dashmap::mapref::{entry::Entry, one::RefMut};
use eyre::Report;
use twilight_model::id::{
marker::{GuildMarker, UserMarker},
Id,
};
use crate::{
commands::osu::ProfileSize,
database::{Authorities, EmbedsSize, GuildConfig, MinimizedPp, Prefix, Prefixes, UserConfig},
BotResult, Context,
};
impl Context {
    /// Returns the stored config for `user_id`, inserting and returning the
    /// default config if the user has none yet.
    pub async fn user_config(&self, user_id: Id<UserMarker>) -> BotResult<UserConfig> {
        match self.psql().get_user_config(user_id).await? {
            Some(config) => Ok(config),
            None => {
                let config = UserConfig::default();
                self.psql().insert_user_config(user_id, &config).await?;
                Ok(config)
            }
        }
    }
    /// Returns a mutable handle to the cached guild config, creating a
    /// default entry (and best-effort persisting it) on first access.
    ///
    /// A DB failure is only logged: the in-memory default still serves.
    // NOTE(review): on the vacant path the dashmap entry (shard write lock)
    // is held across the `upsert_guild_config` await — confirm no other task
    // touching the same shard can deadlock or stall on this.
    async fn guild_config_ref(
        &self,
        guild_id: Id<GuildMarker>,
    ) -> RefMut<'_, Id<GuildMarker>, GuildConfig> {
        match self.data.guilds.entry(guild_id) {
            Entry::Occupied(entry) => entry.into_ref(),
            Entry::Vacant(entry) => {
                let config = GuildConfig::default();
                if let Err(why) = self.psql().upsert_guild_config(guild_id, &config).await {
                    let wrap = format!("failed to insert guild {guild_id}");
                    let report = Report::new(why).wrap_err(wrap);
                    warn!("{report:?}");
                }
                entry.insert(config)
            }
        }
    }
    /// Clone of the guild's authority roles/users.
    pub async fn guild_authorities(&self, guild_id: Id<GuildMarker>) -> Authorities {
        self.guild_config_ref(guild_id).await.authorities.clone()
    }
    /// Clone of the guild's command prefixes.
    pub async fn guild_prefixes(&self, guild_id: Id<GuildMarker>) -> Prefixes {
        self.guild_config_ref(guild_id).await.prefixes.clone()
    }
    /// The guild's primary prefix, or the global default "<" outside guilds.
    // NOTE(review): assumes `prefixes` is never empty for a stored guild
    // config — indexing [0] would panic otherwise; confirm the invariant.
    pub async fn guild_first_prefix(&self, guild_id: Option<Id<GuildMarker>>) -> Prefix {
        match guild_id {
            Some(guild_id) => self.guild_config_ref(guild_id).await.prefixes[0].clone(),
            None => "<".into(),
        }
    }
    /// Whether song commands in this guild should include lyrics.
    pub async fn guild_with_lyrics(&self, guild_id: Id<GuildMarker>) -> bool {
        self.guild_config_ref(guild_id).await.with_lyrics()
    }
    /// The guild's configured profile embed size.
    pub async fn guild_profile_size(&self, guild_id: Id<GuildMarker>) -> ProfileSize {
        self.guild_config_ref(guild_id).await.profile_size()
    }
    /// Whether score embeds in this guild show the retry count.
    pub async fn guild_show_retries(&self, guild_id: Id<GuildMarker>) -> bool {
        self.guild_config_ref(guild_id).await.show_retries()
    }
    /// The guild's configured embed maximize/minimize behavior.
    pub async fn guild_embeds_maximized(&self, guild_id: Id<GuildMarker>) -> EmbedsSize {
        self.guild_config_ref(guild_id).await.embeds_size()
    }
    /// The guild's per-user track limit.
    pub async fn guild_track_limit(&self, guild_id: Id<GuildMarker>) -> u8 {
        self.guild_config_ref(guild_id).await.track_limit()
    }
    /// The guild's minimized-pp display setting.
    pub async fn guild_minimized_pp(&self, guild_id: Id<GuildMarker>) -> MinimizedPp {
        self.guild_config_ref(guild_id).await.minimized_pp()
    }
    /// An owned copy of the whole guild config.
    pub async fn guild_config(&self, guild_id: Id<GuildMarker>) -> GuildConfig {
        self.guild_config_ref(guild_id).await.to_owned()
    }
    /// Mutates the cached guild config through `f`, then persists the result.
    pub async fn update_guild_config<F>(&self, guild_id: Id<GuildMarker>, f: F) -> BotResult<()>
    where
        F: FnOnce(&mut GuildConfig),
    {
        let mut config = self.data.guilds.entry(guild_id).or_default();
        f(config.value_mut());
        self.psql().upsert_guild_config(guild_id, &config).await
    }
}
|
// Sentinel byte used to signal failure.
// NOTE(review): semantics inferred from the name only — confirm at call sites.
pub const ERROR_VALUE: u8 = 0u8;
// A pair of unsigned sizes/indices; exact meaning depends on the caller.
pub type Res = (usize, usize);
|
use error_chain::error_chain;
// Generates this module's Error/Result types, with automatic From conversions
// for the wrapped foreign error types so `?` works on them directly.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(reqwest::Error);
        UrlParse(url::ParseError);
    }
}
/// Fetches http://httpbin.org/get with the async client and prints the
/// response status, headers and body.
///
/// # Errors
/// Returns an error if the request fails or the body cannot be read.
pub async fn try_request() -> Result<()> {
    // NOTE: the previous version first issued the same request through
    // `reqwest::blocking::get` (and discarded the result). The blocking
    // client panics when invoked from within an async runtime, so that
    // call was removed; only the async path remains.
    let res = reqwest::get("http://httpbin.org/get").await?;
    println!("Status: {}", res.status());
    println!("Headers:\n{:#?}", res.headers());
    let body = res.text().await?;
    println!("Body:\n{}", body);
    Ok(())
}
// pub fn set_params() -> Result<()> {
// use serde::Deserialize;
// use std::collections::HashMap;
// use url::Url;
// use reqwest::Client;
// use reqwest::header::{UserAgent, Authorization, Bearer};
// header! { (XPoweredBy, "X-Powered-By") => [String] }
// #[derive(Deserialize, Debug)]
// pub struct HeadersEcho {
// pub headers: HashMap<String, String>,
// }
// let url = Url::parse_with_params("http://httpbin.org/headers",
// &[("lang", "rust"), ("browser", "servo")])?;
// let mut response = Client::new()
// .get(url)
// .header(UserAgent::new("Rust-test"))
// .header(Authorization(Bearer { token: "DEadBEEfc001cAFeEDEcafBAd".to_owned() }))
// .header(XPoweredBy("Guybrush Threepwood".to_owned()))
// .send()?;
// let out: HeadersEcho = response.json()?;
// assert_eq!(out.headers["Authorization"],
// "Bearer DEadBEEfc001cAFeEDEcafBAd");
// assert_eq!(out.headers["User-Agent"], "Rust-test");
// assert_eq!(out.headers["X-Powered-By"], "Guybrush Threepwood");
// assert_eq!(response.url().as_str(),
// "http://httpbin.org/headers?lang=rust&browser=servo");
// println!("{:?}", out);
// Ok(())
// } |
use std::f64::consts::PI;
/// Full turn in radians (2π).
pub const TWO_PI: f64 = PI * 2.0;
/// Quarter turn in radians (π/2).
pub const PI_2: f64 = PI / 2.0;
/// Default tolerance for approximate floating-point comparisons.
pub const EPS: f64 = 1E-8;
/// Approximate equality up to a caller-supplied absolute tolerance.
pub trait AlmostEq<RHS = Self> {
    /// Returns `true` when `self` and `rhs` differ by strictly less than `eps`.
    fn is_eq(&self, rhs: &RHS, eps: f64) -> bool;
}
impl AlmostEq for f64 {
    fn is_eq(&self, rhs: &f64, eps: f64) -> bool {
        // |self - rhs| < eps
        (self - rhs).abs() < eps
    }
}
/// Rotation by an angle, offered both in-place and by-value.
pub trait Rotatable<AngleType> {
    /// The type produced by the non-mutating `rotated`.
    type Output;
    /// Rotates `self` in place by `angle`.
    fn rotate(&mut self, angle: AngleType);
    /// Returns a rotated copy, leaving `self` unchanged.
    fn rotated(&self, angle: AngleType) -> Self::Output;
}
/// Normalization, offered both in-place and by-value.
pub trait Normalizable {
    /// Normalizes `self` in place.
    fn normalize(&mut self);
    /// Returns a normalized copy, leaving `self` unchanged.
    fn normalized(&self) -> Self;
}
use super::utils;
use crate::{
endpoints::params::DeleteParams,
service::{error::PostgresManagementServiceError, PostgresManagementService},
};
use actix_web::ResponseError;
use async_trait::async_trait;
use chrono::Utc;
use core::pin::Pin;
use drogue_client::registry;
use drogue_cloud_database_common::{
auth::{ensure, ensure_with},
error::ServiceError,
models::{
app::{ApplicationAccessor, PostgresApplicationAccessor},
device::{DeviceAccessor, PostgresDeviceAccessor},
diff::diff_paths,
Generation, Lock,
},
};
use drogue_cloud_registry_events::{Event, EventSender, SendEvent};
use drogue_cloud_service_api::{
auth::user::{authz::Permission, UserInformation},
labels::LabelSelector,
};
use futures::{future, Stream, TryStreamExt};
use tokio_postgres::error::SqlState;
use uuid::Uuid;
/// CRUD operations over applications and their devices, each checked against
/// the calling user's permissions.
#[async_trait]
pub trait ManagementService: Clone {
    type Error: ResponseError;
    /// Creates a new application owned by `identity`.
    async fn create_app(
        &self,
        identity: &UserInformation,
        data: registry::v1::Application,
    ) -> Result<(), Self::Error>;
    /// Fetches an application by name; `None` if absent.
    async fn get_app(
        &self,
        identity: &UserInformation,
        name: &str,
    ) -> Result<Option<registry::v1::Application>, Self::Error>;
    /// Streams applications visible to `identity`, filtered by label
    /// selector, with optional pagination.
    async fn list_apps(
        &self,
        identity: UserInformation,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Application, Self::Error>> + Send>>,
        Self::Error,
    >;
    /// Replaces an existing application's data.
    async fn update_app(
        &self,
        identity: &UserInformation,
        data: registry::v1::Application,
    ) -> Result<(), Self::Error>;
    /// Deletes (or soft-deletes, if finalizers remain) an application.
    async fn delete_app(
        &self,
        identity: &UserInformation,
        name: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error>;
    /// Creates a new device inside its application.
    async fn create_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error>;
    /// Fetches a device by application and name; `None` if absent.
    async fn get_device(
        &self,
        identity: &UserInformation,
        app: &str,
        name: &str,
    ) -> Result<Option<registry::v1::Device>, Self::Error>;
    /// Streams an application's devices, filtered by label selector, with
    /// optional pagination.
    async fn list_devices(
        &self,
        identity: UserInformation,
        app: &str,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Device, Self::Error>> + Send>>,
        Self::Error,
    >;
    /// Replaces an existing device's data.
    async fn update_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error>;
    /// Deletes (or soft-deletes, if finalizers remain) a device.
    async fn delete_device(
        &self,
        identity: &UserInformation,
        app: &str,
        name: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error>;
}
/// Postgres-backed implementation. Mutations run in a transaction, write
/// change events to the outbox table inside that transaction, commit, and
/// only then publish the events to the external sender.
///
/// Fixes applied relative to the previous revision: several `&current` /
/// `&params` expressions had been mangled into `¤t` / `¶ms` by
/// an HTML-entity decoding pass (`&curren`/`&para`) and are restored, and a
/// redundant closure in `list_devices` now passes the variant constructor
/// directly.
#[async_trait]
impl<S> ManagementService for PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    type Error = PostgresManagementServiceError<S::Error>;
    async fn create_app(
        &self,
        identity: &UserInformation,
        application: registry::v1::Application,
    ) -> Result<(), Self::Error> {
        let (mut app, aliases) = Self::app_to_entity(application)?;
        let generation = app.generation;
        let name = app.name.clone();
        // assign a new UID
        let uid = Uuid::new_v4();
        app.uid = uid;
        app.owner = identity.user_id().map(Into::into);
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        // map duplicate-key DB errors to a user-facing conflict
        PostgresApplicationAccessor::new(&t)
            .create(app, aliases)
            .await
            .map_err(|err| match err.sql_state() {
                Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                    ServiceError::Conflict("Unique key violation".to_string())
                }
                _ => err,
            })?;
        let events = Event::new_app(self.instance.clone(), name, uid, generation, vec![]);
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    async fn get_app(
        &self,
        identity: &UserInformation,
        name: &str,
    ) -> Result<Option<registry::v1::Application>, Self::Error> {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(name, Lock::None)
            .await?;
        // a found-but-forbidden app surfaces as an auth error, not as None
        if let Some(app) = &app {
            ensure(app, identity, Permission::Read)?;
        }
        Ok(app.map(Into::into))
    }
    async fn list_apps(
        &self,
        identity: UserInformation,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Application, Self::Error>> + Send>>,
        Self::Error,
    > {
        let c = self.pool.get().await?;
        Ok(Box::pin(
            PostgresApplicationAccessor::new(&c)
                .list(
                    None,
                    labels,
                    limit,
                    offset,
                    Some(&identity),
                    Lock::None,
                    &["NAME"],
                )
                .await?
                .try_filter_map(move |app| {
                    // Using ensure call here is just a safeguard! The list operation must only return
                    // entries the user has access to. Otherwise the limit/offset functionality
                    // won't work
                    let result = match ensure(&app, &identity, Permission::Read) {
                        Ok(_) => Some(app.into()),
                        Err(_) => None,
                    };
                    future::ready(Ok(result))
                })
                .map_err(PostgresManagementServiceError::Service)
                .into_stream(),
        ))
    }
    async fn update_app(
        &self,
        identity: &UserInformation,
        application: registry::v1::Application,
    ) -> Result<(), Self::Error> {
        // optimistic-concurrency markers, checked during the update
        let expected_uid = application.metadata.uid.clone();
        let expected_resource_version = application.metadata.resource_version.clone();
        let (app, aliases) = Self::app_to_entity(application)?;
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let events = self
            .perform_update_app(
                &t,
                Some(identity),
                app,
                Some(aliases),
                expected_uid,
                expected_resource_version,
            )
            .await?;
        Self::send_to_outbox(&t, &events).await?;
        t.commit().await?;
        // send events
        events.send_with(&self.sender).await?;
        Ok(())
    }
    async fn delete_app(
        &self,
        identity: &UserInformation,
        id: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // get current state for diffing
        let mut current = match accessor.get(&id, Lock::ForUpdate).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        // already soft-deleted: deleting again is a no-op
        if current.deletion_timestamp.is_some() {
            return Ok(());
        }
        ensure(&current, identity, Permission::Admin)?;
        utils::check_preconditions(&params.preconditions, &current)?;
        // there is no need to use the provided constraints, as we locked the entry "for update"
        // next, we need to delete the application
        // first, delete all devices ...
        let remaining_devices = PostgresDeviceAccessor::new(&t).delete_app(&id).await?;
        // ...and count the ones we can only soft-delete
        if remaining_devices > 0 {
            // we have pending device deletions, so add the finalizer
            current.finalizers.push("has-devices".into());
        }
        // next generation
        let generation = current.increment_generation()?;
        let uid = current.uid;
        // if there are no finalizers ...
        let paths = if current.finalizers.is_empty() {
            // ... delete the application
            accessor.delete(id).await?;
            // notify an object change
            vec![]
        } else {
            // ... otherwise, mark the application deleted
            log::debug!("Pending finalizers: {:?}", current.finalizers);
            // update deleted timestamp
            current.deletion_timestamp = Some(Utc::now());
            // update the record
            accessor.update_data(current, None).await?;
            // notify a resource change
            vec![".metadata".into()]
        };
        // create events
        let events = Event::new_app(self.instance.clone(), id, uid, generation, paths);
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change event
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    async fn create_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error> {
        let (mut device, aliases) = Self::device_to_entity(device)?;
        let generation = device.generation;
        let application = device.application.clone();
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        // shared lock: keep the app alive while the device is inserted
        let app = PostgresApplicationAccessor::new(&t)
            .get(&application, Lock::ForShare)
            .await?;
        // if there is no entry, or it is marked for deletion, we don't allow adding a new device
        let app = match app {
            Some(app) if app.deletion_timestamp.is_none() => app,
            _ => return Err(ServiceError::ReferenceNotFound.into()),
        };
        // ensure we have access to the application, but don't confirm the device if we don't
        ensure_with(&app, identity, Permission::Write, || {
            ServiceError::ReferenceNotFound
        })?;
        let name = device.name.clone();
        // assign a new UID
        let uid = Uuid::new_v4();
        device.uid = uid;
        // create the device
        PostgresDeviceAccessor::new(&t)
            .create(device, aliases)
            .await
            .map_err(|err| match err.sql_state() {
                Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                    ServiceError::Conflict("Unique key violation".to_string())
                }
                Some(state) if state == &SqlState::FOREIGN_KEY_VIOLATION => {
                    ServiceError::ReferenceNotFound
                }
                _ => err,
            })?;
        // create and persist events
        let events = Event::new_device(
            self.instance.clone(),
            application,
            name,
            uid,
            generation,
            vec![],
        );
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
    async fn get_device(
        &self,
        identity: &UserInformation,
        app_id: &str,
        device_id: &str,
    ) -> Result<Option<registry::v1::Device>, Self::Error> {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(app_id, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, identity, Permission::Read, || ServiceError::NotFound)?;
        let device = PostgresDeviceAccessor::new(&c)
            .get(app_id, device_id, Lock::None)
            .await?;
        Ok(device.map(Into::into))
    }
    async fn list_devices(
        &self,
        identity: UserInformation,
        app_id: &str,
        labels: LabelSelector,
        limit: Option<usize>,
        offset: Option<usize>,
    ) -> Result<
        Pin<Box<dyn Stream<Item = Result<registry::v1::Device, Self::Error>> + Send>>,
        Self::Error,
    > {
        let c = self.pool.get().await?;
        let app = PostgresApplicationAccessor::new(&c)
            .get(app_id, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, &identity, Permission::Read, || ServiceError::NotFound)?;
        Ok(Box::pin(
            PostgresDeviceAccessor::new(&c)
                .list(app_id, None, labels, limit, offset, Lock::None)
                .await?
                .map_ok(|device| device.into())
                .map_err(PostgresManagementServiceError::Service)
                .into_stream(),
        ))
    }
    async fn update_device(
        &self,
        identity: &UserInformation,
        device: registry::v1::Device,
    ) -> Result<(), Self::Error> {
        // optimistic-concurrency markers, checked before writing
        let expected_resource_version = device.metadata.resource_version.clone();
        let expected_uid = device.metadata.uid.clone();
        let (mut device, aliases) = Self::device_to_entity(device)?;
        let application = device.application.clone();
        let name = device.name.clone();
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        let current = match accessor.get(&application, Lock::None).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&current, identity, Permission::Write, || {
            ServiceError::NotFound
        })?;
        let accessor = PostgresDeviceAccessor::new(&t);
        // get current state for diffing
        let current = match accessor.get(&application, &name, Lock::ForUpdate).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        // pre-check versions
        utils::check_versions(expected_uid, expected_resource_version, &current)?;
        // we simply copy over the deletion timestamp
        device.deletion_timestamp = current.deletion_timestamp;
        if device.deletion_timestamp.is_some() && device.finalizers.is_empty() {
            // delete, but don't send any event
            accessor.delete(&application, &name).await?;
            // check with the application
            self.check_clean_app(&t, &application).await?;
            t.commit().await?;
        } else {
            // check which paths changed
            let paths = diff_paths(&current, &device);
            if paths.is_empty() {
                // there was no change
                return Ok(());
            }
            let generation = device.set_incremented_generation(&current)?;
            let uid = current.uid;
            accessor
                .update(device, Some(aliases))
                .await
                .map_err(|err| match err.sql_state() {
                    Some(state) if state == &SqlState::UNIQUE_VIOLATION => {
                        ServiceError::Conflict("Unique key violation".to_string())
                    }
                    _ => err,
                })?;
            // create events
            let events = Event::new_device(
                self.instance.clone(),
                application,
                name,
                uid,
                generation,
                paths,
            );
            // send events to outbox
            Self::send_to_outbox(&t, &events).await?;
            // commit
            t.commit().await?;
            // send change event
            events.send_with(&self.sender).await?;
        }
        // done
        Ok(())
    }
    async fn delete_device(
        &self,
        identity: &UserInformation,
        application: &str,
        device: &str,
        params: DeleteParams,
    ) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresDeviceAccessor::new(&t);
        // get current state for diffing
        let mut current = match accessor.get(&application, &device, Lock::ForUpdate).await? {
            Some(device) => Ok(device),
            None => Err(ServiceError::NotFound),
        }?;
        // already soft-deleted: deleting again is a no-op
        if current.deletion_timestamp.is_some() {
            return Ok(());
        }
        // check if the user has access to the device, we can do this after some initial checks
        // that would return "not found" anyway.
        // Instead of "no access" we return "not found" here, as we don't want users that don't
        // have access to application to probe for devices.
        let app = PostgresApplicationAccessor::new(&t)
            .get(application, Lock::None)
            .await?
            .ok_or(ServiceError::NotFound)?;
        // ensure we have access, but don't confirm the device if we don't
        ensure_with(&app, identity, Permission::Write, || ServiceError::NotFound)?;
        // check the preconditions
        utils::check_preconditions(&params.preconditions, &current)?;
        // there is no need to use the provided constraints, as we locked the entry "for update"
        // next generation
        let generation = current.increment_generation()?;
        let uid = current.uid;
        // if there are no finalizers ...
        let path = if current.finalizers.is_empty() {
            // ... we can directly delete
            accessor.delete(application, device).await?;
            vec![]
        } else {
            // ... otherwise, mark the device deleted
            log::debug!("Pending finalizers: {:?}", current.finalizers);
            // update deleted timestamp
            current.deletion_timestamp = Some(Utc::now());
            // update the record
            accessor.update(current, None).await?;
            vec![".metadata".into()]
        };
        // create events
        let events = Event::new_device(
            self.instance.clone(),
            application,
            device,
            uid,
            generation,
            path,
        );
        // send events to outbox
        Self::send_to_outbox(&t, &events).await?;
        // commit
        t.commit().await?;
        // send change events
        events.send_with(&self.sender).await?;
        // done
        Ok(())
    }
}
|
use crate::{
audio_encoder::AudioEncoder, order::frame::FrameAddress, order::*, packet::Packet,
subtitle_encoder::SubtitleEncoder, tools, video_encoder::VideoEncoder,
};
use ffmpeg_sys_next::*;
use std::{
collections::{BTreeMap, HashMap},
ffi::{c_void, CString},
ptr::null_mut,
};
/// Safe-ish wrapper around a libav `AVFormatContext`, usable either for a
/// real file (demux via `open_input` / mux via `open_output`) or for a list
/// of pre-located frames read directly by offset.
#[derive(Debug)]
pub struct FormatContext {
    pub filename: String,
    // Owned raw context; freed in Drop when non-null.
    pub format_context: *mut AVFormatContext,
    // Output streams created by the add_*_stream methods.
    streams: Vec<*mut AVStream>,
    // Optional explicit frame locations; when non-empty, next_packet reads
    // these by offset/size instead of demuxing.
    frames: Vec<FrameAddress>,
    // Index of the next entry in `frames` to serve.
    frame_index: usize,
}
impl FormatContext {
pub fn new(filename: &str) -> Result<FormatContext, String> {
Ok(FormatContext {
filename: filename.to_string(),
format_context: null_mut(),
streams: vec![],
frames: vec![],
frame_index: 0,
})
}
pub fn set_frames_addresses(&mut self, frames: &[FrameAddress]) {
self.frames = frames.to_vec();
}
pub fn open_input(&mut self) -> Result<(), String> {
unsafe {
self.format_context = avformat_alloc_context();
let filename = CString::new(self.filename.to_owned()).unwrap();
if avformat_open_input(
&mut self.format_context,
filename.as_ptr(),
null_mut(),
null_mut(),
) < 0
{
return Err(format!("Unable to open input file {:?}", self.filename));
}
avformat_find_stream_info(self.format_context, null_mut());
}
Ok(())
}
pub fn close_input(&mut self) {
unsafe {
avformat_close_input(&mut self.format_context);
}
}
pub fn open_output(
&mut self,
parameters: &HashMap<String, ParameterValue>,
) -> Result<(), String> {
unsafe {
let filename = CString::new(self.filename.to_owned()).unwrap();
if avformat_alloc_output_context2(
&mut self.format_context,
null_mut(),
null_mut(),
filename.as_ptr(),
) < 0
{
return Err(format!("Unable to open output file {:?}", self.filename));
}
set_parameters(self.format_context as *mut c_void, parameters)?;
}
Ok(())
}
pub fn add_video_stream(&mut self, encoder: &VideoEncoder) -> Result<(), String> {
unsafe {
let av_stream = avformat_new_stream(self.format_context, null_mut());
if av_stream.is_null() {
return Err("Unable to create new stream".to_owned());
}
(*av_stream).id = ((*self.format_context).nb_streams - 1) as i32;
(*av_stream).time_base = (*encoder.codec_context).time_base;
avcodec_parameters_from_context((*av_stream).codecpar, encoder.codec_context);
self.streams.push(av_stream);
}
Ok(())
}
pub fn add_audio_stream(&mut self, encoder: &AudioEncoder) -> Result<(), String> {
unsafe {
let av_stream = avformat_new_stream(self.format_context, null_mut());
if av_stream.is_null() {
return Err("Unable to create new stream".to_owned());
}
(*av_stream).id = ((*self.format_context).nb_streams - 1) as i32;
(*av_stream).time_base = (*encoder.codec_context).time_base;
avcodec_parameters_from_context((*av_stream).codecpar, encoder.codec_context);
self.streams.push(av_stream);
}
Ok(())
}
pub fn add_subtitle_stream(&mut self, encoder: &SubtitleEncoder) -> Result<(), String> {
unsafe {
let av_stream = avformat_new_stream(self.format_context, null_mut());
if av_stream.is_null() {
return Err("Unable to create new stream".to_owned());
}
(*av_stream).id = ((*self.format_context).nb_streams - 1) as i32;
(*av_stream).time_base = (*encoder.codec_context).time_base;
avcodec_parameters_from_context((*av_stream).codecpar, encoder.codec_context);
self.streams.push(av_stream);
}
Ok(())
}
pub fn get_stream(&self, stream_index: isize) -> *mut AVStream {
unsafe { *(*self.format_context).streams.offset(stream_index) }
}
pub fn get_nb_streams(&self) -> u32 {
if !self.frames.is_empty() {
return 1;
}
unsafe { (*self.format_context).nb_streams }
}
pub fn get_format_name(&self) -> String {
unsafe { tools::to_string((*(*self.format_context).iformat).name) }
}
pub fn get_format_long_name(&self) -> String {
unsafe { tools::to_string((*(*self.format_context).iformat).long_name) }
}
pub fn get_program_count(&self) -> u32 {
unsafe { (*self.format_context).nb_programs }
}
pub fn get_start_time(&self) -> Option<f32> {
unsafe {
if (*self.format_context).start_time == AV_NOPTS_VALUE {
None
} else {
Some((*self.format_context).start_time as f32 / AV_TIME_BASE as f32)
}
}
}
pub fn get_duration(&self) -> Option<f64> {
unsafe {
if (*self.format_context).duration == AV_NOPTS_VALUE {
None
} else {
Some((*self.format_context).duration as f64 / f64::from(AV_TIME_BASE))
}
}
}
pub fn get_bit_rate(&self) -> Option<i64> {
unsafe {
if (*self.format_context).bit_rate == AV_NOPTS_VALUE || (*self.format_context).bit_rate == 0 {
None
} else {
Some((*self.format_context).bit_rate)
}
}
}
pub fn get_packet_size(&self) -> u32 {
unsafe { (*self.format_context).packet_size }
}
pub fn get_stream_type(&self, stream_index: isize) -> AVMediaType {
unsafe { (*(**(*self.format_context).streams.offset(stream_index)).codecpar).codec_type }
}
pub fn get_stream_type_name(&self, stream_index: isize) -> String {
unsafe { tools::to_string(av_get_media_type_string(self.get_stream_type(stream_index))) }
}
pub fn get_codec_id(&self, stream_index: isize) -> AVCodecID {
unsafe { (*(**(*self.format_context).streams.offset(stream_index)).codecpar).codec_id }
}
/// Collects the context's metadata dictionary into an ordered map.
///
/// An empty key together with `AV_DICT_IGNORE_SUFFIX` makes `av_dict_get`
/// enumerate every entry, continuing from the previously returned tag.
pub fn get_metadata(&self) -> BTreeMap<String, String> {
    unsafe {
        let mut tag = null_mut();
        let key = CString::new("").unwrap();
        let mut metadata = BTreeMap::new();
        loop {
            tag = av_dict_get(
                (*self.format_context).metadata,
                key.as_ptr(),
                tag,
                AV_DICT_IGNORE_SUFFIX,
            );
            if tag.is_null() {
                break;
            }
            let k = tools::to_string((*tag).key);
            let v = tools::to_string((*tag).value);
            // `k` and `v` are already owned `String`s; move them instead of
            // re-allocating with a redundant `.to_string()`.
            metadata.insert(k, v);
        }
        metadata
    }
}
/// Reads the next packet from the source.
///
/// Two modes:
/// * virtual-frame mode (`self.frames` non-empty): re-opens the source
///   file, seeks to the recorded frame offset and reads exactly that
///   frame's bytes into a fresh packet;
/// * normal mode: pulls the next packet from the demuxer.
///
/// Returns `Err` at end of stream or when the demuxer read fails.
pub fn next_packet(&mut self) -> Result<Packet, String> {
    if !self.frames.is_empty() {
        if self.frame_index >= self.frames.len() {
            return Err("End of data stream".to_string());
        }
        let frame = &self.frames[self.frame_index];
        unsafe {
            let filename = CString::new(self.filename.to_owned()).unwrap();
            let mut avio_context: *mut AVIOContext = null_mut();
            check_result!(avio_open(
                &mut avio_context,
                filename.as_ptr(),
                AVIO_FLAG_READ
            ));
            // Seek failure is only logged; the read below would then return
            // data from the wrong offset.
            if avio_seek(avio_context, frame.offset as i64, 0) < 0 {
                println!("ERROR !");
            };
            let packet = av_packet_alloc();
            check_result!(av_new_packet(packet, frame.size as i32));
            check_result!(avio_read(avio_context, (*packet).data, (*packet).size));
            // NOTE(review): if any `check_result!` above returns early,
            // `avio_context` (and the allocated packet) appear to leak —
            // confirm whether the macro performs cleanup.
            check_result!(avio_close(avio_context));
            self.frame_index += 1;
            return Ok(Packet { name: None, packet });
        }
    }
    unsafe {
        let mut packet = av_packet_alloc();
        av_init_packet(packet);
        if av_read_frame(self.format_context, packet) < 0 {
            // Free the packet before reporting failure to avoid a leak.
            av_packet_free(&mut packet);
            return Err("Unable to read next packet".to_string());
        }
        Ok(Packet { name: None, packet })
    }
}
}
// SAFETY: asserts that FormatContext (which owns raw FFmpeg pointers) may be
// moved to another thread. NOTE(review): this is sound only if the wrapped
// AVFormatContext is used from one thread at a time — confirm call sites.
unsafe impl Send for FormatContext {}
impl From<*mut AVFormatContext> for FormatContext {
    /// Wrap an already-opened `AVFormatContext` pointer.
    fn from(format_context: *mut AVFormatContext) -> Self {
        // Placeholder name: this context was not opened from a file path.
        FormatContext {
            filename: String::from("virtual_source"),
            format_context,
            streams: Vec::new(),
            frames: Vec::new(),
            frame_index: 0,
        }
    }
}
impl Drop for FormatContext {
    fn drop(&mut self) {
        unsafe {
            // Free the underlying FFmpeg context; the null check guards
            // against a context that was never allocated.
            if !self.format_context.is_null() {
                avformat_free_context(self.format_context);
            }
        }
    }
}
|
use std::{sync::Arc, time::Duration};
use lavalink_rs::gateway::LavalinkEventHandler;
use poise::{
serenity::async_trait,
serenity_prelude::{Channel, GuildId, Http, Mentionable, UserId},
};
use songbird::Songbird;
use tracing::{debug, info};
use crate::{
constants::MAX_SINGLE_ENTRY_LENGTH,
types::{IdleHashMap, LastMessageHashMap},
utils::helpers::{chop_str, display_time_span},
};
/// Receives Lavalink gateway events and reacts to them: idle disconnects,
/// "Now Playing" announcements, and logging.
pub struct LavalinkHandler {
    // Per-guild channel id where the last command message was seen; used to
    // pick where to announce the next track.
    guild_last_message_map: LastMessageHashMap,
    // Per-guild instant of the last track start; used for idle tracking.
    guild_idle_map: IdleHashMap,
    http: Arc<Http>,
    songbird: Arc<Songbird>,
}
impl LavalinkHandler {
    /// Build a handler from its shared dependencies.
    pub fn new(
        guild_last_message_map: LastMessageHashMap,
        guild_idle_map: IdleHashMap,
        http: Arc<Http>,
        songbird: Arc<Songbird>,
    ) -> Self {
        Self {
            http,
            songbird,
            guild_last_message_map,
            guild_idle_map,
        }
    }
}
/// Guilds whose player has been idle longer than this (15 minutes) are
/// disconnected by the `stats` handler.
const MAX_IDLE: Duration = Duration::from_secs(900);
#[async_trait]
impl LavalinkEventHandler for LavalinkHandler {
async fn stats(
&self,
lava_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::Stats,
) {
let guild_idle_map = self.guild_idle_map.read().await;
for (guild_id, instant) in guild_idle_map.iter() {
let elapsed = instant.elapsed();
if elapsed > MAX_IDLE {
let _ = self.songbird.remove(*guild_id).await;
let _ = lava_client.destroy(*guild_id).await;
{
let mut guild_idle_map = self.guild_idle_map.write().await;
guild_idle_map.remove(guild_id);
}
}
}
info!("{:?}", event)
}
async fn player_update(
&self,
_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::PlayerUpdate,
) {
debug!("{:?}", event)
}
async fn track_start(
&self,
_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::TrackStart,
) {
{
let mut idle_time_map = self.guild_idle_map.write().await;
idle_time_map.insert(event.guild_id.0, tokio::time::Instant::now());
}
info!("{:?}", event)
}
async fn track_finish(
&self,
client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::TrackFinish,
) {
let guild_id = GuildId(event.guild_id.0);
if let Some(node) = client.nodes().await.get(&guild_id.0) {
if let Some(next_track) = node.queue.first() {
let last_message_map = self.guild_last_message_map.read().await;
if let Some(channel_id) = last_message_map.get(&guild_id.0) {
if let Ok(channel) = self.http.get_channel(channel_id.0).await {
match channel {
Channel::Guild(guild_channel) => {
let track_info = next_track.track.info.as_ref().unwrap();
let _ = guild_channel
.send_message(&self.http, |m| {
m.embed(|e| {
e.title("Now Playing")
.field(
"Track:",
format!(
"[{}]({})",
chop_str(
track_info.title.as_str(),
MAX_SINGLE_ENTRY_LENGTH
),
track_info.uri,
),
false,
)
.field(
"Duration:",
display_time_span(track_info.length),
true,
)
.field(
"Requested By:",
UserId(
next_track
.requester
.expect(
"Expected a requester associated \
with a playing track",
)
.0,
)
.mention(),
true,
)
})
})
.await;
}
_ => {}
}
}
}
}
}
}
async fn websocket_closed(
&self,
_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::WebSocketClosed,
) {
info!("{:?}", event)
}
async fn player_destroyed(
&self,
_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::PlayerDestroyed,
) {
info!("{:?}", event)
}
async fn track_exception(
&self,
_client: lavalink_rs::LavalinkClient,
event: lavalink_rs::model::TrackException,
) {
info!("{:?}", event)
}
}
|
use crate::{
auth::UserDetail,
server::{
chancomms::ControlChanMsg,
controlchan::{
error::ControlChanError,
handler::{CommandContext, CommandHandler},
Reply, ReplyCode,
},
ftpserver::options::SiteMd5,
},
storage::{StorageBackend, FEATURE_SITEMD5},
};
use async_trait::async_trait;
use std::{path::PathBuf, sync::Arc};
use tokio::sync::mpsc::Sender;
/// `SITE MD5` command handler: reports the MD5 sum of the file at `path`.
#[derive(Debug)]
pub struct Md5 {
    // Path argument as supplied by the client; resolved against the
    // session's current working directory in `handle`.
    path: PathBuf,
}
impl Md5 {
pub fn new(path: PathBuf) -> Self {
Md5 { path }
}
}
#[async_trait]
impl<Storage, User> CommandHandler<Storage, User> for Md5
where
    User: UserDetail,
    Storage: StorageBackend<User> + 'static,
{
    /// Handles `SITE MD5`: checks the server policy and backend capability,
    /// then computes the checksum on a background task and reports the
    /// outcome over the control channel.
    #[tracing_attributes::instrument]
    async fn handle(&self, args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> {
        let session = args.session.lock().await;
        let user = session.user.clone();
        let storage = Arc::clone(&session.storage);
        // Resolve the client path against the session's working directory.
        let path = session.cwd.join(self.path.clone());
        let tx_success: Sender<ControlChanMsg> = args.tx_control_chan.clone();
        let tx_fail: Sender<ControlChanMsg> = args.tx_control_chan.clone();
        let logger = args.logger;
        // Gate the command on the server's SITE MD5 policy.
        match args.sitemd5 {
            SiteMd5::All => {}
            // Accounts-only mode: reject anonymous-style logins.
            SiteMd5::Accounts => match &session.username {
                Some(u) => {
                    if u == "anonymous" || u == "ftp" {
                        return Ok(Reply::new(ReplyCode::CommandNotImplemented, "Command is not available."));
                    }
                }
                None => {
                    slog::error!(logger, "NoneError for username. This shouldn't happen.");
                    return Ok(Reply::new(ReplyCode::NotLoggedIn, "Please open a new connection to re-authenticate"));
                }
            },
            SiteMd5::None => {
                return Ok(Reply::new(ReplyCode::CommandNotImplemented, "Command is not available."));
            }
        }
        // The backend must advertise MD5 support.
        if args.storage_features & FEATURE_SITEMD5 == 0 {
            return Ok(Reply::new(ReplyCode::CommandNotImplemented, "Not supported by the selected storage back-end."));
        }
        // Compute off the control loop; the result is reported asynchronously
        // via the control channel.
        tokio::spawn(async move {
            // NOTE(review): `user` is unwrapped here; in `SiteMd5::All` mode a
            // not-yet-authenticated session would panic this task — confirm
            // login is guaranteed before SITE commands reach this handler.
            match storage.md5((*user).as_ref().unwrap(), &path).await {
                Ok(md5) => {
                    if let Err(err) = tx_success
                        .send(ControlChanMsg::CommandChannelReply(Reply::new_with_string(
                            ReplyCode::FileStatus,
                            format!("{} {}", md5, path.as_path().display()),
                        )))
                        .await
                    {
                        slog::warn!(logger, "MD5: Could not send internal message to notify of MD5 success: {}", err);
                    }
                }
                Err(err) => {
                    slog::warn!(logger, "MD5: Failed to retrieve MD5 sum for {:?} from backend: {}", path, err);
                    if let Err(err) = tx_fail.send(ControlChanMsg::StorageError(err)).await {
                        slog::warn!(logger, "MD5: Could not send internal message to notify of MD5 failure: {}", err);
                    }
                }
            }
        });
        // The actual status reply is sent later by the spawned task.
        Ok(Reply::none())
    }
}
|
use crate::ContinuumSpotTally;
use codec::{Decode, Encode, EncodeLike, Input, Output};
use frame_support::sp_runtime::traits::AccountIdConversion;
use primitives::SpotId;
use sp_runtime::{
traits::{Saturating, Zero},
RuntimeDebug,
};
use sp_std::{convert::TryFrom, prelude::*, result::Result};
// use crate::mock::AccountId;
/// Struct of every Continuum vote
#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug)]
pub struct Vote<AccountId> {
    /// True when this vote is *against* the proposal.
    pub nay: bool,
    /// The account casting the vote.
    pub who: AccountId,
}
/// Default vote: an affirmative (`nay == false`) vote from the default
/// account id.
///
/// The previous `From<u32>` bound was never used by this impl and has been
/// dropped, making the impl strictly more general (backward compatible).
impl<AccountId: Default> Default for Vote<AccountId> {
    fn default() -> Self {
        Vote {
            nay: false,
            who: Default::default(),
        }
    }
}
/// Keep track of voting activities of an account
#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, RuntimeDebug)]
pub struct Voting<AccountId> {
    /// All (spot, vote) pairs this account has cast.
    pub votes: Vec<(SpotId, AccountVote<AccountId>)>,
}
/// A vote for a referendum of a particular account.
///
/// Only one form exists today; the enum shape leaves room for richer vote
/// kinds to be added later.
#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug)]
pub enum AccountVote<AccountId> {
    /// A standard continuum vote
    Standard { vote: Vote<AccountId> },
}
// impl<AccountId: From<u32> + Default> Default for AccountVote<AccountId> {
// fn default() -> Self {
// AccountVote::Standard { vote: Default::default() }
// }
// }
impl<AccountId> AccountVote<AccountId> {
pub fn vote_who(self) -> Vote<AccountId> {
match self {
AccountVote::Standard { vote } => vote,
}
}
}
|
#[doc = "Reader of register ALM2_TIME"]
pub type R = crate::R<u32, super::ALM2_TIME>;
#[doc = "Writer for register ALM2_TIME"]
pub type W = crate::W<u32, super::ALM2_TIME>;
#[doc = "Register ALM2_TIME `reset()`'s with value 0x0100_0000"]
impl crate::ResetValue for super::ALM2_TIME {
    type Type = u32;
    // Reset value: ALM_DAY (bits 24:26) = 1, all other fields and enable
    // bits cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0100_0000
    }
}
#[doc = "Reader of field `ALM_SEC`"]
pub type ALM_SEC_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ALM_SEC`"]
pub struct ALM_SEC_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_SEC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[doc = r""]
    #[doc = r" # Safety"]
    #[doc = r" The value is masked to 7 bits (bits 0:6); values that are not valid BCD seconds are not rejected."]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f);
        self.w
    }
}
#[doc = "Reader of field `ALM_SEC_EN`"]
pub type ALM_SEC_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ALM_SEC_EN`"]
pub struct ALM_SEC_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_SEC_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `ALM_MIN`"]
pub type ALM_MIN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ALM_MIN`"]
pub struct ALM_MIN_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_MIN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[doc = r""]
    #[doc = r" # Safety"]
    #[doc = r" The value is masked to 7 bits (bits 8:14); values that are not valid BCD minutes are not rejected."]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x7f << 8)) | (((value as u32) & 0x7f) << 8);
        self.w
    }
}
#[doc = "Reader of field `ALM_MIN_EN`"]
pub type ALM_MIN_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ALM_MIN_EN`"]
pub struct ALM_MIN_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_MIN_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
#[doc = "Reader of field `ALM_HOUR`"]
pub type ALM_HOUR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ALM_HOUR`"]
pub struct ALM_HOUR_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_HOUR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[doc = r""]
    #[doc = r" # Safety"]
    #[doc = r" The value is masked to 6 bits (bits 16:21); values that are not valid for the current 12/24-hour mode are not rejected."]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x3f << 16)) | (((value as u32) & 0x3f) << 16);
        self.w
    }
}
#[doc = "Reader of field `ALM_HOUR_EN`"]
pub type ALM_HOUR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ALM_HOUR_EN`"]
pub struct ALM_HOUR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_HOUR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
        self.w
    }
}
#[doc = "Reader of field `ALM_DAY`"]
pub type ALM_DAY_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ALM_DAY`"]
pub struct ALM_DAY_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_DAY_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[doc = r""]
    #[doc = r" # Safety"]
    #[doc = r" The value is masked to 3 bits (bits 24:26); values outside the documented 1-7 range are not rejected."]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 24)) | (((value as u32) & 0x07) << 24);
        self.w
    }
}
#[doc = "Reader of field `ALM_DAY_EN`"]
pub type ALM_DAY_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ALM_DAY_EN`"]
pub struct ALM_DAY_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> ALM_DAY_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
/// Read accessors for the ALM2_TIME fields (values are stored BCD-encoded).
impl R {
    #[doc = "Bits 0:6 - Alarm seconds in BCD, 0-59"]
    #[inline(always)]
    pub fn alm_sec(&self) -> ALM_SEC_R {
        ALM_SEC_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bit 7 - Alarm second enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_sec_en(&self) -> ALM_SEC_EN_R {
        ALM_SEC_EN_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 8:14 - Alarm minutes in BCD, 0-59"]
    #[inline(always)]
    pub fn alm_min(&self) -> ALM_MIN_R {
        ALM_MIN_R::new(((self.bits >> 8) & 0x7f) as u8)
    }
    #[doc = "Bit 15 - Alarm minutes enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_min_en(&self) -> ALM_MIN_EN_R {
        ALM_MIN_EN_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bits 16:21 - Alarm hours in BCD, value depending on 12/24HR mode 12HR: \\[5\\]:0=AM, 1=PM, \\[4:0\\]=1-12 24HR: \\[5:0\\]=0-23"]
    #[inline(always)]
    pub fn alm_hour(&self) -> ALM_HOUR_R {
        ALM_HOUR_R::new(((self.bits >> 16) & 0x3f) as u8)
    }
    #[doc = "Bit 23 - Alarm hour enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_hour_en(&self) -> ALM_HOUR_EN_R {
        ALM_HOUR_EN_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bits 24:26 - Alarm Day of the week in BCD, 1-7 It is up to the user to define the meaning of the values, but 1=Monday is recommended"]
    #[inline(always)]
    pub fn alm_day(&self) -> ALM_DAY_R {
        ALM_DAY_R::new(((self.bits >> 24) & 0x07) as u8)
    }
    #[doc = "Bit 31 - Alarm Day of the Week enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_day_en(&self) -> ALM_DAY_EN_R {
        ALM_DAY_EN_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
/// Write proxies for the ALM2_TIME fields; each returns a typed proxy that
/// masks and shifts the value into place.
impl W {
    #[doc = "Bits 0:6 - Alarm seconds in BCD, 0-59"]
    #[inline(always)]
    pub fn alm_sec(&mut self) -> ALM_SEC_W {
        ALM_SEC_W { w: self }
    }
    #[doc = "Bit 7 - Alarm second enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_sec_en(&mut self) -> ALM_SEC_EN_W {
        ALM_SEC_EN_W { w: self }
    }
    #[doc = "Bits 8:14 - Alarm minutes in BCD, 0-59"]
    #[inline(always)]
    pub fn alm_min(&mut self) -> ALM_MIN_W {
        ALM_MIN_W { w: self }
    }
    #[doc = "Bit 15 - Alarm minutes enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_min_en(&mut self) -> ALM_MIN_EN_W {
        ALM_MIN_EN_W { w: self }
    }
    #[doc = "Bits 16:21 - Alarm hours in BCD, value depending on 12/24HR mode 12HR: \\[5\\]:0=AM, 1=PM, \\[4:0\\]=1-12 24HR: \\[5:0\\]=0-23"]
    #[inline(always)]
    pub fn alm_hour(&mut self) -> ALM_HOUR_W {
        ALM_HOUR_W { w: self }
    }
    #[doc = "Bit 23 - Alarm hour enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_hour_en(&mut self) -> ALM_HOUR_EN_W {
        ALM_HOUR_EN_W { w: self }
    }
    #[doc = "Bits 24:26 - Alarm Day of the week in BCD, 1-7 It is up to the user to define the meaning of the values, but 1=Monday is recommended"]
    #[inline(always)]
    pub fn alm_day(&mut self) -> ALM_DAY_W {
        ALM_DAY_W { w: self }
    }
    #[doc = "Bit 31 - Alarm Day of the Week enable: 0=ignore, 1=match"]
    #[inline(always)]
    pub fn alm_day_en(&mut self) -> ALM_DAY_EN_W {
        ALM_DAY_EN_W { w: self }
    }
}
|
use std::fs::{self, File};
use std::path::PathBuf;
use std::process::Stdio;
use std::sync::atomic::{AtomicUsize, Ordering};
use tokio::{io::BufReader, prelude::*, process};
use uuid::prelude::*;
use crate::integration::common::TEST_REALM;
/// A handle to a started router; drop to close the router and delete its config dir.
pub struct RouterHandle {
    // Temporary crossbar config directory; removed on drop.
    crossbar_dir: PathBuf,
    // The running crossbar process; killed on drop.
    router: process::Child,
    // TCP port the router's websocket transport listens on.
    port: u16,
}
impl RouterHandle {
    /// Gets the URL to connect to the router.
    pub fn get_url(&self) -> String {
        let mut url = String::from("ws://127.0.0.1:");
        url.push_str(&self.port.to_string());
        url
    }
}
impl Drop for RouterHandle {
    /// Kill the router process and remove its temporary config directory.
    fn drop(&mut self) {
        // Don't panic in drop: a panic while unwinding aborts the process,
        // and kill() can legitimately fail if crossbar already exited.
        if let Err(err) = self.router.kill() {
            println!("Failed to kill router: {}", err);
        }
        if let Err(err) = fs::remove_dir_all(&self.crossbar_dir) {
            println!("Failed to delete temp dir {:?}: {}", self.crossbar_dir, err);
        }
    }
}
/// Starts a WAMP router, listening on localhost on a fresh port
/// (ports are handed out starting at 9000).
///
/// Panics if crossbar cannot be spawned, or if it exits (stdout EOF) or its
/// output cannot be read before it reports a successful boot.
pub async fn start_router() -> RouterHandle {
    // Each router gets its own port so tests can run concurrently.
    lazy_static! {
        static ref PORT_NUMBER: AtomicUsize = AtomicUsize::new(9000);
    }
    let port = PORT_NUMBER.fetch_add(1, Ordering::SeqCst) as u16;
    let crossbar_dir = set_crossbar_configuration(port).await;
    println!("Created crossbar config: {:?}", crossbar_dir);
    let mut router = process::Command::new("crossbar")
        .arg("start")
        .arg("--cbdir")
        .arg({
            let mut path = PathBuf::new();
            path.push(&crossbar_dir);
            path.push(".crossbar");
            path
        })
        // Tell python (crossbar) to flush stdout after every line
        .env("PYTHONUNBUFFERED", "1")
        // Tell python (crossbar) to use UTF-8 for I/O
        .env("PYTHONIOENCODING", "utf8")
        .stdout(Stdio::piped())
        .stderr(Stdio::inherit())
        .spawn()
        .expect("could not run crossbar");
    println!("Spawned child process: {}", router.id());
    // Wait for the router to signal that it's ready. Bail out instead of
    // spinning forever when stdout reaches EOF (crossbar died before
    // booting) or when reading its output fails — the previous loop matched
    // those cases with `_ => {}` and looped endlessly.
    let mut stdout = BufReader::new(router.stdout.as_mut().unwrap()).lines();
    loop {
        match stdout.next_line().await {
            Ok(Some(line))
                if line.contains("Ok, local node configuration booted successfully!") =>
            {
                break;
            }
            Ok(Some(_)) => {}
            Ok(None) => panic!("crossbar exited before becoming ready"),
            Err(err) => panic!("could not read crossbar output: {}", err),
        }
    }
    println!("Crossbar router ready!");
    RouterHandle {
        crossbar_dir,
        router,
        port,
    }
}
/// Creates a temporary crossbar config directory for a router listening on
/// `port` and returns the canonicalized directory path.
async fn set_crossbar_configuration(port: u16) -> PathBuf {
    // Unique directory per router so parallel tests don't collide.
    let crossbar_dir = {
        let mut path = tempfile::tempdir().unwrap().into_path();
        path.push(".");
        path.push(format!("{}", Uuid::new_v4()));
        path
    };
    // `crossbar init` scaffolds the app dir (including `.crossbar/`).
    let status = process::Command::new("crossbar")
        .arg("init")
        .arg("--appdir")
        .arg(&crossbar_dir)
        .stdout(Stdio::null())
        .status()
        .await
        .expect("could not run `crossbar init`");
    if !status.success() {
        panic!("`crossbar init` exited with status code {}", status);
    }
    // Write the configuration...
    // One router worker, one realm, an anonymous role with full call /
    // register / publish / subscribe permissions, and a websocket transport
    // on the requested port.
    let config = json!({
        "$schema": "https://raw.githubusercontent.com/crossbario/crossbar/master/crossbar.json",
        "version": 2,
        "controller": {},
        "workers": [
            {
                "type": "router",
                "realms": [
                    {
                        "name": TEST_REALM,
                        "roles": [
                            {
                                "name": "anonymous",
                                "permissions": [
                                    {
                                        "uri": "",
                                        "match": "prefix",
                                        "allow": {
                                            "call": true,
                                            "register": true,
                                            "publish": true,
                                            "subscribe": true,
                                        },
                                        "disclose": {
                                            "caller": true,
                                            "publisher": true,
                                        },
                                        "cache": true,
                                    }
                                ]
                            }
                        ]
                    }
                ],
                "transports": [
                    {
                        "type": "websocket",
                        "endpoint": {
                            "type": "tcp",
                            "port": port,
                        },
                        "debug": true,
                    }
                ]
            }
        ]
    });
    {
        let mut path = PathBuf::new();
        path.push(&crossbar_dir);
        path.push(".crossbar");
        path.push("config.json");
        // Overwrite the scaffolded config with ours.
        let file = File::create(path).expect("could not open crossbar config file");
        serde_json::to_writer_pretty(&file, &config)
            .expect("could not write to crossbar config file");
    }
    fs::canonicalize(crossbar_dir).expect("failed to canonicalize crossbar dir")
}
|
pub mod bomb;
pub mod explosion;
pub mod player;
|
/// Assert a condition is true.
///
/// * When true, return `()`.
///
/// * Otherwise, call [`panic!`] with a message and the values of the
/// expressions with their debug representations.
///
/// # Example
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// assert!(true);
/// //-> ()
/// # }
/// ```
///
/// ```rust
/// # #[macro_use] extern crate assertable; fn main() {
/// // assert!(false);
/// //-> panic!("assertion failed: false")
/// # }
/// ```
///
/// This macro has a second form where a custom message can be provided.
// `assert_eq` macro is provided by Rust `std`.
#[cfg(test)]
mod tests {
    // Note: the `#[should_panic]` expectations below include the asserted
    // expression text (e.g. "assertion failed: a"), so the local variable
    // names are part of the expected panic message — do not rename them.
    #[test]
    fn test_assert_x_arity_2_success() {
        let a = true;
        // `assert!` evaluates to `()` on success.
        let x = assert!(a);
        assert_eq!(
            x,
            ()
        );
    }
    #[test]
    #[should_panic (expected = "assertion failed: a")]
    fn test_assert_x_arity_2_failure() {
        let a = false;
        let _ = assert!(a);
    }
    #[test]
    fn test_assert_x_arity_3_success() {
        let a = true;
        // Three-argument form: custom failure message.
        let x = assert!(a, "message");
        assert_eq!(
            x,
            ()
        );
    }
    #[test]
    #[should_panic (expected = "message")]
    fn test_assert_x_arity_3_failure() {
        let a = false;
        let _ = assert!(a, "message");
    }
}
|
//! Generator macro.
// Registers this crate as a ligen binding generator backed by
// `ligen_csharp_core::Generator`.
ligen::define_binding_generator!(name = "ligen_csharp", generator = "ligen_csharp_core::Generator");
// Or if you want to create a project generator:
// ligen::define_project_generator!(name = "ligen_csharp", generator = "ligen_csharp_core::Generator"); |
/// A binary tree: either empty or a boxed node with two subtrees.
pub enum BinaryTree<T> {
    Empty,
    NonEmpty(Box<TreeNode<T>>),
}
/// A single tree node: an element plus its left and right subtrees.
pub struct TreeNode<T> {
    element: T,
    left: BinaryTree<T>,
    right: BinaryTree<T>,
}
impl<T: Ord> BinaryTree<T> {
    /// Insert `value`, preserving search order; values equal to an existing
    /// element descend into the left subtree.
    pub fn add(&mut self, value: T) {
        match *self {
            BinaryTree::Empty => {
                *self = BinaryTree::NonEmpty(Box::new(TreeNode {
                    element: value,
                    left: BinaryTree::Empty,
                    right: BinaryTree::Empty,
                }));
            }
            BinaryTree::NonEmpty(ref mut node) => {
                // Choose the subtree to descend into, then recurse.
                let subtree = if value <= node.element {
                    &mut node.left
                } else {
                    &mut node.right
                };
                subtree.add(value);
            }
        }
    }
    /// An in-order iterator over the tree's elements.
    fn iter(&self) -> TreeIter<T> {
        let mut iterator = TreeIter {
            unvisited: Vec::new(),
        };
        iterator.push_left_edge(self);
        iterator
    }
}
/// Convenience constructor: a `NonEmpty` tree from two subtrees and an element.
fn make_node<T>(left: BinaryTree<T>, element: T, right: BinaryTree<T>)
    -> BinaryTree<T>
{
    BinaryTree::NonEmpty(Box::new(TreeNode { left, element, right }))
}
impl<'a, T> Iterator for TreeIter<'a, T> {
    type Item = &'a T;
    /// Yield the next element of the in-order traversal.
    fn next(&mut self) -> Option<&'a T> {
        // `?` replaces the manual match-and-return-None on an empty stack.
        let node = self.unvisited.pop()?;
        // Before yielding `node`, queue the left edge of its right subtree
        // so its in-order successors are visited next.
        self.push_left_edge(&node.right);
        Some(&node.element)
    }
}
/// Allows `for x in &tree` — yields borrowed elements in in-order sequence.
impl<'a, T: 'a + Ord> IntoIterator for &'a BinaryTree<T> {
    type Item = &'a T;
    type IntoIter = TreeIter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// In-order iterator state: a stack of nodes whose element (and right
/// subtree) are still to be visited.
pub struct TreeIter<'a, T: 'a> {
    unvisited: Vec<&'a TreeNode<T>>
}
impl<'a, T: 'a> TreeIter<'a, T> {
    /// Walk down the leftmost spine of `tree`, pushing every node met onto
    /// the `unvisited` stack.
    fn push_left_edge(&mut self, mut tree: &'a BinaryTree<T>) {
        loop {
            match *tree {
                BinaryTree::NonEmpty(ref node) => {
                    self.unvisited.push(node);
                    tree = &node.left;
                }
                BinaryTree::Empty => break,
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn basic() {
        // Hand-built tree (not via `add`), so the traversal order reflects
        // the structure, not sortedness.
        let subtree_l = make_node(BinaryTree::Empty, "mecha", BinaryTree::Empty);
        let subtree_rl = make_node(BinaryTree::Empty, "droid", BinaryTree::Empty);
        let subtree_r = make_node(subtree_rl, "robot", BinaryTree::Empty);
        let tree = make_node(subtree_l, "Jaeger", subtree_r);
        // Iterate over it.
        let mut v = Vec::new();
        for kind in &tree {
            v.push(*kind);
        }
        // In-order: left subtree, root, then the right subtree (its left
        // child first).
        assert_eq!(v, ["mecha", "Jaeger", "droid", "robot"]);
    }
}
|
use std::path::PathBuf;
// A standard unix socket path looks like:
// /tmp/breeze/socks/config+v1+breeze+feed.content.icy:user@mc@cs.sock
pub struct UnixSocketPath;
impl UnixSocketPath {
    /// Parses a socket path into `(biz, resource_type, discovery_type)`.
    ///
    /// The file name is split on '+' (treated as path separators) and
    /// reduced to its last component, which must contain exactly three
    /// '@'-separated fields; anything else yields `None`. Only the part of
    /// the first field after ':' is kept as the biz name.
    ///
    /// Takes `&str` instead of `&String`; deref coercion keeps existing
    /// `&String` callers working.
    pub fn parse(path: &str) -> Option<(String, String, String)> {
        let base = Self::file_name(path);
        let base = Self::file_name(&base.replace('+', "/"));
        let mut fields = base.split('@');
        // Exactly three fields: a fourth `next()` must return None.
        match (fields.next(), fields.next(), fields.next(), fields.next()) {
            (Some(first), Some(resource), Some(discovery), None) => {
                // Keep only what follows ':' as the biz name.
                let start = first.find(':').map_or(0, |i| i + 1);
                Some((
                    first[start..].to_string(),
                    resource.to_string(),
                    discovery.to_string(),
                ))
            }
            _ => None,
        }
    }
    /// Last path component of `name`; panics on empty or non-UTF-8 names.
    fn file_name(name: &str) -> String {
        PathBuf::from(name)
            .file_name()
            .expect("valid file name")
            .to_str()
            .expect("not utf8 name")
            .to_string()
    }
}
|
pub(crate) mod modification_plan_tree;
use self::modification_plan_tree::ModificationPlanTree;
/// Modification plan from which an executor can do its work deterministically.
///
/// `#[derive(new)]` generates `ModificationPlan::new(plan_tree)`
/// (presumably via the `derive-new` crate — confirm in Cargo.toml).
#[derive(Clone, PartialEq, Debug, new)]
pub(crate) struct ModificationPlan {
    /// Root of the tree describing the modifications to apply.
    pub(crate) plan_tree: ModificationPlanTree,
}
|
/// An identifier, tagged with the kind of string it carries.
/// None of the variants validate their payload's format here.
#[derive(Debug)]
pub enum Ident {
    Uuid(String),
    Md5(String),
    Name(String)
}
|
use crate::color::Color;
use serde::{Deserialize, Serialize};
/// Number of decimal places used when rendering opacity values.
const OPACITY_PRECISION: usize = 3;
/// Render an ` {name}-opacity="…"` attribute (leading space included), or an
/// empty string when the opacity is indistinguishable from fully opaque at
/// the configured precision.
fn format_opacity(name: &str, opacity: f32) -> String {
    let threshold = 10f32.powi(-(OPACITY_PRECISION as i32));
    if (opacity - 1.).abs() <= threshold {
        return String::new();
    }
    format!(
        r#" {}-opacity="{:.prec$}""#,
        name,
        opacity,
        prec = OPACITY_PRECISION
    )
}
/// SVG fill paint: either a solid color or `none`.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Default, Serialize, Deserialize)]
pub struct Fill {
    // `None` renders as `fill="none"`.
    color: Option<Color>,
}
impl Fill {
    /// Solid-color fill.
    pub fn new(color: Color) -> Self {
        Self { color: Some(color) }
    }
    /// The fill color, if any.
    pub fn color(&self) -> Option<Color> {
        self.color
    }
    /// Explicit `fill="none"`.
    pub fn none() -> Self {
        Self { color: None }
    }
    /// Render as an SVG `fill` attribute (with opacity when not opaque).
    pub fn render(&self) -> String {
        self.color.map_or_else(
            || r#" fill="none""#.to_string(),
            |c| format!(r##" fill="#{}"{}"##, c.rgb_hex(), format_opacity("fill", c.a())),
        )
    }
}
/// SVG stroke paint: a color plus a stroke width.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Default, Serialize, Deserialize)]
pub struct Stroke {
    color: Color,
    width: f32,
}
impl Stroke {
    /// Stroke with the given color and width.
    pub fn new(color: Color, width: f32) -> Self {
        Self { color, width }
    }
    /// Stroke color.
    pub fn color(&self) -> Color {
        self.color
    }
    /// Stroke width.
    pub fn width(&self) -> f32 {
        self.width
    }
    /// Render as SVG `stroke`/`stroke-width` attributes (with opacity when
    /// not opaque).
    pub fn render(&self) -> String {
        let hex = self.color.rgb_hex();
        let opacity = format_opacity("stroke", self.color.a());
        format!(r##" stroke="#{}"{} stroke-width="{}""##, hex, opacity, self.width)
    }
}
/// Combined path style: optional stroke and optional fill.
#[repr(C)]
#[derive(Debug, Clone, Copy, PartialEq, Default, Serialize, Deserialize)]
pub struct PathStyle {
    stroke: Option<Stroke>,
    fill: Option<Fill>,
}
impl PathStyle {
    /// Style with optional stroke and fill components.
    pub fn new(stroke: Option<Stroke>, fill: Option<Fill>) -> Self {
        Self { stroke, fill }
    }
    /// Fill component, if set.
    pub fn fill(&self) -> Option<Fill> {
        self.fill
    }
    /// Stroke component, if set.
    pub fn stroke(&self) -> Option<Stroke> {
        self.stroke
    }
    /// Replace the fill component.
    pub fn set_fill(&mut self, fill: Fill) {
        self.fill = Some(fill);
    }
    /// Replace the stroke component.
    pub fn set_stroke(&mut self, stroke: Stroke) {
        self.stroke = Some(stroke);
    }
    /// Remove the fill component.
    pub fn clear_fill(&mut self) {
        self.fill = None;
    }
    /// Remove the stroke component.
    pub fn clear_stroke(&mut self) {
        self.stroke = None;
    }
    /// Render fill attributes followed by stroke attributes; unset
    /// components contribute nothing.
    pub fn render(&self) -> String {
        let fill_attrs = self.fill.map(|f| f.render()).unwrap_or_default();
        let stroke_attrs = self.stroke.map(|s| s.render()).unwrap_or_default();
        format!("{}{}", fill_attrs, stroke_attrs)
    }
}
|
#[doc = "Register `SR` reader"]
pub type R = crate::R<SR_SPEC>;
// Only reader types are defined below: all SR flags are hardware-managed
// status bits.
#[doc = "Field `PVU` reader - Watchdog prescaler value update This bit is set by hardware to indicate that an update of the prescaler value is ongoing. It is reset by hardware when the prescaler update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Prescaler value can be updated only when PVU bit is reset."]
pub type PVU_R = crate::BitReader;
#[doc = "Field `RVU` reader - Watchdog counter reload value update This bit is set by hardware to indicate that an update of the reload value is ongoing. It is reset by hardware when the reload value update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Reload value can be updated only when RVU bit is reset."]
pub type RVU_R = crate::BitReader;
#[doc = "Field `WVU` reader - Watchdog counter window value update This bit is set by hardware to indicate that an update of the window value is ongoing. It is reset by hardware when the reload value update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Window value can be updated only when WVU bit is reset."]
pub type WVU_R = crate::BitReader;
/// Read accessors for the IWDG SR status flags (bits 0-2).
impl R {
    #[doc = "Bit 0 - Watchdog prescaler value update This bit is set by hardware to indicate that an update of the prescaler value is ongoing. It is reset by hardware when the prescaler update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Prescaler value can be updated only when PVU bit is reset."]
    #[inline(always)]
    pub fn pvu(&self) -> PVU_R {
        PVU_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Watchdog counter reload value update This bit is set by hardware to indicate that an update of the reload value is ongoing. It is reset by hardware when the reload value update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Reload value can be updated only when RVU bit is reset."]
    #[inline(always)]
    pub fn rvu(&self) -> RVU_R {
        RVU_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Watchdog counter window value update This bit is set by hardware to indicate that an update of the window value is ongoing. It is reset by hardware when the reload value update operation is completed in the VDD voltage domain (takes up to five LSI cycles). Window value can be updated only when WVU bit is reset."]
    #[inline(always)]
    pub fn wvu(&self) -> WVU_R {
        WVU_R::new(((self.bits >> 2) & 1) != 0)
    }
}
#[doc = "IWDG status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR_SPEC;
impl crate::RegisterSpec for SR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr::R`](R) reader structure"]
impl crate::Readable for SR_SPEC {}
// No `Writable` impl: SR is exposed as a read-only register in this API.
#[doc = "`reset()` method sets SR to value 0"]
impl crate::Resettable for SR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[doc = "Register `FDCAN_TEST` reader"]
pub type R = crate::R<FDCAN_TEST_SPEC>;
#[doc = "Register `FDCAN_TEST` writer"]
pub type W = crate::W<FDCAN_TEST_SPEC>;
#[doc = "Field `LBCK` reader - Loop back mode"]
pub type LBCK_R = crate::BitReader;
#[doc = "Field `LBCK` writer - Loop back mode"]
pub type LBCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TX` reader - Control of transmit pin"]
pub type TX_R = crate::FieldReader;
#[doc = "Field `TX` writer - Control of transmit pin"]
pub type TX_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `RX` reader - Receive pin Monitors the actual value of pin FDCANx_RX"]
// `RX` has a reader only: the receive pin is not writable through this API.
pub type RX_R = crate::BitReader;
/// Read accessors for the FDCAN_TEST fields.
impl R {
    #[doc = "Bit 4 - Loop back mode"]
    #[inline(always)]
    pub fn lbck(&self) -> LBCK_R {
        LBCK_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 5:6 - Control of transmit pin"]
    #[inline(always)]
    pub fn tx(&self) -> TX_R {
        TX_R::new(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 7 - Receive pin Monitors the actual value of pin FDCANx_RX"]
    #[inline(always)]
    pub fn rx(&self) -> RX_R {
        RX_R::new(((self.bits >> 7) & 1) != 0)
    }
}
/// Write proxies for the FDCAN_TEST fields (`RX` is read-only and absent).
impl W {
    #[doc = "Bit 4 - Loop back mode"]
    #[inline(always)]
    #[must_use]
    pub fn lbck(&mut self) -> LBCK_W<FDCAN_TEST_SPEC, 4> {
        LBCK_W::new(self)
    }
    #[doc = "Bits 5:6 - Control of transmit pin"]
    #[inline(always)]
    #[must_use]
    pub fn tx(&mut self) -> TX_W<FDCAN_TEST_SPEC, 5> {
        TX_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[doc = ""]
    #[doc = " # Safety"]
    #[doc = " Raw bits are written unmasked; values may put the register into reserved or invalid states."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "FDCAN test register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_test::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fdcan_test::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FDCAN_TEST_SPEC;
impl crate::RegisterSpec for FDCAN_TEST_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_test::R`](R) reader structure"]
impl crate::Readable for FDCAN_TEST_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fdcan_test::W`](W) writer structure"]
impl crate::Writable for FDCAN_TEST_SPEC {
    // No bits are declared write-1-to-modify in either direction.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FDCAN_TEST to value 0"]
impl crate::Resettable for FDCAN_TEST_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use actix_web::web::Data;
use actix_web::Responder;
use tracing::info;
use tracing::instrument;
use htsget_http::get_service_info_json as get_base_service_info_json;
use htsget_http::Endpoint;
use htsget_search::htsget::HtsGet;
use crate::handlers::pretty_json::PrettyJson;
use crate::AppState;
/// Builds the service-info JSON response for the given endpoint, wrapped in
/// a pretty-printing responder.
#[instrument(skip(app_state))]
pub fn get_service_info_json<H: HtsGet + Send + Sync + 'static>(
    app_state: &AppState<H>,
    endpoint: Endpoint,
) -> impl Responder {
    info!(endpoint = ?endpoint, "service info request");
    // Delegate the actual payload construction to htsget_http.
    let body = get_base_service_info_json(
        endpoint,
        app_state.htsget.clone(),
        &app_state.config_service_info,
    );
    PrettyJson(body)
}
/// Handles the reads service-info endpoint.
pub async fn reads_service_info<H: HtsGet + Send + Sync + 'static>(
    app_state: Data<AppState<H>>,
) -> impl Responder {
    let state = app_state.get_ref();
    get_service_info_json(state, Endpoint::Reads)
}
/// Handles the variants service-info endpoint.
pub async fn variants_service_info<H: HtsGet + Send + Sync + 'static>(
    app_state: Data<AppState<H>>,
) -> impl Responder {
    let state = app_state.get_ref();
    get_service_info_json(state, Endpoint::Variants)
}
|
extern crate nalgebra as na;
extern crate nalgebra_glm as glm;
use nalgebra::Matrix4;
use nalgebra::Point3;
use nalgebra::Vector3;
/// Flat, GPU-upload-friendly snapshot of a camera's matrices and position.
/// Each matrix is 16 consecutive f32s taken from a nalgebra `Matrix4` by
/// linear indexing (column-major — TODO confirm the consumer expects that).
#[derive(Default, Debug, Clone, Copy)]
pub struct CameraMatrices {
    pub projection_matrix: [f32; 16],
    pub view_matrix: [f32; 16],
    pub camera_position: [f32; 3],
}
impl CameraMatrices {
    /// Regroups a flat 16-element matrix into four rows of four, preserving
    /// element order.
    fn allign(m: [f32; 16]) -> [[f32; 4]; 4] {
        let mut grouped = [[0.0f32; 4]; 4];
        for (row_idx, row) in grouped.iter_mut().enumerate() {
            row.copy_from_slice(&m[row_idx * 4..row_idx * 4 + 4]);
        }
        grouped
    }
    /// The projection matrix regrouped as a 4x4 array.
    pub fn alligned_projection_matrix(self) -> [[f32; 4]; 4] {
        Self::allign(self.projection_matrix)
    }
    /// The view matrix regrouped as a 4x4 array.
    pub fn alligned_view_matrix(self) -> [[f32; 4]; 4] {
        Self::allign(self.view_matrix)
    }
    /// An all-zero 4x4 array.
    pub fn emmpty() -> [[f32; 4]; 4] {
        [[0.0; 4]; 4]
    }
}
/// Abstraction over a camera that can produce view/projection matrices.
pub trait ViewAndProject {
    fn view_m(&self) -> Matrix4<f32>;
    fn proj_m(&self) -> Matrix4<f32>;
    fn camera_p(&self) -> Point3<f32>;
    fn update_ar(&mut self, aspect_ratio: f32);
    fn update_fov(&mut self, fov: f32);
    /// Snapshots the current matrices and eye position into a flat,
    /// copyable struct suitable for GPU upload.
    fn get_matrices(&self) -> CameraMatrices {
        let projection = self.proj_m();
        let view = self.view_m();
        let eye = self.camera_p();
        let mut projection_matrix = [0.0f32; 16];
        let mut view_matrix = [0.0f32; 16];
        // Matrix4 supports linear indexing; copy the 16 elements in order.
        for i in 0..16 {
            projection_matrix[i] = projection[i];
            view_matrix[i] = view[i];
        }
        CameraMatrices {
            projection_matrix,
            view_matrix,
            camera_position: [eye[0], eye[1], eye[2]],
        }
    }
}
/// Builds a look-at view matrix from `eye` toward `dest`.
/// Note the up vector points along -Y — presumably to match the render
/// target's coordinate convention (TODO confirm).
fn calcullate_view_m(eye: Point3<f32>, dest: Point3<f32>) -> Matrix4<f32> {
    let up: Vector3<f32> = Vector3::new(0.0, -1.0, 0.0);
    let target = Vector3::new(dest[0], dest[1], dest[2]);
    let origin = Vector3::new(eye[0], eye[1], eye[2]);
    glm::look_at(&origin, &target, &up)
}
/// Builds a perspective projection matrix.
/// Note the argument swap: this function takes `fov` first, but
/// `glm::perspective` takes the aspect ratio first.
fn calcullate_proj_m(
    fov: f32,
    aspect_ratio: f32,
    near_plane_dist: f32,
    far_plane_dist: f32,
) -> Matrix4<f32> {
    glm::perspective(aspect_ratio, fov, near_plane_dist, far_plane_dist)
}
/// An orbit camera that stays "stuck" to a sphere of radius `distance`
/// around the origin, oriented by yaw/pitch angles (radians).
#[derive(Debug, Copy, Clone)]
pub struct StickyRotatingCamera {
    view_m: Matrix4<f32>,       // cached view matrix, rebuilt on rotation
    proj_m: Matrix4<f32>,       // cached projection, rebuilt on fov/aspect change
    yaw: f32,                   // rotation around the vertical axis
    pitch: f32,                 // elevation angle
    distance: f32,              // orbit radius
    fov: f32,                   // vertical field of view
    aspect_ratio: f32,
    near_plane_dist: f32,
    far_plane_dist: f32,
    eye: Point3<f32>,           // cached eye position derived from the angles
}
impl StickyRotatingCamera {
    /// Computes the eye position on a sphere of radius `distance` around the
    /// origin from yaw/pitch angles in radians.
    fn calculate_eye(distance: f32, yaw: f32, pitch: f32) -> Point3<f32> {
        let x = yaw.cos() * pitch.cos();
        let y = -pitch.sin();
        let z = yaw.sin() * pitch.cos();
        Point3::new(x * distance, y * distance, z * distance)
    }
    /// Creates a camera orbiting the origin with default projection settings
    /// (45° fov, 16:9 aspect, near 0.1, far 1000).
    pub fn new(distance: f32, yaw: f32, pitch: f32) -> Self {
        // Fixed typo: "insance" -> "instance".
        log::trace!("instance of {}", std::any::type_name::<Self>());
        let eye = Self::calculate_eye(distance, yaw, pitch);
        let fov: f32 = 45.0;
        let aspect_ratio: f32 = 16.0 / 9.0;
        let near_plane_dist: f32 = 0.1;
        let far_plane_dist: f32 = 1000.0;
        let view_m: Matrix4<f32> = calcullate_view_m(eye, Point3::new(0.0, 0.0, 0.0));
        let proj_m: Matrix4<f32> =
            calcullate_proj_m(fov, aspect_ratio, near_plane_dist, far_plane_dist);
        StickyRotatingCamera {
            view_m,
            proj_m,
            yaw,
            pitch,
            distance,
            fov,
            aspect_ratio,
            near_plane_dist,
            far_plane_dist,
            eye,
        }
    }
    /// Rotates the camera to a new yaw angle, refreshing the view matrix and
    /// the cached eye position.
    pub fn set_yaw(&mut self, yaw: f32) {
        let eye = Self::calculate_eye(self.distance, yaw, self.pitch);
        self.view_m = calcullate_view_m(eye, Point3::new(0.0, 0.0, 0.0));
        // Bug fix: the cached eye was previously left stale, so camera_p()
        // (and CameraMatrices::camera_position) reported the old position.
        self.eye = eye;
        self.yaw = yaw;
    }
    /// Rotates the camera to a new pitch angle, refreshing the view matrix
    /// and the cached eye position.
    pub fn set_pitch(&mut self, pitch: f32) {
        let eye = Self::calculate_eye(self.distance, self.yaw, pitch);
        self.view_m = calcullate_view_m(eye, Point3::new(0.0, 0.0, 0.0));
        // Bug fix: keep the cached eye in sync (see set_yaw).
        self.eye = eye;
        self.pitch = pitch;
    }
}
impl ViewAndProject for StickyRotatingCamera {
    /// Returns the cached view matrix.
    fn view_m(&self) -> Matrix4<f32> {
        self.view_m
    }
    /// Returns the cached projection matrix.
    fn proj_m(&self) -> Matrix4<f32> {
        self.proj_m
    }
    /// Rebuilds the projection for a new viewport aspect ratio.
    fn update_ar(&mut self, aspect_ratio: f32) {
        let proj_m: Matrix4<f32> = calcullate_proj_m(
            self.fov,
            aspect_ratio,
            self.near_plane_dist,
            self.far_plane_dist,
        );
        self.aspect_ratio = aspect_ratio;
        self.proj_m = proj_m;
    }
    /// Rebuilds the projection for a new field of view.
    fn update_fov(&mut self, fov: f32) {
        let proj_m: Matrix4<f32> = calcullate_proj_m(
            fov,
            self.aspect_ratio,
            self.near_plane_dist,
            self.far_plane_dist,
        );
        self.fov = fov;
        self.proj_m = proj_m;
    }
    /// Returns the eye position cached at the last update.
    fn camera_p(&self) -> Point3<f32> {
        self.eye
    }
}
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::ast::ReturnStatement;
use crate::parser::Parser;
/// Parses the remainder of a `return` statement (the `return` keyword has
/// already been consumed): a bare `return` yields no value, otherwise the
/// following expression becomes the return value.
///
/// Returns `None` when expression parsing or end-of-statement checking fails
/// (errors are recorded on the parser).
pub fn parse(parser: &mut Parser) -> Option<ReturnStatement> {
    if Parser::end_of_statement_token(parser.peek_token().token_type) {
        parser.advance();
        return Some(ReturnStatement { value: None });
    }
    let value = parser.parse_expression()?;
    parser.expect_end_of_statement()?;
    // Idiom fix: tail expression instead of an explicit `return ...;`.
    Some(ReturnStatement { value: Some(value) })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::lexer::Lexer;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    // `return <expr>` keeps the expression as the statement's value.
    #[test]
    fn parses_return_statement() {
        let mut parser = Parser::new(Lexer::new("return l:foo"));
        let program = parser.parse();
        assert_eq!(parser.errors, &[]);
        assert_eq!(
            program.dump_for_testing(),
            json!([{"return": {"value": {"identifier": "l:foo"}}}]),
        );
    }
    // A bare `return` produces a statement with no value.
    #[test]
    fn parses_empty_return() {
        let mut parser = Parser::new(Lexer::new("return"));
        let program = parser.parse();
        assert_eq!(parser.errors, &[]);
        assert_eq!(program.dump_for_testing(), json!([{"return": {}}]));
    }
}
|
use render::RenderBuilder;
use rise_stylesheet::styles::style::{Style, StyleExt};
use rise_stylesheet::yoga::context::{get_context, get_context_mut, ContextExt, NodeContextExt};
use rise_stylesheet::yoga::yoga_sys;
use rise_stylesheet::yoga::Node;
use std::any::Any;
use std::boxed::Box;
use std::cell::RefCell;
use std::ops::Deref;
use std::rc::Rc;
/// Per-node payload stored inside a yoga node's raw context pointer.
pub struct NodeContext {
    // The node's style implementation (trait object).
    pub style: Box<StyleExt>,
    // Opaque user data; also reachable through the `Deref` impl below.
    pub data: Box<Any>,
}
// Marker impl: allows NodeContext to be stored as a yoga context value.
impl ContextExt for NodeContext {
}
/// Style-related helpers layered on top of a yoga `Node`.
pub trait StyleContextExt {
    /// Draws this node's own style; None when no context is attached.
    fn draw(&self, render: Rc<RefCell<RenderBuilder>>) -> Option<()>;
    /// Mutable access to the node's styles, if a context is attached.
    fn get_mut_styles(&self) -> Option<&mut Box<StyleExt>>;
    /// Recursively draws this node and all of its children.
    fn render(&self, render: Rc<RefCell<RenderBuilder>>);
    /// Shared access to the node's styles, if a context is attached.
    fn get_styles(&self) -> Option<&Box<StyleExt>>;
}
impl StyleContextExt for Node {
    fn get_styles(&self) -> Option<&Box<StyleExt>> {
        let context_any = get_context(&self.inner_node)?;
        let context = context_any.downcast_ref::<NodeContext>()?;
        Some(&context.style)
    }
    // NOTE(review): hands out `&mut` through `&self` via the raw yoga
    // context pointer; the no-aliasing invariant must be upheld by callers —
    // confirm upstream.
    fn get_mut_styles(&self) -> Option<&mut Box<StyleExt>> {
        let context_any = get_context_mut(&self.inner_node)?;
        let context = context_any.downcast_mut::<NodeContext>()?;
        Some(&mut context.style)
    }
    fn draw(&self, render: Rc<RefCell<RenderBuilder>>) -> Option<()> {
        let styles = self.get_mut_styles()?;
        styles.draw(&self, render);
        Some(())
    }
    // Draws this node, then each child; after every child, one stacking
    // context and one clip id are popped — presumably matching pushes done
    // during the child's draw (TODO confirm against StyleExt::draw).
    fn render(&self, render: Rc<RefCell<RenderBuilder>>) {
        self.draw(render.clone());
        if let Some(childrens) = self.childrens() {
            for children in childrens.iter() {
                children.render(render.clone());
                render.borrow_mut().builder.pop_stacking_context();
                render.borrow_mut().builder.pop_clip_id();
            }
        }
    }
}
// Dereferencing a NodeContext yields its user data (not its style).
impl Deref for NodeContext {
    type Target = Box<Any>;
    fn deref(&self) -> &Box<Any> {
        &self.data
    }
}
impl NodeContextExt<NodeContext> for Node {
    /// Builds a node and immediately attaches the given context.
    fn new_with_context(context: NodeContext) -> Node {
        let mut node = Node::new();
        node.set_context(Some(context));
        node
    }
    /// Installs (or clears, for `None`) the context on the underlying yoga
    /// node, releasing any previously attached context first.
    fn set_context(&mut self, value: Option<NodeContext>) {
        use std::os::raw::c_void;
        let prev_raw = unsafe { yoga_sys::YGNodeGetContext(self.inner_node) };
        NodeContext::drop_raw(prev_raw);
        // None -> null; Some -> leak the box into a raw pointer owned by yoga.
        let raw: *mut c_void = value.map_or_else(|| ::std::ptr::null_mut(), |context| NodeContext::into_raw(Box::new(context)));
        unsafe { yoga_sys::YGNodeSetContext(self.inner_node, raw) }
    }
    fn get_own_context(&self) -> Option<&Box<Any>> {
        let context_any = get_context(&self.inner_node)?;
        let context = context_any.downcast_ref::<NodeContext>()?;
        Some(&context.data)
    }
    fn get_own_context_mut(&self) -> Option<&mut Box<Any>> {
        let context_any = get_context_mut(&self.inner_node)?;
        let context = context_any.downcast_mut::<NodeContext>()?;
        Some(&mut context.data)
    }
    // Intentionally a no-op: the original drop code is commented out.
    // NOTE(review): this means drop_context never frees the raw context —
    // confirm the release happens elsewhere or this leaks.
    fn drop_context(&mut self) {
        // let prev_raw = unsafe { yoga_sys::YGNodeGetContext(self.inner_node) };
        // NodeContext::drop_raw(prev_raw);
    }
}
|
use super::{Block, BlockId, Field};
use crate::{random_id::U128Id, resource::ResourceId, JsObject, Promise};
use ordered_float::OrderedFloat;
use std::collections::BTreeMap;
use std::collections::HashSet;
use wasm_bindgen::{prelude::*, JsCast};
/// A named tab holding items ordered by a floating-point timestamp.
#[derive(Clone)]
pub struct Tab {
    name: String,
    // OrderedFloat gives f64 a total order so it can key a BTreeMap.
    items: BTreeMap<OrderedFloat<f64>, BlockId>,
}
impl Tab {
    /// Creates an empty tab with the given display name.
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            items: BTreeMap::new(),
        }
    }
    /// The tab's display name.
    pub fn name(&self) -> &String {
        &self.name
    }
    /// Replaces the tab's display name.
    pub fn set_name(&mut self, name: String) {
        self.name = name;
    }
    /// Inserts an item keyed by timestamp; an existing item at the exact
    /// same timestamp is replaced (BTreeMap semantics).
    pub fn insert(&mut self, timestamp: f64, item: BlockId) {
        self.items.insert(OrderedFloat(timestamp), item);
    }
    /// Iterates `(timestamp, item)` pairs in ascending timestamp order.
    pub fn iter(&self) -> impl ExactSizeIterator<Item = (f64, &BlockId)> + DoubleEndedIterator {
        // OrderedFloat is Copy: deref instead of the previous clone().
        self.items.iter().map(|(t, b)| ((*t).into(), b))
    }
    /// Number of items in the tab.
    pub fn len(&self) -> usize {
        self.items.len()
    }
    /// True when the tab holds no items (added to complement `len`,
    /// per clippy's len_without_is_empty).
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
}
impl Block for Tab {
    /// Serializes the tab into a JS object: `{ name, items: [[time, id], ...] }`.
    fn pack(&self) -> Promise<JsValue> {
        let items = array![];
        for (time, item) in &self.items {
            let time: f64 = time.clone().into();
            items.push(array![time, item.to_jsvalue()].as_ref());
        }
        let data = object! {
            name: &self.name,
            items: items
        };
        let data: js_sys::Object = data.into();
        let data: JsValue = data.into();
        Promise::new(|resolve| resolve(Some(data)))
    }
    /// Reconstructs a Tab from the object produced by `pack`.
    /// Resolves with None when the value is malformed; individual items
    /// with an invalid time or id are silently skipped.
    fn unpack(field: &mut Field, val: JsValue) -> Promise<Box<Self>> {
        let self_ = if let Ok(val) = val.dyn_into::<JsObject>() {
            let name = val.get("name").and_then(|name| name.as_string());
            let items = val.get("items").map(|items| {
                let items: js_sys::Object = items.into();
                js_sys::Array::from(&items)
            });
            if let (Some(name), Some(raw_items)) = (name, items) {
                let mut items = BTreeMap::new();
                for item in raw_items.to_vec() {
                    // Each entry is expected to be a [time, id] pair.
                    let item = js_sys::Array::from(&item);
                    if let (Some(time), Some(id)) = (
                        item.get(0).as_f64().map(|t| OrderedFloat(t)),
                        U128Id::from_jsvalue(&item.get(1)).map(|id| field.block_id(id)),
                    ) {
                        items.insert(time, id);
                    }
                }
                Some(Box::new(Self { name, items }))
            } else {
                None
            }
        } else {
            None
        };
        Promise::new(move |resolve| resolve(self_))
    }
    /// Blocks this tab depends on: each item still present in `field`,
    /// plus that item's own dependents (one level of recursion via Item).
    fn dependents(&self, field: &Field) -> HashSet<BlockId> {
        let mut deps = set! {};
        for (_, block_id) in &self.items {
            if let Some(block) = field.get::<super::Item>(block_id) {
                let block_deps = block.dependents(field);
                for block_dep in block_deps {
                    deps.insert(block_dep);
                }
                deps.insert(block_id.clone());
            }
        }
        deps
    }
    /// Union of resources referenced by the tab's items.
    fn resources(&self, field: &Field) -> HashSet<ResourceId> {
        let mut reses = set! {};
        for (_, block_id) in &self.items {
            if let Some(block) = field.get::<super::Item>(block_id) {
                let block_reses = block.resources(field);
                for block_res in block_reses {
                    reses.insert(block_res);
                }
            }
        }
        reses
    }
}
|
use crate::appkit::NSMenu;
use crate::base::id;
use crate::base::Sel;
use crate::foundation::NSString;
// https://developer.apple.com/documentation/appkit/nsmenuitem
/// Thin wrapper around an Objective-C `NSMenuItem` instance pointer.
#[derive(Clone, Copy, Debug)]
#[repr(C)]
pub struct NSMenuItem(pub(crate) id);
impl Default for NSMenuItem {
    // A fresh, untitled menu item.
    fn default() -> Self {
        Self::new()
    }
}
impl NSMenuItem {
    /// The standard separator item (`+[NSMenuItem separatorItem]`).
    pub fn separator() -> Self {
        Self(unsafe { msg_send!(class!(NSMenuItem), separatorItem) })
    }
    /// An item wired to `terminate:` (quits the app when activated).
    /// NOTE(review): built from `alloc` without an explicit `init` —
    /// confirm this is intentional.
    pub fn terminator() -> Self {
        Self::alloc().action(sel!(terminate:))
    }
    /// A new, initialized menu item (`+new` = alloc + init).
    pub fn new() -> Self {
        Self(unsafe { msg_send!(class!(NSMenuItem), new) })
    }
    /// An allocated but uninitialized item.
    pub fn alloc() -> Self {
        Self(unsafe { msg_send!(class!(NSMenuItem), alloc) })
    }
    /// Transfers ownership to the current autorelease pool.
    pub fn autorelease(self) -> Self {
        Self(unsafe { msg_send!(self.0, autorelease) })
    }
    // The remaining methods are builder-style setters returning `self`
    // for chaining.
    pub fn title<T: Into<NSString>>(self, title: T) -> Self {
        unsafe {
            let _: () = msg_send!(self.0, setTitle: title.into());
        }
        self
    }
    pub fn action(self, action: Sel) -> Self {
        unsafe {
            let _: () = msg_send!(self.0, setAction: action);
        }
        self
    }
    pub fn target(self, target: id) -> Self {
        unsafe {
            let _: () = msg_send!(self.0, setTarget: target);
        }
        self
    }
    pub fn submenu(self, menu: NSMenu) -> Self {
        unsafe {
            let _: () = msg_send!(self.0, setSubmenu: menu);
        }
        self
    }
}
|
use libc::{c_int, c_void, free, getpid, pid_t};
use {Error, Protection, Region, Result};
pub fn get_region(address: *const u8) -> Result<Region> {
unsafe {
let mut vm_cnt = 0;
let vm = kinfo_getvmmap(getpid(), &mut vm_cnt);
if vm.is_null() {
return Err(Error::NullAddress);
}
for i in 0..vm_cnt {
// Since the struct size is given in the struct, we can use it to be future-proof
// (we won't need to update the definition here when new fields are added)
let entry = &*((vm as *const c_void).offset(i as isize * (*vm).kve_structsize as isize)
as *const kinfo_vmentry);
if address >= entry.kve_start as *const _ && address < entry.kve_end as *const _ {
return Ok(Region {
base: entry.kve_start as *const _,
size: (entry.kve_end - entry.kve_start) as _,
guarded: false,
protection: Protection::from_native(entry.kve_protection),
shared: entry.kve_type == KVME_TYPE_DEFAULT,
});
}
}
Err(Error::FreeMemory)
}
}
impl Protection {
    /// Translates FreeBSD's KVME_PROT_* bit flags into a `Protection` value.
    fn from_native(protection: c_int) -> Self {
        let mut prot = Protection::None;
        // Each KVME_PROT_* constant is a single bit, so a non-zero AND means
        // the flag is set.
        if protection & KVME_PROT_READ != 0 {
            prot |= Protection::Read;
        }
        if protection & KVME_PROT_WRITE != 0 {
            prot |= Protection::Write;
        }
        if protection & KVME_PROT_EXEC != 0 {
            prot |= Protection::Execute;
        }
        prot
    }
}
// Mirror of the leading fields of FreeBSD's `struct kinfo_vmentry`; the real
// entry is larger and `kve_structsize` gives its true per-entry size.
// Layout must match the C definition exactly — do not reorder fields.
#[repr(C)]
struct kinfo_vmentry {
    kve_structsize: c_int, // actual size of this entry in bytes
    kve_type: c_int,       // mapping type (compared against KVME_TYPE_DEFAULT)
    kve_start: u64,        // start address of the mapping
    kve_end: u64,          // end address (exclusive)
    kve_offset: u64,
    kve_vn_fileid: u64,
    kve_vn_fsid_freebsd11: u32,
    kve_flags: c_int,
    kve_resident: c_int,
    kve_private_resident: c_int,
    kve_protection: c_int, // KVME_PROT_* bit flags
}
// Selected KVME_* constants (see FreeBSD <sys/user.h>).
const KVME_TYPE_DEFAULT: c_int = 1;
const KVME_PROT_READ: c_int = 1;
const KVME_PROT_WRITE: c_int = 2;
const KVME_PROT_EXEC: c_int = 4;
#[link(name = "util")]
extern "C" {
    // libutil(3): returns a malloc'd array of entries; the caller must free it.
    fn kinfo_getvmmap(pid: pid_t, cntp: *mut c_int) -> *mut kinfo_vmentry;
}
|
// Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use wasmlib::*;
//@formatter:off
/// A single donation record, encoded/decoded with wasmlib's byte codec.
/// Field order must match `from_bytes`/`to_bytes`.
pub struct Donation {
    pub amount: i64, // amount donated
    pub donator: ScAgentId, // who donated
    pub error: String, // error to be reported to donator if anything goes wrong
    pub feedback: String, // the feedback for the person donated to
    pub timestamp: i64, // when the donation took place
}
//@formatter:on
impl Donation {
    /// Decodes a Donation from wasmlib's byte encoding.
    /// The decode order must mirror the encode order in `to_bytes`.
    pub fn from_bytes(bytes: &[u8]) -> Donation {
        let mut decode = BytesDecoder::new(bytes);
        Donation {
            amount: decode.int(),
            donator: decode.agent_id(),
            error: decode.string(),
            feedback: decode.string(),
            timestamp: decode.int(),
        }
    }
    /// Encodes this Donation with wasmlib's byte codec (inverse of
    /// `from_bytes`).
    pub fn to_bytes(&self) -> Vec<u8> {
        let mut encode = BytesEncoder::new();
        encode.int(self.amount);
        encode.agent_id(&self.donator);
        encode.string(&self.error);
        encode.string(&self.feedback);
        encode.int(self.timestamp);
        // Idiom fix: tail expression instead of `return ...;`.
        encode.data()
    }
}
|
extern crate typemap;
use self::typemap::Key;
use serenity::prelude::Mutex;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::SystemTime;
/// typemap key for the shared player registry stored in serenity's data map.
pub struct PlayerManager;
impl Key for PlayerManager {
    // Maps a player identifier string (presumably a user/guild id — TODO
    // confirm against the insertion site) to its tracked state.
    type Value = Arc<Mutex<HashMap<String, Player>>>;
}
/// Tracked kinematic state of a player.
pub struct Player {
    position: Vector3,    // last reported position
    velocity: Vector3,    // derived from consecutive position updates
    orientation: Vector3, // never updated here; set elsewhere or unused
    time: SystemTime      // timestamp of the last position update
}
impl Player {
    /// Creates a player at the origin with zero velocity, timestamped now.
    pub fn new() -> Player {
        Player {
            position: Vector3::new(0.0, 0.0, 0.0),
            velocity: Vector3::new(0.0, 0.0, 0.0),
            orientation: Vector3::new(0.0, 0.0, 0.0),
            time: SystemTime::now()
        }
    }
    /// Records a new position and derives velocity from the displacement
    /// since the previous update.
    pub fn set_position(&mut self, position: [f32; 3]) {
        let oldtime = self.time; // SystemTime is Copy; no clone needed
        self.time = SystemTime::now();
        // Bug fix: duration_since errs if the system clock stepped backwards;
        // fall back to zero elapsed instead of panicking (was `.unwrap()`).
        let dif = self.time.duration_since(oldtime).unwrap_or_default();
        let elapsed: f32 = (dif.as_secs() as f32) + (dif.subsec_nanos() as f32 / 1_000_000_000.0);
        // Bug fix: guard against a zero interval, which previously produced
        // inf/NaN velocity; keep the last known velocity in that case.
        if elapsed > 0.0 {
            self.velocity = Vector3::new(
                (position[0] - self.position.x) / elapsed,
                (position[1] - self.position.y) / elapsed,
                (position[2] - self.position.z) / elapsed
            );
        }
        self.position.x = position[0];
        self.position.y = position[1];
        self.position.z = position[2];
    }
    // Vector3 is Copy, so these return by value without an explicit clone.
    pub fn get_position(&self) -> Vector3 { self.position }
    pub fn get_velocity(&self) -> Vector3 { self.velocity }
    pub fn get_orientation(&self) -> Vector3 { self.orientation }
}
/// A simple 3-component float vector.
/// Clone is now derived (the hand-written `*self` impl was equivalent);
/// Debug/Default/PartialEq are additive conveniences.
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct Vector3 {
    pub x: f32,
    pub y: f32,
    pub z: f32
}
impl Vector3 {
    /// Creates a vector from its three components.
    pub fn new(x: f32, y: f32, z: f32) -> Vector3 {
        // Field-init shorthand replaces the redundant `x: x` form.
        Vector3 { x, y, z }
    }
}
|
use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap};
use std::mem;
use std::sync::Arc;
use std::u32;
use gimli::Reader as GimliReader;
use object::{self, ObjectSection, ObjectSymbol};
use crate::cfi::{Cfi, CfiDirective};
use crate::file::{Architecture, Arena, DebugInfo, FileHash};
use crate::function::{
Function, FunctionDetails, FunctionOffset, InlinedFunction, Parameter, ParameterOffset,
};
use crate::location::{Location, Piece, Register};
use crate::namespace::{Namespace, NamespaceKind};
use crate::range::Range;
use crate::source::Source;
use crate::types::{
ArrayType, BaseType, BaseTypeEncoding, Endianity, EnumerationType, Enumerator, FunctionType,
Inherit, Member, MemberOffset, ParameterType, PointerToMemberType, StructType, SubrangeType,
Type, TypeDef, TypeKind, TypeModifier, TypeModifierKind, TypeOffset, UnionType,
UnspecifiedType, Variant, VariantPart,
};
use crate::unit::Unit;
use crate::variable::{LocalVariable, Variable, VariableOffset};
use crate::{Address, Id, Result, Size};
/// Relocations for one section, keyed by the section offset they apply at.
pub(crate) type RelocationMap = HashMap<usize, object::Relocation>;
/// Collects `section`'s absolute and relative relocations into `relocations`,
/// keyed by section offset, resolving each addend against the target symbol's
/// or section's address so readers only need a single lookup later.
/// Invalid or unsupported entries are reported on stdout and skipped.
fn add_relocations<'input, 'file, Object>(
    relocations: &mut RelocationMap,
    file: &'file Object,
    section: &Object::Section,
) where
    Object: object::Object<'input, 'file>,
{
    for (offset64, mut relocation) in section.relocations() {
        let offset = offset64 as usize;
        // Skip entries whose offset does not round-trip through usize
        // (possible on 32-bit hosts). The original code then repeated
        // `let offset = offset as usize;` — that redundant cast is removed.
        if offset as u64 != offset64 {
            continue;
        }
        let target = match relocation.target() {
            object::RelocationTarget::Symbol(index) => {
                if let Ok(symbol) = file.symbol_by_index(index) {
                    symbol.address()
                } else {
                    println!(
                        "Relocation with invalid symbol index {} for section {} at offset 0x{:08x}",
                        index.0,
                        section.name().unwrap(),
                        offset
                    );
                    continue;
                }
            }
            object::RelocationTarget::Section(index) => {
                if let Ok(section) = file.section_by_index(index) {
                    section.address()
                } else {
                    println!(
                        "Relocation with invalid section index {} for section {} at offset 0x{:08x}",
                        index.0,
                        section.name().unwrap(),
                        offset
                    );
                    continue;
                }
            }
            _ => {
                continue;
            }
        };
        match relocation.kind() {
            object::RelocationKind::Absolute => {
                // Fold the target address into the addend now so readers can
                // apply the relocation without re-resolving the target.
                let addend = target.wrapping_add(relocation.addend() as u64);
                relocation.set_addend(addend as i64);
                if relocations.insert(offset, relocation).is_some() {
                    println!(
                        "Multiple relocations for section {} at offset 0x{:08x}",
                        section.name().unwrap(),
                        offset
                    );
                }
            }
            object::RelocationKind::Relative => {
                // PC-relative: subtract the location the value is read from.
                let addend = target
                    .wrapping_add(relocation.addend() as u64)
                    .wrapping_sub(section.address())
                    .wrapping_sub(offset as u64);
                relocation.set_addend(addend as i64);
                if relocations.insert(offset, relocation).is_some() {
                    println!(
                        "Multiple relocations for section {} at offset 0x{:08x}",
                        section.name().unwrap(),
                        offset
                    );
                }
            }
            _ => {
                println!(
                    "Unsupported relocation for section {} at offset 0x{:08x}",
                    section.name().unwrap(),
                    offset
                );
            }
        }
    }
}
/// A section reader that applies `relocations` to addresses/offsets as they
/// are read. `section` is the reader covering the whole section (used only to
/// compute the current offset); `reader` is the advancing cursor.
#[derive(Debug, Clone, Copy)]
struct Relocate<'a, R: gimli::Reader<Offset = usize>> {
    relocations: &'a RelocationMap,
    section: R,
    reader: R,
}
impl<'a, R: gimli::Reader<Offset = usize>> Relocate<'a, R> {
    /// Returns `value` adjusted by any relocation recorded at `offset`;
    /// offsets without a relocation pass `value` through unchanged.
    fn relocate(&self, offset: usize, value: u64) -> u64 {
        let relocation = match self.relocations.get(&offset) {
            Some(relocation) => relocation,
            None => return value,
        };
        match relocation.kind() {
            object::RelocationKind::Absolute | object::RelocationKind::Relative => {
                if relocation.has_implicit_addend() {
                    // Use the explicit addend too, because it may have the symbol value.
                    value.wrapping_add(relocation.addend() as u64)
                } else {
                    relocation.addend() as u64
                }
            }
            _ => value,
        }
    }
}
impl<'a, Endian> Relocate<'a, gimli::EndianSlice<'a, Endian>>
where
    Endian: gimli::Endianity,
{
    /// The remaining bytes of the underlying slice reader.
    fn slice(&self) -> &'a [u8] {
        self.reader.slice()
    }
}
// `gimli::Reader` for the relocating wrapper: reads that can carry a
// relocation (addresses, lengths, offsets) are adjusted via `relocate`;
// every other method delegates straight to `self.reader`.
impl<'a, R: gimli::Reader<Offset = usize>> gimli::Reader for Relocate<'a, R> {
    type Endian = R::Endian;
    type Offset = R::Offset;
    fn read_address(&mut self, address_size: u8) -> gimli::Result<u64> {
        // Compute the offset BEFORE reading: relocations are keyed by the
        // position of the value in the section.
        let offset = self.reader.offset_from(&self.section);
        let value = self.reader.read_address(address_size)?;
        Ok(self.relocate(offset, value))
    }
    fn read_length(&mut self, format: gimli::Format) -> gimli::Result<usize> {
        let offset = self.reader.offset_from(&self.section);
        let value = self.reader.read_length(format)?;
        <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
    }
    fn read_offset(&mut self, format: gimli::Format) -> gimli::Result<usize> {
        let offset = self.reader.offset_from(&self.section);
        let value = self.reader.read_offset(format)?;
        <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
    }
    fn read_sized_offset(&mut self, size: u8) -> gimli::Result<usize> {
        let offset = self.reader.offset_from(&self.section);
        let value = self.reader.read_sized_offset(size)?;
        <usize as gimli::ReaderOffset>::from_u64(self.relocate(offset, value as u64))
    }
    #[inline]
    fn split(&mut self, len: Self::Offset) -> gimli::Result<Self> {
        // Both halves keep the same `section` base so offsets stay correct.
        let mut other = self.clone();
        other.reader.truncate(len)?;
        self.reader.skip(len)?;
        Ok(other)
    }
    // All remaining methods simply delegate to `self.reader`.
    #[inline]
    fn endian(&self) -> Self::Endian {
        self.reader.endian()
    }
    #[inline]
    fn len(&self) -> Self::Offset {
        self.reader.len()
    }
    #[inline]
    fn empty(&mut self) {
        self.reader.empty()
    }
    #[inline]
    fn truncate(&mut self, len: Self::Offset) -> gimli::Result<()> {
        self.reader.truncate(len)
    }
    #[inline]
    fn offset_from(&self, base: &Self) -> Self::Offset {
        self.reader.offset_from(&base.reader)
    }
    #[inline]
    fn offset_id(&self) -> gimli::ReaderOffsetId {
        self.reader.offset_id()
    }
    #[inline]
    fn lookup_offset_id(&self, id: gimli::ReaderOffsetId) -> Option<Self::Offset> {
        self.reader.lookup_offset_id(id)
    }
    #[inline]
    fn find(&self, byte: u8) -> gimli::Result<Self::Offset> {
        self.reader.find(byte)
    }
    #[inline]
    fn skip(&mut self, len: Self::Offset) -> gimli::Result<()> {
        self.reader.skip(len)
    }
    #[inline]
    fn to_slice(&self) -> gimli::Result<Cow<[u8]>> {
        self.reader.to_slice()
    }
    #[inline]
    fn to_string(&self) -> gimli::Result<Cow<str>> {
        self.reader.to_string()
    }
    #[inline]
    fn to_string_lossy(&self) -> gimli::Result<Cow<str>> {
        self.reader.to_string_lossy()
    }
    #[inline]
    fn read_slice(&mut self, buf: &mut [u8]) -> gimli::Result<()> {
        self.reader.read_slice(buf)
    }
}
/// Shorthand for the relocatable endian-slice reader used everywhere below.
type Reader<'input, Endian> = Relocate<'input, gimli::EndianSlice<'input, Endian>>;
/// Parsed DWARF state for one object file: the loaded sections, frame
/// unwind info, an arena that owns interned strings/buffers, and the parsed
/// unit headers kept around for later offset-to-unit lookups.
pub(crate) struct DwarfDebugInfo<'input, Endian>
where
    Endian: gimli::Endianity,
{
    endian: Endian,
    read: gimli::Dwarf<Reader<'input, Endian>>,
    frame: DwarfFrame<Reader<'input, Endian>>,
    arena: &'input Arena,
    units: Vec<gimli::Unit<Reader<'input, Endian>, usize>>,
}
impl<'input, Endian> DwarfDebugInfo<'input, Endian>
where
    Endian: gimli::Endianity,
{
    /// Interns an attribute's string value in the arena, extending its
    /// lifetime to `'input`; None if the attribute is not a string.
    fn string(
        &self,
        dwarf_unit: &DwarfUnit<'input, Endian>,
        value: gimli::AttributeValue<Reader<'input, Endian>>,
    ) -> Option<&'input str> {
        self.read
            .attr_string(dwarf_unit, value)
            .map(|r| self.arena.add_string(r.slice()))
            .ok()
    }
    /// Finds the unit containing a .debug_info offset and returns an
    /// entries tree rooted at the DIE at that offset.
    fn tree(
        &self,
        offset: gimli::DebugInfoOffset,
    ) -> Option<(
        &DwarfUnit<'input, Endian>,
        gimli::EntriesTree<Reader<'input, Endian>>,
    )> {
        // FIXME: make this more efficient for large numbers of units
        // FIXME: cache lookups
        let offset = gimli::UnitSectionOffset::DebugInfoOffset(offset);
        for unit in &self.units {
            if let Some(offset) = offset.to_unit_offset(unit) {
                let tree = unit.entries_tree(Some(offset)).ok()?;
                return Some((unit, tree));
            }
        }
        None
    }
    /// `tree` for a type DIE offset.
    fn type_tree(
        &self,
        offset: TypeOffset,
    ) -> Option<(
        &DwarfUnit<'input, Endian>,
        gimli::EntriesTree<Reader<'input, Endian>>,
    )> {
        offset
            .get()
            .and_then(|offset| self.tree(gimli::DebugInfoOffset(offset)))
    }
    /// `tree` for a function DIE offset.
    fn function_tree(
        &self,
        offset: FunctionOffset,
    ) -> Option<(
        &DwarfUnit<'input, Endian>,
        gimli::EntriesTree<Reader<'input, Endian>>,
    )> {
        offset
            .get()
            .and_then(|offset| self.tree(gimli::DebugInfoOffset(offset)))
    }
    /// Lazily re-parses the type DIE at `offset`.
    pub(crate) fn get_type(&self, offset: TypeOffset) -> Option<Type<'input>> {
        self.type_tree(offset).and_then(|(unit, mut tree)| {
            let node = tree.root().ok()?;
            parse_unnamed_type(self, unit, node).ok()?
        })
    }
    /// Lazily parses the enumerators of the enumeration DIE at `offset`;
    /// empty on any failure.
    pub(crate) fn get_enumerators(&self, offset: TypeOffset) -> Vec<Enumerator<'input>> {
        self.type_tree(offset)
            .and_then(|(unit, mut tree)| {
                let node = tree.root().ok()?;
                parse_enumerators(self, unit, node).ok()
            })
            .unwrap_or_default()
    }
    /// Lazily parses the detailed information of the subprogram DIE at
    /// `offset`.
    pub(crate) fn get_function_details(
        &self,
        offset: FunctionOffset,
        hash: &FileHash<'input>,
    ) -> Option<FunctionDetails<'input>> {
        self.function_tree(offset).and_then(|(unit, mut tree)| {
            let node = tree.root().ok()?;
            parse_subprogram_details(hash, self, unit, node).ok()
        })
    }
    /// Call-frame information for the given address range; empty on failure.
    pub(crate) fn get_cfi(&self, range: Range) -> Vec<Cfi> {
        self.frame.get_cfi(range).unwrap_or_default()
    }
    /// Architecture-specific DWARF register name, if the architecture is
    /// supported.
    pub(crate) fn get_register_name(
        &self,
        machine: Architecture,
        register: Register,
    ) -> Option<&'static str> {
        let register_name = match machine {
            Architecture::Arm => gimli::Arm::register_name,
            Architecture::I386 => gimli::X86::register_name,
            Architecture::X86_64 => gimli::X86_64::register_name,
            _ => return None,
        };
        register_name(gimli::Register(register.0))
    }
}
/// A fully-parsed gimli unit using our relocating reader.
type DwarfUnit<'input, Endian> = gimli::Unit<Reader<'input, Endian>>;
/// A subprogram DIE whose DW_AT_specification / DW_AT_abstract_origin could
/// not be resolved on the first pass; fixed up later.
struct DwarfSubprogram<'input> {
    offset: gimli::UnitOffset,
    specification: FunctionOffset,
    abstract_origin: bool,
    function: Function<'input>,
}
/// A variable DIE awaiting specification fixup.
struct DwarfVariable<'input> {
    offset: gimli::UnitOffset,
    specification: Option<VariableOffset>,
    variable: Variable<'input>,
}
/// Loads every DWARF section from `object` (applying relocations), parses
/// each compilation unit, and returns the units together with the retained
/// debug info needed for later lazy lookups.
pub(crate) fn parse<'input: 'file, 'file, Endian, Object>(
    endian: Endian,
    object: &'file Object,
    arena: &'input Arena,
) -> Result<(Vec<Unit<'input>>, DebugInfo<'input, Endian>)>
where
    Endian: gimli::Endianity,
    Object: object::Object<'input, 'file>,
{
    // Section loader: gather relocations and wrap the (possibly
    // decompressed) bytes in a relocating reader. Missing sections become
    // empty readers.
    let get_section = |id: gimli::SectionId| -> Result<_> {
        let mut relocations = RelocationMap::default();
        let data = match object.section_by_name(id.name()) {
            Some(ref section) => {
                add_relocations(&mut relocations, object, section);
                match section.uncompressed_data()? {
                    Cow::Borrowed(bytes) => bytes,
                    // Owned data must outlive the readers; park it in the arena.
                    Cow::Owned(bytes) => arena.add_buffer(bytes),
                }
            }
            None => &[],
        };
        let relocations = arena.add_relocations(Box::new(relocations));
        let reader = gimli::EndianSlice::new(data, endian);
        Ok(Relocate {
            relocations,
            section: reader,
            reader,
        })
    };
    let read = gimli::Dwarf::load(get_section)?;
    let debug_frame = get_section(gimli::SectionId::DebugFrame)?;
    let eh_frame = get_section(gimli::SectionId::EhFrame)?;
    // Base addresses needed to interpret .eh_frame pointer encodings.
    let mut bases = gimli::BaseAddresses::default();
    if let Some(section) = object.section_by_name(".eh_frame") {
        bases = bases.set_eh_frame(section.address());
    }
    if let Some(section) = object.section_by_name(".text") {
        bases = bases.set_text(section.address());
    }
    if let Some(section) = object.section_by_name(".got") {
        bases = bases.set_got(section.address());
    }
    let frame = DwarfFrame::new(debug_frame.into(), eh_frame.into(), bases);
    let mut dwarf = DwarfDebugInfo {
        endian,
        read,
        frame,
        arena,
        units: Vec::new(),
    };
    let mut units = Vec::new();
    let mut unit_headers = dwarf.read.units();
    // parse_unit pushes each gimli unit into dwarf.units as a side effect.
    while let Some(unit_header) = unit_headers.next()? {
        let dwarf_unit = dwarf.read.unit(unit_header)?;
        units.push(parse_unit(&mut dwarf, dwarf_unit)?);
    }
    Ok((units, DebugInfo::Dwarf(dwarf)))
}
/// Parses one compilation unit: reads the CU-level attributes (name, dir,
/// language, address ranges), walks the DIE tree for namespaces, functions
/// and variables, and resolves deferred specification references.
/// On success the gimli unit is retained in `dwarf.units` for later lookups.
fn parse_unit<'input, Endian>(
    dwarf: &mut DwarfDebugInfo<'input, Endian>,
    dwarf_unit: DwarfUnit<'input, Endian>,
) -> Result<Unit<'input>>
where
    Endian: gimli::Endianity,
{
    let mut unit = Unit::default();
    let mut subprograms = Vec::new();
    let mut variables = Vec::new();
    let mut tree = dwarf_unit.entries_tree(None)?;
    let root = tree.root()?;
    let entry = root.entry();
    if entry.tag() != gimli::DW_TAG_compile_unit {
        return Err(format!("unknown CU tag: {}", entry.tag()).into());
    }
    let mut ranges = None;
    let mut high_pc = None;
    let mut size = None;
    let mut attrs = entry.attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                unit.name = dwarf.string(&dwarf_unit, attr.value()).map(Cow::Borrowed);
            }
            gimli::DW_AT_comp_dir => {
                unit.dir = dwarf.string(&dwarf_unit, attr.value()).map(Cow::Borrowed);
            }
            gimli::DW_AT_language => {
                if let gimli::AttributeValue::Language(language) = attr.value() {
                    unit.language = Some(language);
                }
            }
            gimli::DW_AT_low_pc => {
                if let gimli::AttributeValue::Addr(addr) = attr.value() {
                    unit.low_pc = Some(addr);
                }
            }
            // DW_AT_high_pc is either an absolute address or a size
            // relative to low_pc; remember which form we saw.
            gimli::DW_AT_high_pc => match attr.value() {
                gimli::AttributeValue::Addr(val) => high_pc = Some(val),
                gimli::AttributeValue::Udata(val) => size = Some(val),
                val => debug!("unknown CU DW_AT_high_pc: {:?}", val),
            },
            gimli::DW_AT_ranges => {
                if let gimli::AttributeValue::RangeListsRef(val) = attr.value() {
                    ranges = Some(val);
                }
            }
            // Attributes we deliberately ignore at the CU level.
            gimli::DW_AT_stmt_list
            | gimli::DW_AT_producer
            | gimli::DW_AT_entry_pc
            | gimli::DW_AT_APPLE_optimized
            | gimli::DW_AT_macro_info
            | gimli::DW_AT_GNU_macros
            | gimli::DW_AT_GNU_pubnames
            | gimli::DW_AT_sibling => {}
            _ => debug!("unknown CU attribute: {} {:?}", attr.name(), attr.value()),
        }
    }
    // Find ranges from attributes in order of preference:
    // DW_AT_stmt_list, DW_AT_ranges, DW_AT_high_pc, DW_AT_size.
    // TODO: include variables in ranges.
    // TODO: copy logic from addr2line
    if let Some(program) = dwarf_unit.line_program.clone() {
        // Derive ranges from the line program's address sequences.
        let mut rows = program.rows();
        let mut seq_addr = None;
        while let Some((_, row)) = rows.next_row()? {
            let addr = row.address();
            if row.end_sequence() {
                if let Some(seq_addr) = seq_addr {
                    // Sequences starting at 0 are probably invalid.
                    // TODO: is this always desired?
                    if seq_addr != 0 {
                        unit.ranges.push(Range {
                            begin: seq_addr,
                            end: addr,
                        });
                    }
                }
                seq_addr = None;
            } else if seq_addr.is_none() {
                seq_addr = Some(addr);
            }
        }
    } else if let Some(offset) = ranges {
        let offset = dwarf.read.ranges_offset_from_raw(&dwarf_unit, offset);
        let mut ranges = dwarf.read.ranges(&dwarf_unit, offset)?;
        while let Some(range) = ranges.next()? {
            // Discard empty or inverted ranges.
            if range.begin < range.end {
                unit.ranges.push(Range {
                    begin: range.begin,
                    end: range.end,
                });
            }
        }
    } else if let Some(low_pc) = unit.low_pc {
        if let Some(size) = size {
            if high_pc.is_none() {
                high_pc = low_pc.checked_add(size);
            }
        }
        if let Some(high_pc) = high_pc {
            unit.ranges.push(Range {
                begin: low_pc,
                end: high_pc,
            });
        }
    }
    unit.ranges.sort();
    // Ignore low_pc attribute if there is any range.
    unit.low_pc = unit.ranges.list().first().map(|range| range.begin);
    let namespace = None;
    parse_namespace_children(
        &mut unit,
        dwarf,
        &dwarf_unit,
        &mut subprograms,
        &mut variables,
        &namespace,
        root.children(),
    )?;
    fixup_subprogram_specifications(
        &mut unit,
        dwarf,
        &dwarf_unit,
        &mut subprograms,
        &mut variables,
    )?;
    fixup_variable_specifications(&mut unit, dwarf, &dwarf_unit, &mut variables)?;
    dwarf.units.push(dwarf_unit);
    Ok(unit)
}
#[inline(never)]
/// Repeatedly resolves subprograms whose DW_AT_specification /
/// DW_AT_abstract_origin pointed at a function not yet parsed: each pass
/// processes every subprogram whose target is now available, until either
/// all are resolved or a pass makes no progress (then the leftovers are
/// parsed anyway with whatever information they have).
fn fixup_subprogram_specifications<'input, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Convert functions to BTreeMap
    // TODO: it'd be cleaner if parse_subprogram() added functions to a BTreeMap initially,
    // so that we didn't have to keep updating it.
    let mut functions = BTreeMap::new();
    for function in unit.functions.drain(..) {
        functions.insert(function.offset, function);
    }
    let mut defer = Vec::new();
    while !subprograms.is_empty() {
        let mut progress = false;
        // Take the current batch; unresolved entries are pushed back onto
        // `subprograms` for the next pass.
        mem::swap(&mut defer, subprograms);
        for mut subprogram in defer.drain(..) {
            if inherit_subprogram(
                &functions,
                &mut subprogram.function,
                subprogram.specification,
                subprogram.abstract_origin,
            ) {
                let mut tree = dwarf_unit.entries_tree(Some(subprogram.offset))?;
                parse_subprogram_children(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &mut subprogram.function,
                    tree.root()?.children(),
                )?;
                let offset = subprogram.offset.to_unit_section_offset(dwarf_unit);
                functions.insert(offset.into(), subprogram.function);
                // parse_subprogram_children may have appended new functions;
                // fold them into the map so later passes can see them.
                for function in unit.functions.drain(..) {
                    functions.insert(function.offset, function);
                }
                progress = true;
            } else {
                subprograms.push(subprogram);
            }
        }
        if !progress {
            // No target could be resolved this pass; give up on inheritance
            // and parse the remaining subprograms as-is.
            debug!(
                "invalid specification for {} subprograms",
                subprograms.len()
            );
            mem::swap(&mut defer, subprograms);
            for mut subprogram in defer.drain(..) {
                let mut tree = dwarf_unit.entries_tree(Some(subprogram.offset))?;
                parse_subprogram_children(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &mut subprogram.function,
                    tree.root()?.children(),
                )?;
                let offset = subprogram.offset.to_unit_section_offset(dwarf_unit);
                functions.insert(offset.into(), subprogram.function);
                for function in unit.functions.drain(..) {
                    functions.insert(function.offset, function);
                }
            }
            // And keep going, because parse_subprogram_children() may have added more.
        }
    }
    unit.functions = functions.into_iter().map(|(_, x)| x).collect();
    Ok(())
}
/// Resolve `DW_AT_specification` references for variables whose handling was
/// deferred during the initial parse.
///
/// Runs a fixpoint loop: each pass inherits namespace/name/linkage-name/type
/// from the referenced variable when it is available in `variable_map`, and
/// defers the rest. If a pass makes no progress, the remaining variables are
/// kept without their specification attributes rather than dropped. The final
/// set ends up in `unit.variables`.
#[inline(never)]
fn fixup_variable_specifications<'input, Endian>(
    unit: &mut Unit<'input>,
    _dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    variables: &mut Vec<DwarfVariable<'input>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Convert variables to BTreeMap so specifications can be looked up by offset.
    let mut variable_map = BTreeMap::new();
    for variable in unit.variables.drain(..) {
        variable_map.insert(variable.offset, variable);
    }
    loop {
        let mut progress = false;
        let mut defer = Vec::new();
        for mut variable in variables.drain(..) {
            match variable.specification.and_then(|v| variable_map.get(&v)) {
                Some(specification) => {
                    // Inherit from the specification, but attributes already
                    // present on the definition take precedence.
                    let variable = &mut variable.variable;
                    variable.namespace = specification.namespace.clone();
                    if variable.name.is_none() {
                        variable.name = specification.name;
                    }
                    if variable.linkage_name.is_none() {
                        variable.linkage_name = specification.linkage_name;
                    }
                    if variable.ty.is_none() {
                        variable.ty = specification.ty;
                    }
                }
                None => {
                    // Specification not resolved yet; retry on the next pass.
                    defer.push(variable);
                    continue;
                }
            }
            let offset = variable.offset.to_unit_section_offset(dwarf_unit);
            variable_map.insert(offset.into(), variable.variable);
            progress = true;
        }
        if defer.is_empty() {
            break;
        }
        if !progress {
            debug!("invalid specification for {} variables", defer.len());
            // BUG FIX: `variables` was fully drained above, so the unresolved
            // entries live in `defer`. Draining `variables` here (as before)
            // iterated an empty vec and silently dropped the deferred
            // variables; keep them instead, without their specifications.
            for variable in defer.drain(..) {
                let offset = variable.offset.to_unit_section_offset(dwarf_unit);
                variable_map.insert(offset.into(), variable.variable);
            }
            break;
        }
        *variables = defer;
    }
    unit.variables = variable_map.into_iter().map(|(_, x)| x).collect();
    Ok(())
}
fn parse_namespace_children<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    mut iter: gimli::EntriesTreeIter<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Dispatch each child DIE of a namespace (or unit root) to its parser.
    while let Some(child) = iter.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_namespace => {
                parse_namespace(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_variable => {
                let variable = parse_variable(unit, dwarf, dwarf_unit, namespace.clone(), child)?;
                if variable.specification.is_some() {
                    // Delay handling specification in case it comes later.
                    variables.push(variable);
                } else {
                    unit.variables.push(variable.variable);
                }
            }
            gimli::DW_TAG_dwarf_procedure
            | gimli::DW_TAG_imported_declaration
            | gimli::DW_TAG_imported_module => {}
            _ => {
                // Anything else may be a type definition.
                if !parse_type(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )? {
                    debug!("unknown namespace child tag: {}", tag);
                }
            }
        }
    }
    Ok(())
}
fn parse_namespace<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Read the namespace's DW_AT_name, then recurse into its children with an
    // extended namespace path.
    let mut namespace_name = None;
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                namespace_name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_decl_file | gimli::DW_AT_decl_line | gimli::DW_AT_decl_column => {}
            _ => debug!(
                "unknown namespace attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    let namespace = Some(Namespace::new(
        namespace,
        namespace_name,
        NamespaceKind::Namespace,
    ));
    parse_namespace_children(
        unit,
        dwarf,
        dwarf_unit,
        subprograms,
        variables,
        &namespace,
        node.children(),
    )
}
/*
fn is_type_tag(tag: gimli::DwTag) -> bool {
match tag {
gimli::DW_TAG_typedef |
gimli::DW_TAG_class_type | gimli::DW_TAG_structure_type |
gimli::DW_TAG_union_type |
gimli::DW_TAG_enumeration_type |
gimli::DW_TAG_unspecified_type |
gimli::DW_TAG_base_type |
gimli::DW_TAG_array_type |
gimli::DW_TAG_subroutine_type |
gimli::DW_TAG_ptr_to_member_type |
gimli::DW_TAG_pointer_type |
gimli::DW_TAG_reference_type |
gimli::DW_TAG_const_type |
gimli::DW_TAG_packed_type |
gimli::DW_TAG_volatile_type |
gimli::DW_TAG_restrict_type |
gimli::DW_TAG_shared_type |
gimli::DW_TAG_rvalue_reference_type |
gimli::DW_TAG_atomic_type => true,
_ => false,
}
}
*/
fn parse_type<'input, 'abbrev, 'unit, 'tree, Endian>(
unit: &mut Unit<'input>,
dwarf: &DwarfDebugInfo<'input, Endian>,
dwarf_unit: &DwarfUnit<'input, Endian>,
subprograms: &mut Vec<DwarfSubprogram<'input>>,
variables: &mut Vec<DwarfVariable<'input>>,
namespace: &Option<Arc<Namespace<'input>>>,
node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<bool>
where
Endian: gimli::Endianity,
{
let tag = node.entry().tag();
let mut ty = Type::default();
let offset = node.entry().offset();
let offset = offset.to_unit_section_offset(dwarf_unit);
ty.offset = offset.into();
ty.kind = match tag {
gimli::DW_TAG_base_type => TypeKind::Base(parse_base_type(dwarf, dwarf_unit, node)?),
gimli::DW_TAG_typedef => TypeKind::Def(parse_typedef(dwarf, dwarf_unit, namespace, node)?),
// TODO: distinguish between class and structure
gimli::DW_TAG_class_type | gimli::DW_TAG_structure_type => {
TypeKind::Struct(parse_structure_type(
unit,
dwarf,
dwarf_unit,
subprograms,
variables,
namespace,
node,
)?)
}
gimli::DW_TAG_union_type => TypeKind::Union(parse_union_type(
unit,
dwarf,
dwarf_unit,
subprograms,
variables,
namespace,
node,
)?),
gimli::DW_TAG_enumeration_type => TypeKind::Enumeration(parse_enumeration_type(
ty.offset,
unit,
dwarf,
dwarf_unit,
subprograms,
variables,
namespace,
node,
)?),
gimli::DW_TAG_unspecified_type => {
TypeKind::Unspecified(parse_unspecified_type(dwarf, dwarf_unit, namespace, node)?)
}
// Parse unnamed types for validation, but don't store them.
_ => return parse_unnamed_type(dwarf, dwarf_unit, node).map(|x| x.is_some()),
};
unit.types.push(ty);
Ok(true)
}
fn parse_unnamed_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<Option<Type<'input>>>
where
    Endian: gimli::Endianity,
{
    // Parse a type DIE that has no name of its own (arrays, subranges,
    // function types, and the various type modifiers). Returns Ok(None) for
    // tags this function does not handle.
    let tag = node.entry().tag();
    let section_offset = node.entry().offset().to_unit_section_offset(dwarf_unit);
    // Most of the handled tags are type modifiers; map those to a
    // TypeModifierKind up front so a single parse_type_modifier call suffices.
    let modifier_kind = match tag {
        gimli::DW_TAG_pointer_type => Some(TypeModifierKind::Pointer),
        gimli::DW_TAG_reference_type => Some(TypeModifierKind::Reference),
        gimli::DW_TAG_const_type => Some(TypeModifierKind::Const),
        gimli::DW_TAG_packed_type => Some(TypeModifierKind::Packed),
        gimli::DW_TAG_volatile_type => Some(TypeModifierKind::Volatile),
        gimli::DW_TAG_restrict_type => Some(TypeModifierKind::Restrict),
        gimli::DW_TAG_shared_type => Some(TypeModifierKind::Shared),
        gimli::DW_TAG_rvalue_reference_type => Some(TypeModifierKind::RvalueReference),
        gimli::DW_TAG_atomic_type => Some(TypeModifierKind::Atomic),
        _ => None,
    };
    let mut ty = Type::default();
    ty.offset = section_offset.into();
    ty.kind = if let Some(kind) = modifier_kind {
        TypeKind::Modifier(parse_type_modifier(dwarf, dwarf_unit, node, kind)?)
    } else {
        match tag {
            gimli::DW_TAG_array_type => {
                TypeKind::Array(parse_array_type(dwarf, dwarf_unit, node)?)
            }
            gimli::DW_TAG_subrange_type => {
                TypeKind::Subrange(parse_subrange_type(dwarf, dwarf_unit, node)?)
            }
            gimli::DW_TAG_subroutine_type => {
                TypeKind::Function(parse_subroutine_type(dwarf, dwarf_unit, node)?)
            }
            gimli::DW_TAG_ptr_to_member_type => {
                TypeKind::PointerToMember(parse_pointer_to_member_type(dwarf, dwarf_unit, node)?)
            }
            _ => return Ok(None),
        }
    };
    Ok(Some(ty))
}
fn parse_type_modifier<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
    kind: TypeModifierKind,
) -> Result<TypeModifier<'input>>
where
    Endian: gimli::Endianity,
{
    // Build a modifier wrapping another type (pointer, const, volatile, ...).
    // The unit's address size is recorded up front; byte_size (when present)
    // overrides nothing here — both are stored as parsed.
    let mut result = TypeModifier {
        kind,
        ty: TypeOffset::none(),
        name: None,
        byte_size: Size::none(),
        address_size: Some(u64::from(dwarf_unit.header.address_size())),
    };
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => result.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    result.ty = offset;
                }
            }
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    result.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_artificial => {}
            _ => debug!(
                "unknown type modifier attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Type modifiers are expected to be leaves; log any unexpected child.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown type modifier child tag: {}", child.entry().tag());
    }
    Ok(result)
}
fn parse_base_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<BaseType<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_base_type DIE: name, byte size, encoding, and endianity.
    let mut base = BaseType::default();
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => base.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    base.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_encoding => {
                if let gimli::AttributeValue::Encoding(val) = attr.value() {
                    // Unrecognized encodings are kept as Other rather than dropped.
                    base.encoding = match val {
                        gimli::DW_ATE_boolean => BaseTypeEncoding::Boolean,
                        gimli::DW_ATE_address => BaseTypeEncoding::Address,
                        gimli::DW_ATE_signed => BaseTypeEncoding::Signed,
                        gimli::DW_ATE_signed_char => BaseTypeEncoding::SignedChar,
                        gimli::DW_ATE_unsigned => BaseTypeEncoding::Unsigned,
                        gimli::DW_ATE_unsigned_char => BaseTypeEncoding::UnsignedChar,
                        gimli::DW_ATE_float => BaseTypeEncoding::Float,
                        _ => {
                            debug!("unknown base type encoding: {} {:?}", attr.name(), val);
                            BaseTypeEncoding::Other
                        }
                    };
                }
            }
            gimli::DW_AT_endianity => {
                if let gimli::AttributeValue::Endianity(val) = attr.value() {
                    base.endianity = match val {
                        gimli::DW_END_default => Endianity::Default,
                        gimli::DW_END_big => Endianity::Big,
                        gimli::DW_END_little => Endianity::Little,
                        _ => {
                            debug!("unknown base type endianity: {} {:?}", attr.name(), val);
                            Endianity::Default
                        }
                    };
                }
            }
            gimli::DW_AT_artificial | gimli::DW_AT_decimal_scale => {}
            _ => debug!(
                "unknown base type attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Base types are leaves; log any unexpected child.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown base type child tag: {}", child.entry().tag());
    }
    Ok(base)
}
fn parse_typedef<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<TypeDef<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_typedef DIE: name, aliased type, and declaration source.
    let mut def = TypeDef {
        namespace: namespace.clone(),
        ..Default::default()
    };
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => def.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    def.ty = offset;
                }
            }
            gimli::DW_AT_decl_file => parse_source_file(dwarf, dwarf_unit, &attr, &mut def.source),
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut def.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut def.source),
            gimli::DW_AT_alignment => {}
            _ => debug!(
                "unknown typedef attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Typedefs are leaves; log any unexpected child.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown typedef child tag: {}", child.entry().tag());
    }
    Ok(def)
}
fn parse_structure_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<StructType<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_class_type or DW_TAG_structure_type DIE, including its
    // members, base classes, variant parts, methods, and nested types.
    let mut st = StructType {
        namespace: namespace.clone(),
        ..Default::default()
    };
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => st.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    st.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_declaration => {
                if let gimli::AttributeValue::Flag(flag) = attr.value() {
                    st.declaration = flag;
                }
            }
            gimli::DW_AT_decl_file => parse_source_file(dwarf, dwarf_unit, &attr, &mut st.source),
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut st.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut st.source),
            gimli::DW_AT_containing_type | gimli::DW_AT_alignment | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown struct attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Nested declarations are scoped by a namespace named after the type.
    let namespace = Some(Namespace::new(&st.namespace, st.name, NamespaceKind::Type));
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_member => {
                parse_member(&mut st.members, unit, dwarf, dwarf_unit, &namespace, child)?;
            }
            gimli::DW_TAG_inheritance => {
                parse_inheritance(&mut st.inherits, dwarf_unit, child)?;
            }
            gimli::DW_TAG_variant_part => {
                parse_variant_part(
                    &mut st.members,
                    &mut st.variant_parts,
                    unit,
                    dwarf,
                    dwarf_unit,
                    &namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_template_type_parameter
            | gimli::DW_TAG_template_value_parameter
            | gimli::DW_TAG_GNU_template_parameter_pack => {}
            _ => {
                // Anything else may be a nested type definition.
                if !parse_type(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )? {
                    debug!("unknown struct child tag: {}", tag);
                }
            }
        }
    }
    Ok(st)
}
fn parse_union_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<UnionType<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_union_type DIE, including its members, methods, and
    // nested types.
    let mut un = UnionType {
        namespace: namespace.clone(),
        ..Default::default()
    };
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => un.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    un.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_declaration => {
                if let gimli::AttributeValue::Flag(flag) = attr.value() {
                    un.declaration = flag;
                }
            }
            gimli::DW_AT_decl_file => parse_source_file(dwarf, dwarf_unit, &attr, &mut un.source),
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut un.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut un.source),
            gimli::DW_AT_alignment | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown union attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Nested declarations are scoped by a namespace named after the type.
    let namespace = Some(Namespace::new(&un.namespace, un.name, NamespaceKind::Type));
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_member => {
                parse_member(&mut un.members, unit, dwarf, dwarf_unit, &namespace, child)?;
            }
            gimli::DW_TAG_template_type_parameter => {}
            _ => {
                // Anything else may be a nested type definition.
                if !parse_type(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )? {
                    debug!("unknown union child tag: {}", tag);
                }
            }
        }
    }
    Ok(un)
}
fn parse_variant_part<'input, 'abbrev, 'unit, 'tree, Endian>(
    members: &mut Vec<Member<'input>>,
    variant_parts: &mut Vec<VariantPart<'input>>,
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_variant_part DIE: its discriminant member reference and
    // the variants it contains. Plain members found here are appended to the
    // enclosing type's member list.
    let mut part = VariantPart::default();
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_discr => {
                if let Some(offset) = parse_member_offset(dwarf_unit, &attr) {
                    part.discr = offset;
                }
            }
            gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown variant_part attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_member => {
                // Treat any members in the variant_part the same as any other member.
                // TODO: maybe set a field to indicate which variant part it belongs to.
                parse_member(members, unit, dwarf, dwarf_unit, namespace, child)?;
            }
            gimli::DW_TAG_variant => {
                parse_variant(&mut part.variants, unit, dwarf, dwarf_unit, namespace, child)?;
            }
            _ => {
                debug!("unknown variant_part child tag: {}", tag);
            }
        }
    }
    variant_parts.push(part);
    Ok(())
}
/// Parse a `DW_TAG_variant` DIE and append the result to `variants`.
///
/// Reads the discriminant value and member children. For Rust compilation
/// units, a single-struct-member variant is unwrapped: the referenced struct's
/// name and members are adopted by the variant itself (see the comment below).
fn parse_variant<'input, 'abbrev, 'unit, 'tree, Endian>(
    variants: &mut Vec<Variant<'input>>,
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut variant = Variant::default();
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_discr_value => {
                if let Some(value) = attr.udata_value() {
                    variant.discr_value = Some(value);
                }
            }
            gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown variant attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_member => {
                parse_member(
                    &mut variant.members,
                    unit,
                    dwarf,
                    dwarf_unit,
                    namespace,
                    child,
                )?;
            }
            // subrange type encountered here for Ada.
            // TODO: not sure which types are valid here.
            // Maybe need to call parse_type(), but don't have all the parameters
            // needed for it yet.
            gimli::DW_TAG_subrange_type => {
                parse_subrange_type(dwarf, dwarf_unit, child)?;
            }
            tag => {
                debug!("unknown variant child tag: {}", tag);
            }
        }
    }
    // Rust uses a single struct member for its variants, and the size of this
    // struct is set to be the same as the size of the enum. This makes it hard
    // to see the exact layout of the enum, so it's more helpful to treat the
    // struct members as being owned by the variant instead.
    if unit.language == Some(gimli::DW_LANG_Rust) && variant.members.len() == 1 {
        if let Some(offset) = variant.members[0].ty.get() {
            // Convert the member's type offset back to a unit-local offset so
            // the referenced struct's DIE subtree can be re-walked.
            let offset = gimli::UnitSectionOffset::DebugInfoOffset(gimli::DebugInfoOffset(offset));
            if let Some(offset) = offset.to_unit_offset(dwarf_unit) {
                let mut tree = dwarf_unit.entries_tree(Some(offset))?;
                let node = tree.root()?;
                if node.entry().tag() == gimli::DW_TAG_structure_type {
                    // Rust gives the struct a name that matches the variant.
                    if let Some(attr) = node.entry().attr_value(gimli::DW_AT_name)? {
                        variant.name = dwarf.string(dwarf_unit, attr);
                    }
                    // Parse the struct's members as our own.
                    variant.members.clear();
                    let mut iter = node.children();
                    while let Some(child) = iter.next()? {
                        if child.entry().tag() == gimli::DW_TAG_member {
                            parse_member(
                                &mut variant.members,
                                unit,
                                dwarf,
                                dwarf_unit,
                                namespace,
                                child,
                            )?;
                        }
                    }
                }
            }
        }
    }
    variants.push(variant);
    Ok(())
}
/// Parse a `DW_TAG_member` DIE and append the result to `members`.
///
/// Handles both DWARF 4/5 style bit fields (`DW_AT_data_bit_offset`) and the
/// older DWARF 2/3 style (`DW_AT_bit_offset` relative to an anonymous object),
/// converting the latter to an absolute bit offset after the attribute loop.
/// A member carrying `DW_AT_declaration` is treated as a variable instead
/// (C++ static data member).
fn parse_member<'input, 'abbrev, 'unit, 'tree, Endian>(
    members: &mut Vec<Member<'input>>,
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut member = Member::default();
    let offset = node.entry().offset();
    let offset = offset.to_unit_section_offset(dwarf_unit);
    member.offset = offset.into();
    // DWARF 2/3 bit field attributes, combined after the loop.
    let mut bit_offset = None;
    let mut byte_size = None;
    let mut declaration = false;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                member.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    member.ty = offset;
                }
            }
            gimli::DW_AT_data_member_location => {
                // Byte offset of the member (or of the anonymous bit field
                // container), already converted to bits by the helper.
                if let Some(offset) = parse_data_member_location(dwarf_unit, &attr) {
                    member.bit_offset = offset;
                }
            }
            gimli::DW_AT_data_bit_offset => {
                // DWARF 4/5: absolute bit offset, used directly.
                if let Some(bit_offset) = attr.udata_value() {
                    member.bit_offset = bit_offset;
                }
            }
            gimli::DW_AT_bit_offset => {
                bit_offset = attr.udata_value();
            }
            gimli::DW_AT_byte_size => {
                byte_size = attr.udata_value();
            }
            gimli::DW_AT_bit_size => {
                if let Some(bit_size) = attr.udata_value() {
                    member.bit_size = Size::new(bit_size);
                }
            }
            gimli::DW_AT_declaration => {
                declaration = true;
            }
            gimli::DW_AT_decl_file
            | gimli::DW_AT_decl_line
            | gimli::DW_AT_decl_column
            | gimli::DW_AT_external
            | gimli::DW_AT_accessibility
            | gimli::DW_AT_artificial
            | gimli::DW_AT_const_value
            | gimli::DW_AT_alignment
            | gimli::DW_AT_sibling => {}
            _ => {
                debug!(
                    "unknown member attribute: {} {:?}",
                    attr.name(),
                    attr.value()
                );
            }
        }
    }
    if declaration {
        // This is a C++ static data member. Parse it as a variable instead.
        // Note: the DWARF 5 standard says static members should use DW_TAG_variable,
        // but at least clang 3.7.1 uses DW_TAG_member.
        let variable = parse_variable(unit, dwarf, dwarf_unit, namespace.clone(), node)?;
        if variable.specification.is_some() {
            debug!(
                "specification on variable declaration at offset 0x{:x}",
                variable.offset.0
            );
        }
        unit.variables.push(variable.variable);
        return Ok(());
    }
    if let (Some(bit_offset), Some(bit_size)) = (bit_offset, member.bit_size.get()) {
        // DWARF version 2/3, but allowed in later versions for compatibility.
        // The data member is a bit field contained in an anonymous object.
        // member.bit_offset starts as the offset of the anonymous object.
        // byte_size is the size of the anonymous object.
        // bit_offset is the offset from the anonymous object MSB to the bit field MSB.
        // bit_size is the size of the bit field.
        if dwarf.endian.is_big_endian() {
            // For big endian, the MSB is the first bit, so we simply add bit_offset,
            // and byte_size is unneeded.
            member.bit_offset = member.bit_offset.wrapping_add(bit_offset);
        } else {
            // For little endian, we have to work backwards, so we need byte_size.
            if let Some(byte_size) = byte_size {
                // First find the offset of the MSB of the anonymous object.
                member.bit_offset = member.bit_offset.wrapping_add(byte_size * 8);
                // Then go backwards to the LSB of the bit field.
                member.bit_offset = member
                    .bit_offset
                    .wrapping_sub(bit_offset.wrapping_add(bit_size));
            } else {
                // DWARF version 2/3 says the byte_size can be inferred,
                // but it is unclear when this would be useful.
                // Delay implementing this until needed.
                debug!("missing byte_size for bit field offset");
            }
        }
    } else if byte_size.is_some() {
        // TODO: should this set member.bit_size?
        debug!("ignored member byte_size");
    }
    // Members are expected to be leaves; log any unexpected child.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown member child tag: {}", tag);
            }
        }
    }
    members.push(member);
    Ok(())
}
fn parse_inheritance<'input, 'abbrev, 'unit, 'tree, Endian>(
    inherits: &mut Vec<Inherit>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_inheritance DIE: the base type and its bit offset within
    // the derived type.
    let mut inherit = Inherit::default();
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    inherit.ty = offset;
                }
            }
            gimli::DW_AT_data_member_location => {
                if let Some(offset) = parse_data_member_location(dwarf_unit, &attr) {
                    inherit.bit_offset = offset;
                }
            }
            gimli::DW_AT_accessibility | gimli::DW_AT_virtuality | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown inheritance attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Inheritance entries are expected to be leaves; log any unexpected child.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown inheritance child tag: {}", child.entry().tag());
    }
    inherits.push(inherit);
    Ok(())
}
fn parse_data_member_location<Endian>(
    dwarf_unit: &DwarfUnit<Endian>,
    attr: &gimli::Attribute<Reader<Endian>>,
) -> Option<u64>
where
    Endian: gimli::Endianity,
{
    // Convert a DW_AT_data_member_location attribute to a bit offset,
    // returning None for forms that can't be interpreted as a constant.
    match attr.value() {
        gimli::AttributeValue::Udata(v) => Some(v * 8),
        gimli::AttributeValue::Sdata(v) => {
            if v < 0 {
                debug!("DW_AT_data_member_location is negative: {}", v);
                None
            } else {
                Some((v as u64) * 8)
            }
        }
        gimli::AttributeValue::Exprloc(expr) => {
            evaluate_member_location(&dwarf_unit.header, expr)
        }
        gimli::AttributeValue::LocationListsRef(offset) => {
            if dwarf_unit.header.version() == 3 {
                // HACK: while gimli is technically correct, in my experience this
                // is more likely to be a constant. This can happen for large
                // structs.
                Some(offset.0 as u64 * 8)
            } else {
                debug!("loclist for member: {:?}", attr.value());
                None
            }
        }
        _ => {
            debug!("unknown DW_AT_data_member_location: {:?}", attr.value());
            None
        }
    }
}
fn parse_enumeration_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    offset: TypeOffset,
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<EnumerationType<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a DW_TAG_enumeration_type DIE. Enumerator children are ignored
    // here — presumably they are picked up by parse_enumerators() on demand;
    // only nested subprograms are parsed eagerly.
    let mut en = EnumerationType {
        offset,
        namespace: namespace.clone(),
        ..Default::default()
    };
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => en.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    en.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_declaration => {
                if let gimli::AttributeValue::Flag(flag) = attr.value() {
                    en.declaration = flag;
                }
            }
            gimli::DW_AT_decl_file => parse_source_file(dwarf, dwarf_unit, &attr, &mut en.source),
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut en.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut en.source),
            gimli::DW_AT_sibling
            | gimli::DW_AT_encoding
            | gimli::DW_AT_type
            | gimli::DW_AT_alignment
            | gimli::DW_AT_enum_class => {}
            _ => debug!(
                "unknown enumeration attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Nested declarations are scoped by a namespace named after the type.
    let namespace = Some(Namespace::new(&en.namespace, en.name, NamespaceKind::Type));
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_enumerator => {}
            _ => {
                debug!("unknown enumeration child tag: {}", tag);
            }
        }
    }
    Ok(en)
}
fn parse_enumerators<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<Vec<Enumerator<'input>>>
where
    Endian: gimli::Endianity,
{
    // Collect all DW_TAG_enumerator children of an enumeration type DIE;
    // other child tags are silently ignored.
    let mut result = Vec::new();
    let mut children = node.children();
    while let Some(child) = children.next()? {
        if child.entry().tag() == gimli::DW_TAG_enumerator {
            result.push(parse_enumerator(dwarf, dwarf_unit, child)?);
        }
    }
    Ok(result)
}
fn parse_enumerator<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<Enumerator<'input>>
where
    Endian: gimli::Endianity,
{
    // Parse a single DW_TAG_enumerator DIE: its name and constant value.
    let mut result = Enumerator::default();
    let mut attributes = node.entry().attrs();
    while let Some(attr) = attributes.next()? {
        match attr.name() {
            gimli::DW_AT_name => result.name = dwarf.string(dwarf_unit, attr.value()),
            gimli::DW_AT_const_value => match attr.sdata_value() {
                Some(value) => result.value = Some(value),
                None => debug!("unknown enumerator const_value: {:?}", attr.value()),
            },
            _ => debug!(
                "unknown enumerator attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Enumerators are expected to be leaves; log any unexpected child.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown enumerator child tag: {}", child.entry().tag());
    }
    Ok(result)
}
/// Parse a `DW_TAG_array_type` DIE: element type, optional byte size, and the
/// element count of each dimension.
///
/// Each `DW_TAG_subrange_type` child contributes one dimension. The count is
/// taken from `DW_AT_count` when present, otherwise derived as
/// `upper - lower + 1` (with `lower` defaulting to 0); a dimension whose count
/// cannot be determined is recorded as `Size::none()`.
fn parse_array_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    _dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<ArrayType<'input>>
where
    Endian: gimli::Endianity,
{
    let mut array = ArrayType::default();
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    array.ty = offset;
                }
            }
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    array.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_name | gimli::DW_AT_GNU_vector | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown array attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // One entry per dimension, in declaration order.
    let mut counts = Vec::new();
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_subrange_type => {
                let mut count = None;
                let mut lower = None;
                let mut upper = None;
                let mut attrs = child.entry().attrs();
                while let Some(attr) = attrs.next()? {
                    match attr.name() {
                        gimli::DW_AT_count => {
                            count = attr.udata_value();
                        }
                        gimli::DW_AT_lower_bound => {
                            lower = attr.udata_value();
                        }
                        gimli::DW_AT_upper_bound => {
                            upper = attr.udata_value();
                        }
                        gimli::DW_AT_type => {}
                        _ => debug!(
                            "unknown array subrange attribute: {} {:?}",
                            attr.name(),
                            attr.value()
                        ),
                    }
                }
                // No explicit count: derive it from the bounds, guarding
                // against inverted bounds and u64 overflow.
                if count.is_none() {
                    if let Some(upper) = upper {
                        // TODO: use default lower bound for language
                        let lower = lower.unwrap_or(0);
                        count = u64::checked_sub(upper, lower)
                            .and_then(|count| u64::checked_add(count, 1));
                        if count.is_none() {
                            debug!("overflow for array bound: {}", upper);
                        }
                    }
                }
                if let Some(count) = count {
                    counts.push(Size::new(count));
                } else {
                    // Unknown dimensions.
                    counts.push(Size::none());
                }
            }
            tag => {
                debug!("unknown array child tag: {}", tag);
            }
        }
    }
    // Single dimension goes in `count`; multiple dimensions go in `counts`.
    if counts.len() == 1 {
        array.count = counts[0];
    } else if !counts.is_empty() {
        array.counts = counts.into_boxed_slice();
    }
    Ok(array)
}
/// Parse a `DW_TAG_subrange_type` DIE into a `SubrangeType`.
///
/// Reads the name, element type, bounds, count, and byte size. When both
/// `DW_AT_lower_bound` and `DW_AT_count` are present, the inclusive upper
/// bound is derived from them.
fn parse_subrange_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<SubrangeType<'input>>
where
    Endian: gimli::Endianity,
{
    // TODO: lower bound default should depend on language
    let mut subrange = SubrangeType::default();
    let mut count = None;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                subrange.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    subrange.ty = offset;
                }
            }
            gimli::DW_AT_lower_bound => {
                if let Some(lower) = attr.udata_value() {
                    subrange.lower = Some(lower);
                }
            }
            gimli::DW_AT_upper_bound => {
                if let Some(upper) = attr.udata_value() {
                    subrange.upper = Some(upper);
                }
            }
            gimli::DW_AT_count => {
                if let Some(v) = attr.udata_value() {
                    count = Some(v);
                }
            }
            gimli::DW_AT_byte_size => {
                if let Some(byte_size) = attr.udata_value() {
                    subrange.byte_size = Size::new(byte_size);
                }
            }
            gimli::DW_AT_artificial => {}
            _ => debug!(
                "unknown subrange attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    if let (Some(lower), Some(count)) = (subrange.lower, count) {
        // `DW_AT_upper_bound` is inclusive, so a subrange of `count` elements
        // starting at `lower` ends at `lower + count - 1`. This mirrors the
        // inverse computation in `parse_array_type` (count = upper - lower + 1);
        // the previous `lower + count` was off by one. Checked arithmetic
        // guards against a malformed count of 0 and against overflow.
        subrange.upper = count.checked_sub(1).and_then(|n| lower.checked_add(n));
        if subrange.upper.is_none() {
            debug!("invalid subrange bounds: lower {} count {}", lower, count);
        }
    }
    // No children are expected for this tag; log anything present.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown subrange child tag: {}", tag);
            }
        }
    }
    Ok(subrange)
}
/// Parse a `DW_TAG_subroutine_type` DIE into a `FunctionType`.
///
/// Collects the return type from the attributes and the parameter types
/// from `DW_TAG_formal_parameter` children.
fn parse_subroutine_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<FunctionType<'input>>
where
    Endian: gimli::Endianity,
{
    // Go treats subroutine types as pointers.
    // Not sure if this is valid for all languages.
    let pointer_size = u64::from(dwarf_unit.header.address_size());
    let mut result = FunctionType {
        byte_size: Size::new(pointer_size),
        ..Default::default()
    };
    let mut attr_iter = node.entry().attrs();
    while let Some(attr) = attr_iter.next()? {
        match attr.name() {
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    result.return_type = offset;
                }
            }
            gimli::DW_AT_name | gimli::DW_AT_prototyped | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown subroutine attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        if tag == gimli::DW_TAG_formal_parameter {
            parse_parameter_type(&mut result.parameters, dwarf, dwarf_unit, child)?;
        } else {
            debug!("unknown subroutine child tag: {}", tag);
        }
    }
    Ok(result)
}
/// Parse a `DW_TAG_unspecified_type` DIE.
///
/// Only the name is recorded; the result is placed in the given namespace.
fn parse_unspecified_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<UnspecifiedType<'input>>
where
    Endian: gimli::Endianity,
{
    let mut result = UnspecifiedType {
        namespace: namespace.clone(),
        ..Default::default()
    };
    let mut attr_iter = node.entry().attrs();
    while let Some(attr) = attr_iter.next()? {
        if attr.name() == gimli::DW_AT_name {
            result.name = dwarf.string(dwarf_unit, attr.value());
        } else {
            debug!(
                "unknown unspecified type attribute: {} {:?}",
                attr.name(),
                attr.value()
            );
        }
    }
    // No children are expected for this tag; log anything present.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown unspecified type child tag: {}", child.entry().tag());
    }
    Ok(result)
}
/// Parse a `DW_TAG_ptr_to_member_type` DIE into a `PointerToMemberType`.
///
/// Records the member type, the containing type, and the byte size. The
/// address size defaults to the unit's address size.
fn parse_pointer_to_member_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    _dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<PointerToMemberType>
where
    Endian: gimli::Endianity,
{
    let address_size = u64::from(dwarf_unit.header.address_size());
    let mut result = PointerToMemberType {
        address_size: Some(address_size),
        ..Default::default()
    };
    let mut attr_iter = node.entry().attrs();
    while let Some(attr) = attr_iter.next()? {
        match attr.name() {
            // Type of the member being pointed to.
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    result.ty = offset;
                }
            }
            // The class/struct that contains the member.
            gimli::DW_AT_containing_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    result.containing_ty = offset;
                }
            }
            gimli::DW_AT_byte_size => {
                if let Some(size) = attr.udata_value() {
                    result.byte_size = Size::new(size);
                }
            }
            _ => debug!(
                "unknown ptr_to_member type attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // No children are expected for this tag; log anything present.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        debug!("unknown ptr_to_member type child tag: {}", child.entry().tag());
    }
    Ok(result)
}
/// Parse a `DW_TAG_subprogram` DIE into a `Function` and add it to `unit`.
///
/// Address/size information is resolved from `DW_AT_ranges`, or else from
/// `DW_AT_low_pc` combined with `DW_AT_high_pc`. If the DIE references
/// another DIE via `DW_AT_specification` or `DW_AT_abstract_origin`, the
/// partially parsed function is deferred onto `subprograms` so it can
/// inherit from the referenced function later; otherwise its children are
/// parsed and the function is pushed onto `unit.functions`.
fn parse_subprogram<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let offset = node.entry().offset();
    let mut function = Function {
        id: Id::new(0),
        offset: offset.to_unit_section_offset(dwarf_unit).into(),
        namespace: namespace.clone(),
        name: None,
        symbol_name: None,
        linkage_name: None,
        source: Source::default(),
        address: Address::none(),
        size: Size::none(),
        ranges: Vec::new(),
        inline: false,
        declaration: false,
        parameters: Vec::new(),
        return_type: TypeOffset::none(),
    };
    let mut specification = None;
    let mut abstract_origin = false;
    // Raw attribute values; combined into address/size/ranges after the loop.
    let mut high_pc = None;
    let mut size = None;
    let mut ranges = None;
    let entry = node.entry();
    let mut attrs = entry.attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                function.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
                function.linkage_name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_decl_file => {
                parse_source_file(dwarf, dwarf_unit, &attr, &mut function.source)
            }
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut function.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut function.source),
            gimli::DW_AT_inline => {
                if let gimli::AttributeValue::Inline(val) = attr.value() {
                    match val {
                        gimli::DW_INL_inlined | gimli::DW_INL_declared_inlined => {
                            function.inline = true
                        }
                        _ => function.inline = false,
                    }
                }
            }
            gimli::DW_AT_low_pc => {
                if let gimli::AttributeValue::Addr(addr) = attr.value() {
                    // Treat address 0 as invalid, unless the unit itself
                    // starts at address 0.
                    if addr != 0 || unit.low_pc == Some(0) {
                        function.address = Address::new(addr);
                    }
                }
            }
            // DW_AT_high_pc is either an absolute address or an offset
            // (i.e. the function size) relative to DW_AT_low_pc.
            gimli::DW_AT_high_pc => match attr.value() {
                gimli::AttributeValue::Addr(addr) => high_pc = Some(addr),
                gimli::AttributeValue::Udata(val) => {
                    if val != 0 {
                        size = Some(val);
                    }
                }
                _ => {}
            },
            gimli::DW_AT_ranges => {
                if let gimli::AttributeValue::RangeListsRef(val) = attr.value() {
                    ranges = Some(val);
                }
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    function.return_type = offset;
                }
            }
            gimli::DW_AT_specification | gimli::DW_AT_abstract_origin => {
                if let Some(offset) = parse_function_offset(dwarf_unit, &attr) {
                    specification = Some(offset);
                    abstract_origin = attr.name() == gimli::DW_AT_abstract_origin;
                }
            }
            gimli::DW_AT_declaration => {
                if let gimli::AttributeValue::Flag(flag) = attr.value() {
                    function.declaration = flag;
                }
            }
            gimli::DW_AT_frame_base => {
                // FIXME
            }
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_external
            | gimli::DW_AT_call_all_calls
            | gimli::DW_AT_call_all_tail_calls
            | gimli::DW_AT_GNU_all_call_sites
            | gimli::DW_AT_GNU_all_tail_call_sites
            | gimli::DW_AT_prototyped
            | gimli::DW_AT_accessibility
            | gimli::DW_AT_explicit
            | gimli::DW_AT_artificial
            | gimli::DW_AT_object_pointer
            | gimli::DW_AT_virtuality
            | gimli::DW_AT_vtable_elem_location
            | gimli::DW_AT_containing_type
            | gimli::DW_AT_main_subprogram
            | gimli::DW_AT_noreturn
            | gimli::DW_AT_APPLE_optimized
            | gimli::DW_AT_APPLE_omit_frame_ptr
            | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown subprogram attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    if let Some(offset) = ranges {
        // Prefer DW_AT_ranges: the size is the sum of all range lengths and
        // the address is the start of the first range.
        let offset = dwarf.read.ranges_offset_from_raw(dwarf_unit, offset);
        let mut ranges = dwarf.read.ranges(dwarf_unit, offset)?;
        let mut size = 0;
        while let Some(range) = ranges.next()? {
            if range.end > range.begin {
                size += range.end - range.begin;
                function.ranges.push(Range {
                    begin: range.begin,
                    end: range.end,
                });
            }
        }
        function.size = Size::new(size);
        function.address = Address::new(function.ranges.first().map(|r| r.begin).unwrap_or(0));
    } else if let Some(address) = function.address.get() {
        // Fall back to low_pc + (high_pc or size).
        if let Some(high_pc) = high_pc {
            if high_pc > address {
                function.size = Size::new(high_pc - address);
                function.ranges.push(Range {
                    begin: address,
                    end: high_pc,
                });
            }
        } else if let Some(size) = size {
            function.size = Size::new(size);
            function.ranges.push(Range {
                begin: address,
                end: address.wrapping_add(size),
            });
        }
    }
    if let Some(specification) = specification {
        // Defer: this function inherits from another DIE that may not have
        // been parsed yet (see `inherit_subprogram`).
        subprograms.push(DwarfSubprogram {
            offset,
            specification,
            abstract_origin,
            function,
        });
        return Ok(());
    }
    parse_subprogram_children(
        unit,
        dwarf,
        dwarf_unit,
        subprograms,
        variables,
        &mut function,
        node.children(),
    )?;
    unit.functions.push(function);
    Ok(())
}
/// Copy declaration-level information from the function at `specification`
/// into `function`.
///
/// Returns `false` when the specification offset is not found in
/// `functions`. Fields already set on `function` are kept; missing ones are
/// inherited. When following a `DW_AT_abstract_origin`, the parameter list
/// is inherited as well.
fn inherit_subprogram<'input>(
    functions: &BTreeMap<FunctionOffset, Function<'input>>,
    function: &mut Function<'input>,
    specification: FunctionOffset,
    abstract_origin: bool,
) -> bool {
    let spec = if let Some(spec) = functions.get(&specification) {
        spec
    } else {
        return false;
    };
    function.namespace = spec.namespace.clone();
    if function.name.is_none() {
        function.name = spec.name;
    }
    if function.linkage_name.is_none() {
        function.linkage_name = spec.linkage_name;
    }
    if function.source.is_none() {
        function.source = spec.source.clone();
    }
    if function.return_type.is_none() {
        function.return_type = spec.return_type;
    }
    if abstract_origin {
        // We inherit all children, and then extend them when parsing our children.
        function.parameters = spec.parameters.clone();
    } else {
        // TODO: inherit children from specifications?
    }
    true
}
/// Dispatch the children of a `DW_TAG_subprogram` DIE during the main pass.
///
/// Formal parameters are collected into `function`; nested subprograms,
/// lexical blocks, and type definitions are parsed recursively. Variables
/// and inlined subroutines are skipped here — they are handled by the
/// details pass.
fn parse_subprogram_children<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    function: &mut Function<'input>,
    mut iter: gimli::EntriesTreeIter<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Children are scoped to a namespace named after this function.
    let namespace = Some(Namespace::new(
        &function.namespace,
        function.name,
        NamespaceKind::Function,
    ));
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_formal_parameter => {
                parse_parameter_type(&mut function.parameters, dwarf, dwarf_unit, child)?;
            }
            gimli::DW_TAG_variable => {
                // Handled in details.
            }
            gimli::DW_TAG_inlined_subroutine => {
                parse_inlined_subroutine(child)?;
            }
            gimli::DW_TAG_lexical_block => {
                parse_lexical_block(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )?;
            }
            // Recognized but deliberately ignored child tags.
            gimli::DW_TAG_unspecified_parameters
            | gimli::DW_TAG_template_type_parameter
            | gimli::DW_TAG_template_value_parameter
            | gimli::DW_TAG_GNU_template_parameter_pack
            | gimli::DW_TAG_label
            | gimli::DW_TAG_imported_declaration
            | gimli::DW_TAG_imported_module
            | gimli::DW_TAG_call_site
            | gimli::DW_TAG_GNU_call_site => {}
            tag => {
                // Anything else may be a locally-defined type.
                if !parse_type(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    &namespace,
                    child,
                )? {
                    debug!("unknown subprogram child tag: {}", tag);
                }
            }
        }
    }
    Ok(())
}
/// Parse a `DW_TAG_formal_parameter` DIE (signature pass) into `parameters`.
///
/// If the DIE has a `DW_AT_abstract_origin` matching a parameter already in
/// `parameters`, that entry is updated in place; otherwise a new parameter
/// is appended.
fn parse_parameter_type<'input, 'abbrev, 'unit, 'tree, Endian>(
    parameters: &mut Vec<ParameterType<'input>>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut parameter = ParameterType::default();
    let offset = node.entry().offset();
    let offset = offset.to_unit_section_offset(dwarf_unit);
    parameter.offset = offset.into();
    let mut abstract_origin = None;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_abstract_origin => {
                if let Some(offset) = parse_parameter_offset(dwarf_unit, &attr) {
                    abstract_origin = Some(offset);
                }
            }
            gimli::DW_AT_name => {
                parameter.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    parameter.ty = offset;
                }
            }
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_location
            | gimli::DW_AT_decl_file
            | gimli::DW_AT_decl_line
            | gimli::DW_AT_decl_column
            | gimli::DW_AT_artificial
            | gimli::DW_AT_const_value
            | gimli::DW_AT_GNU_locviews
            | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown parameter attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // No children are expected for this tag; log anything present.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown parameter child tag: {}", tag);
            }
        }
    }
    if let Some(abstract_origin) = abstract_origin {
        // TODO: use a hash?
        if let Some(index) = parameters.iter().position(|x| x.offset == abstract_origin) {
            // Merge into the existing (abstract) parameter, overriding only
            // the fields this DIE provides.
            let p = &mut parameters[index];
            if parameter.name.is_some() {
                p.name = parameter.name;
            }
            if parameter.ty.is_some() {
                p.ty = parameter.ty;
            }
            return Ok(());
        } else {
            // Origin not found; log enough offset context to locate the DIE
            // in the debug info section.
            let unit_offset = offset
                .to_unit_offset(dwarf_unit)
                .unwrap_or(gimli::UnitOffset(0));
            let offset = match offset {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            let header_offset = match dwarf_unit.header.offset() {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            debug!(
                "missing parameter abstract origin: 0x{:08x}(0x{:08x}+0x{:08x})",
                offset, header_offset, unit_offset.0
            );
        }
    }
    parameters.push(parameter);
    Ok(())
}
/// Parse a `DW_TAG_formal_parameter` DIE (details pass) into `parameters`.
///
/// Unlike `parse_parameter_type`, this pass also evaluates the parameter's
/// `DW_AT_location`. If the DIE has a `DW_AT_abstract_origin` matching a
/// parameter already in `parameters`, that entry is updated in place;
/// otherwise a new parameter is appended.
fn parse_parameter<'input, 'abbrev, 'unit, 'tree, Endian>(
    parameters: &mut Vec<Parameter<'input>>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut parameter = Parameter::default();
    let offset = node.entry().offset();
    let offset = offset.to_unit_section_offset(dwarf_unit);
    parameter.offset = offset.into();
    let mut abstract_origin = None;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_abstract_origin => {
                if let Some(offset) = parse_parameter_offset(dwarf_unit, &attr) {
                    abstract_origin = Some(offset);
                }
            }
            gimli::DW_AT_name => {
                parameter.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    parameter.ty = offset;
                }
            }
            gimli::DW_AT_location => {
                match attr.value() {
                    // A single expression, valid over the whole scope.
                    gimli::AttributeValue::Exprloc(expr) => {
                        evaluate_parameter_location(
                            &dwarf_unit.header,
                            Range::all(),
                            expr,
                            &mut parameter,
                        );
                    }
                    // A location list: one expression per address range.
                    gimli::AttributeValue::LocationListsRef(offset) => {
                        let mut locations = dwarf.read.locations(dwarf_unit, offset)?;
                        while let Some(location) = locations.next()? {
                            // TODO: use location.range too
                            evaluate_parameter_location(
                                &dwarf_unit.header,
                                location.range.into(),
                                location.data,
                                &mut parameter,
                            );
                        }
                    }
                    _ => {
                        debug!("unknown parameter DW_AT_location: {:?}", attr.value());
                    }
                }
            }
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_decl_file
            | gimli::DW_AT_decl_line
            | gimli::DW_AT_decl_column
            | gimli::DW_AT_artificial
            | gimli::DW_AT_const_value
            | gimli::DW_AT_GNU_locviews
            | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown parameter attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // No children are expected for this tag; log anything present.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown parameter child tag: {}", tag);
            }
        }
    }
    if let Some(abstract_origin) = abstract_origin {
        // TODO: use a hash?
        if let Some(index) = parameters.iter().position(|x| x.offset == abstract_origin) {
            // Merge into the existing (abstract) parameter, overriding only
            // the fields this DIE provides; locations accumulate.
            let p = &mut parameters[index];
            if parameter.name.is_some() {
                p.name = parameter.name;
            }
            if parameter.ty.is_some() {
                p.ty = parameter.ty;
            }
            if !parameter.locations.is_empty() {
                p.locations.extend(&parameter.locations);
            }
            return Ok(());
        } else {
            // Origin not found; log enough offset context to locate the DIE
            // in the debug info section.
            let unit_offset = offset
                .to_unit_offset(dwarf_unit)
                .unwrap_or(gimli::UnitOffset(0));
            let offset = match offset {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            let header_offset = match dwarf_unit.header.offset() {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            debug!(
                "missing parameter abstract origin: 0x{:08x}(0x{:08x}+0x{:08x})",
                offset, header_offset, unit_offset.0
            );
        }
    }
    parameters.push(parameter);
    Ok(())
}
/// Parse a `DW_TAG_lexical_block` DIE during the main pass.
///
/// Address attributes are deliberately ignored here; variables and inlined
/// subroutines are collected by the details pass. Nested blocks,
/// subprograms, and locally-defined types are parsed recursively.
fn parse_lexical_block<'input, 'abbrev, 'unit, 'tree, Endian>(
    unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    subprograms: &mut Vec<DwarfSubprogram<'input>>,
    variables: &mut Vec<DwarfVariable<'input>>,
    namespace: &Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_low_pc
            | gimli::DW_AT_high_pc
            | gimli::DW_AT_ranges
            | gimli::DW_AT_abstract_origin
            | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown lexical_block attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_variable => {
                // Handled in details.
            }
            gimli::DW_TAG_inlined_subroutine => {
                parse_inlined_subroutine(child)?;
            }
            gimli::DW_TAG_lexical_block => {
                parse_lexical_block(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )?;
            }
            gimli::DW_TAG_subprogram => {
                parse_subprogram(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )?;
            }
            // Recognized but deliberately ignored child tags.
            gimli::DW_TAG_formal_parameter
            | gimli::DW_TAG_label
            | gimli::DW_TAG_imported_declaration
            | gimli::DW_TAG_imported_module
            | gimli::DW_TAG_call_site
            | gimli::DW_TAG_GNU_call_site => {}
            tag => {
                // Anything else may be a locally-defined type.
                if !parse_type(
                    unit,
                    dwarf,
                    dwarf_unit,
                    subprograms,
                    variables,
                    namespace,
                    child,
                )? {
                    debug!("unknown lexical_block child tag: {}", tag);
                }
            }
        }
    }
    Ok(())
}
// Only checks for unknown attributes and tags.
fn parse_inlined_subroutine<Endian>(node: gimli::EntriesTreeNode<Reader<Endian>>) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            // Handled in details.
            gimli::DW_TAG_formal_parameter | gimli::DW_TAG_variable => {}
            gimli::DW_TAG_inlined_subroutine => parse_inlined_subroutine(child)?,
            gimli::DW_TAG_lexical_block => parse_inlined_lexical_block(child)?,
            gimli::DW_TAG_label | gimli::DW_TAG_call_site | gimli::DW_TAG_GNU_call_site => {}
            _ => debug!("unknown inlined_subroutine child tag: {}", tag),
        }
    }
    Ok(())
}
// Only checks for unknown attributes and tags.
fn parse_inlined_lexical_block<Endian>(node: gimli::EntriesTreeNode<Reader<Endian>>) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut attr_iter = node.entry().attrs();
    while let Some(attr) = attr_iter.next()? {
        let name = attr.name();
        match name {
            gimli::DW_AT_low_pc
            | gimli::DW_AT_high_pc
            | gimli::DW_AT_ranges
            | gimli::DW_AT_abstract_origin
            | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown inlined lexical_block attribute: {} {:?}",
                name,
                attr.value()
            ),
        }
    }
    let mut children = node.children();
    while let Some(child) = children.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_inlined_subroutine => parse_inlined_subroutine(child)?,
            gimli::DW_TAG_lexical_block => parse_inlined_lexical_block(child)?,
            gimli::DW_TAG_formal_parameter
            | gimli::DW_TAG_variable
            | gimli::DW_TAG_label
            | gimli::DW_TAG_call_site
            | gimli::DW_TAG_GNU_call_site
            | gimli::DW_TAG_imported_module => {}
            _ => debug!("unknown inlined lexical_block child tag: {}", tag),
        }
    }
    Ok(())
}
/// Parse the details (parameters, local variables, inlined functions) of a
/// `DW_TAG_subprogram` DIE.
///
/// If the DIE has a `DW_AT_abstract_origin`, the details of the origin are
/// looked up first and then extended with this DIE's own children.
fn parse_subprogram_details<'input, 'abbrev, 'unit, 'tree, Endian>(
    hash: &FileHash<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<FunctionDetails<'input>>
where
    Endian: gimli::Endianity,
{
    let mut abstract_origin = None;
    let entry = node.entry();
    let mut attrs = entry.attrs();
    while let Some(attr) = attrs.next()? {
        // Only DW_AT_abstract_origin matters here; unknown attributes were
        // already reported by `parse_subprogram` during the main pass.
        if attr.name() == gimli::DW_AT_abstract_origin {
            if let Some(offset) = parse_function_offset(dwarf_unit, &attr) {
                abstract_origin = Some(offset);
            }
        }
    }
    // FIXME: limit recursion
    let mut details = abstract_origin
        .and_then(|offset| dwarf.get_function_details(offset, hash))
        .unwrap_or_else(|| FunctionDetails {
            parameters: Vec::new(),
            variables: Vec::new(),
            inlined_functions: Vec::new(),
        });
    parse_subprogram_children_details(hash, dwarf, dwarf_unit, &mut details, node.children())?;
    Ok(details)
}
/// Dispatch the children of a subprogram DIE while gathering its details.
///
/// Parameters, local variables, and inlined functions are accumulated into
/// `function`; lexical blocks are descended into recursively.
fn parse_subprogram_children_details<'input, 'abbrev, 'unit, 'tree, Endian>(
    hash: &FileHash<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    function: &mut FunctionDetails<'input>,
    mut iter: gimli::EntriesTreeIter<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    while let Some(child) = iter.next()? {
        let tag = child.entry().tag();
        match tag {
            gimli::DW_TAG_formal_parameter => {
                parse_parameter(&mut function.parameters, dwarf, dwarf_unit, child)?
            }
            gimli::DW_TAG_variable => {
                parse_local_variable(&mut function.variables, dwarf, dwarf_unit, child)?
            }
            gimli::DW_TAG_inlined_subroutine => {
                let inlined = parse_inlined_subroutine_details(hash, dwarf, dwarf_unit, child)?;
                function.inlined_functions.push(inlined);
            }
            gimli::DW_TAG_lexical_block => parse_lexical_block_details(
                &mut function.inlined_functions,
                &mut function.variables,
                hash,
                dwarf,
                dwarf_unit,
                child,
            )?,
            // Checking for unknown tags is done in `parse_subprogram_children`.
            _ => {}
        }
    }
    Ok(())
}
/// Gather details from a `DW_TAG_lexical_block` DIE.
///
/// Local variables and inlined functions found in the block (or in nested
/// blocks) are appended to the supplied vectors.
fn parse_lexical_block_details<'input, 'abbrev, 'unit, 'tree, Endian>(
    inlined_functions: &mut Vec<InlinedFunction<'input>>,
    local_variables: &mut Vec<LocalVariable<'input>>,
    hash: &FileHash<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    // Checking for unknown attributes is done in `parse_lexical_block`.
    let mut children = node.children();
    while let Some(child) = children.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_variable => {
                parse_local_variable(local_variables, dwarf, dwarf_unit, child)?
            }
            gimli::DW_TAG_inlined_subroutine => {
                let inlined = parse_inlined_subroutine_details(hash, dwarf, dwarf_unit, child)?;
                inlined_functions.push(inlined);
            }
            gimli::DW_TAG_lexical_block => parse_lexical_block_details(
                inlined_functions,
                local_variables,
                hash,
                dwarf,
                dwarf_unit,
                child,
            )?,
            // Checking for unknown tags is done in `parse_lexical_block`.
            _ => {}
        }
    }
    Ok(())
}
/// Parse a `DW_TAG_inlined_subroutine` DIE into an `InlinedFunction`.
///
/// Parameters and variables are first inherited from the abstract origin's
/// details and then extended by this DIE's own children. The size is taken
/// from `DW_AT_ranges`, `DW_AT_high_pc` (as an offset), or the
/// `low_pc`/`high_pc` pair, in that order of preference.
fn parse_inlined_subroutine_details<'input, 'abbrev, 'unit, 'tree, Endian>(
    hash: &FileHash<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<InlinedFunction<'input>>
where
    Endian: gimli::Endianity,
{
    let mut function = InlinedFunction::default();
    // Raw attribute values; combined into a size after the loop.
    let mut low_pc = None;
    let mut high_pc = None;
    let mut size = None;
    let mut ranges = None;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_abstract_origin => {
                if let Some(offset) = parse_function_offset(dwarf_unit, &attr) {
                    function.abstract_origin = offset;
                }
            }
            gimli::DW_AT_low_pc => {
                if let gimli::AttributeValue::Addr(addr) = attr.value() {
                    low_pc = Some(addr);
                }
            }
            // DW_AT_high_pc is either an absolute address or an offset
            // (i.e. the size) relative to DW_AT_low_pc.
            gimli::DW_AT_high_pc => match attr.value() {
                gimli::AttributeValue::Addr(addr) => high_pc = Some(addr),
                gimli::AttributeValue::Udata(val) => size = Some(val),
                _ => {}
            },
            gimli::DW_AT_ranges => {
                if let gimli::AttributeValue::RangeListsRef(val) = attr.value() {
                    ranges = Some(val);
                }
            }
            gimli::DW_AT_call_file => {
                parse_source_file(dwarf, dwarf_unit, &attr, &mut function.call_source)
            }
            gimli::DW_AT_call_line => parse_source_line(&attr, &mut function.call_source),
            gimli::DW_AT_call_column => parse_source_column(&attr, &mut function.call_source),
            gimli::DW_AT_entry_pc | gimli::DW_AT_sibling => {}
            _ => debug!(
                "unknown inlined_subroutine attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // Inherit parameters/variables from the abstract origin's details.
    if function.abstract_origin.is_some() {
        if let Some(details) = dwarf.get_function_details(function.abstract_origin, hash) {
            function.parameters = details.parameters;
            function.variables = details.variables;
            if !function.inlined_functions.is_empty() {
                debug!("abstract origin with inlined functions");
            }
        } else {
            debug!("inlined_subroutine with invalid abstract origin");
        }
    } else {
        debug!("inlined_subroutine with no abstract origin");
    }
    // Determine the size: sum of ranges, else explicit size, else span.
    if let Some(offset) = ranges {
        let mut size = 0;
        let offset = dwarf.read.ranges_offset_from_raw(dwarf_unit, offset);
        let mut ranges = dwarf.read.ranges(dwarf_unit, offset)?;
        while let Some(range) = ranges.next()? {
            size += range.end.wrapping_sub(range.begin);
        }
        function.size = Size::new(size);
    } else if let Some(size) = size {
        function.size = Size::new(size);
    } else if let (Some(low_pc), Some(high_pc)) = (low_pc, high_pc) {
        function.size = Size::new(high_pc.wrapping_sub(low_pc));
    } else {
        debug!("unknown inlined_subroutine size");
    }
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            gimli::DW_TAG_formal_parameter => {
                parse_parameter(&mut function.parameters, dwarf, dwarf_unit, child)?;
            }
            gimli::DW_TAG_variable => {
                parse_local_variable(&mut function.variables, dwarf, dwarf_unit, child)?;
            }
            gimli::DW_TAG_inlined_subroutine => {
                function
                    .inlined_functions
                    .push(parse_inlined_subroutine_details(
                        hash, dwarf, dwarf_unit, child,
                    )?);
            }
            gimli::DW_TAG_lexical_block => {
                parse_lexical_block_details(
                    &mut function.inlined_functions,
                    &mut function.variables,
                    hash,
                    dwarf,
                    dwarf_unit,
                    child,
                )?;
            }
            gimli::DW_TAG_label | gimli::DW_TAG_call_site | gimli::DW_TAG_GNU_call_site => {}
            tag => {
                debug!("unknown inlined_subroutine child tag: {}", tag);
            }
        }
    }
    Ok(function)
}
/// Parse a `DW_TAG_variable` DIE (global/namespace scope) into a
/// `DwarfVariable`.
///
/// The `DW_AT_specification` reference, if present, is returned alongside
/// the variable so the caller can merge the declaration later.
fn parse_variable<'input, 'abbrev, 'unit, 'tree, Endian>(
    _unit: &mut Unit<'input>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    namespace: Option<Arc<Namespace<'input>>>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<DwarfVariable<'input>>
where
    Endian: gimli::Endianity,
{
    let offset = node.entry().offset();
    let mut specification = None;
    let mut variable = Variable {
        offset: offset.to_unit_section_offset(dwarf_unit).into(),
        namespace,
        ..Default::default()
    };
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_name => {
                variable.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_linkage_name | gimli::DW_AT_MIPS_linkage_name => {
                variable.linkage_name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    variable.ty = offset;
                }
            }
            gimli::DW_AT_specification => {
                if let Some(offset) = parse_variable_offset(dwarf_unit, &attr) {
                    specification = Some(offset);
                }
            }
            gimli::DW_AT_declaration => {
                if let gimli::AttributeValue::Flag(flag) = attr.value() {
                    variable.declaration = flag;
                }
            }
            gimli::DW_AT_decl_file => {
                parse_source_file(dwarf, dwarf_unit, &attr, &mut variable.source)
            }
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut variable.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut variable.source),
            gimli::DW_AT_location => match attr.value() {
                // A single expression: evaluate it to an address (and,
                // when available, a size).
                gimli::AttributeValue::Exprloc(expr) => {
                    if let Some((address, size)) =
                        evaluate_variable_location(&dwarf_unit.header, expr)
                    {
                        variable.address = address;
                        if size.is_some() {
                            variable.size = size;
                        }
                    }
                }
                // Location lists are not supported for global variables.
                gimli::AttributeValue::LocationListsRef(..) => {
                    debug!("loclist for variable: {:?}", attr.value());
                }
                _ => {
                    debug!("unknown variable DW_AT_location: {:?}", attr.value());
                }
            },
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_abstract_origin
            | gimli::DW_AT_artificial
            | gimli::DW_AT_const_value
            | gimli::DW_AT_external
            | gimli::DW_AT_accessibility
            | gimli::DW_AT_alignment => {}
            _ => debug!(
                "unknown variable attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // No children are expected for this tag; log anything present.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown variable child tag: {}", tag);
            }
        }
    }
    Ok(DwarfVariable {
        offset,
        specification,
        variable,
    })
}
/// Parse a `DW_TAG_variable` DIE (function scope) into `variables`.
///
/// Evaluates the variable's `DW_AT_location`. If the DIE has a
/// `DW_AT_abstract_origin` matching a variable already in `variables`, that
/// entry is updated in place; otherwise a new variable is appended.
fn parse_local_variable<'input, 'abbrev, 'unit, 'tree, Endian>(
    variables: &mut Vec<LocalVariable<'input>>,
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    node: gimli::EntriesTreeNode<'abbrev, 'unit, 'tree, Reader<'input, Endian>>,
) -> Result<()>
where
    Endian: gimli::Endianity,
{
    let mut variable = LocalVariable::default();
    let offset = node.entry().offset();
    let offset = offset.to_unit_section_offset(dwarf_unit);
    variable.offset = offset.into();
    let mut abstract_origin = None;
    let mut attrs = node.entry().attrs();
    while let Some(attr) = attrs.next()? {
        match attr.name() {
            gimli::DW_AT_abstract_origin => {
                if let Some(offset) = parse_variable_offset(dwarf_unit, &attr) {
                    abstract_origin = Some(offset);
                }
            }
            gimli::DW_AT_name => {
                variable.name = dwarf.string(dwarf_unit, attr.value());
            }
            gimli::DW_AT_type => {
                if let Some(offset) = parse_type_offset(dwarf_unit, &attr) {
                    variable.ty = offset;
                }
            }
            gimli::DW_AT_decl_file => {
                parse_source_file(dwarf, dwarf_unit, &attr, &mut variable.source)
            }
            gimli::DW_AT_decl_line => parse_source_line(&attr, &mut variable.source),
            gimli::DW_AT_decl_column => parse_source_column(&attr, &mut variable.source),
            gimli::DW_AT_location => {
                match attr.value() {
                    // A single expression, valid over the whole scope.
                    gimli::AttributeValue::Exprloc(expr) => {
                        evaluate_local_variable_location(
                            &dwarf_unit.header,
                            Range::all(),
                            expr,
                            &mut variable,
                        );
                    }
                    // A location list: one expression per address range.
                    gimli::AttributeValue::LocationListsRef(offset) => {
                        let mut locations = dwarf.read.locations(dwarf_unit, offset)?;
                        while let Some(location) = locations.next()? {
                            // TODO: use location.range too
                            evaluate_local_variable_location(
                                &dwarf_unit.header,
                                location.range.into(),
                                location.data,
                                &mut variable,
                            );
                        }
                    }
                    _ => {
                        debug!("unknown local variable DW_AT_location: {:?}", attr.value());
                    }
                }
            }
            // Recognized but deliberately ignored attributes.
            gimli::DW_AT_alignment
            | gimli::DW_AT_artificial
            | gimli::DW_AT_const_value
            | gimli::DW_AT_external
            | gimli::DW_AT_GNU_locviews => {}
            _ => debug!(
                "unknown local variable attribute: {} {:?}",
                attr.name(),
                attr.value()
            ),
        }
    }
    // No children are expected for this tag; log anything present.
    let mut iter = node.children();
    while let Some(child) = iter.next()? {
        match child.entry().tag() {
            tag => {
                debug!("unknown variable child tag: {}", tag);
            }
        }
    }
    if let Some(abstract_origin) = abstract_origin {
        // TODO: use a hash?
        if let Some(index) = variables.iter().position(|x| x.offset == abstract_origin) {
            // Merge into the existing (abstract) variable, overriding only
            // the fields this DIE provides; locations accumulate.
            let v = &mut variables[index];
            if variable.name.is_some() {
                v.name = variable.name;
            }
            if variable.ty.is_some() {
                v.ty = variable.ty;
            }
            if variable.source.is_some() {
                v.source = variable.source;
            }
            if variable.address.is_some() {
                v.address = variable.address;
            }
            if variable.size.is_some() {
                v.size = variable.size;
            }
            if !variable.locations.is_empty() {
                v.locations.extend(&variable.locations);
            }
            return Ok(());
        } else {
            // Origin not found; log enough offset context to locate the DIE
            // in the debug info section.
            let unit_offset = offset
                .to_unit_offset(dwarf_unit)
                .unwrap_or(gimli::UnitOffset(0));
            let offset = match offset {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            let header_offset = match dwarf_unit.header.offset() {
                gimli::UnitSectionOffset::DebugInfoOffset(offset) => offset.0,
                _ => panic!("unexpected offset"),
            };
            debug!(
                "missing variable abstract origin: 0x{:08x}(0x{:08x}+0x{:08x})",
                offset, header_offset, unit_offset.0
            );
        }
    }
    variables.push(variable);
    Ok(())
}
/// Evaluates a `DW_AT_data_member_location` expression.
///
/// Returns the member's offset converted from bytes to bits, or `None` if the
/// expression does not evaluate to exactly one address piece.
fn evaluate_member_location<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    expression: gimli::Expression<Reader<'input, Endian>>,
) -> Option<u64>
where
    Endian: gimli::Endianity,
{
    let pieces = evaluate(unit, expression, true);
    match pieces.as_slice() {
        [piece] => match piece.location {
            // The evaluated address is a byte offset; callers track member
            // locations in bits, hence the `* 8`.
            gimli::Location::Address { address } => Some(address * 8),
            gimli::Location::Register { .. } => None,
            _ => {
                debug!("unknown DW_AT_data_member_location result: {:?}", pieces);
                None
            }
        },
        _ => {
            debug!("unsupported number of evaluation pieces: {:?}", pieces);
            None
        }
    }
}
/// Evaluates a variable's `DW_AT_location` expression to a single address.
///
/// Returns the first address piece (and its byte size, rounded up from bits,
/// when known); additional address pieces are logged and ignored.
fn evaluate_variable_location<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    expression: gimli::Expression<Reader<'input, Endian>>,
) -> Option<(Address, Size)>
where
    Endian: gimli::Endianity,
{
    let pieces = evaluate(unit, expression, false);
    let mut found: Option<(Address, Size)> = None;
    for piece in pieces.iter() {
        match piece.location {
            gimli::Location::Address { address } => {
                if found.is_none() {
                    // Round the bit size up to whole bytes when present.
                    let size = match piece.size_in_bits {
                        Some(bits) => Size::new((bits + 7) / 8),
                        None => Size::none(),
                    };
                    found = Some((Address::new(address), size));
                } else {
                    debug!(
                        "unsupported DW_AT_location with multiple addresses: {:?}",
                        pieces
                    );
                }
            }
            // Non-memory locations carry no static address; skip quietly.
            gimli::Location::Empty
            | gimli::Location::Register { .. }
            | gimli::Location::Value { .. }
            | gimli::Location::ImplicitPointer { .. } => {}
            _ => debug!("unknown DW_AT_location piece: {:?}", piece),
        }
    }
    found
}
/// Evaluates a local variable's location expression and records the results
/// on `variable`.
///
/// `range` is the PC range the expression is valid for (or `Range::all()` for
/// a single `Exprloc`). All evaluated pieces are appended to
/// `variable.locations`; additionally the first memory address found is
/// stored in `variable.address`/`variable.size`.
fn evaluate_local_variable_location<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    range: Range,
    expression: gimli::Expression<Reader<'input, Endian>>,
    variable: &mut LocalVariable<'input>,
) where
    Endian: gimli::Endianity,
{
    let pieces = match evaluate_simple(unit, expression, false) {
        Ok(locations) => locations,
        Err(_e) => {
            // This happens a lot, not sure if bugs or bad DWARF.
            //debug!("simple evaluation failed: {}: {:?}", _e, expression.0);
            return;
        }
    };
    for piece in &pieces {
        // Value pieces (DW_OP_stack_value etc.) have no memory address.
        if piece.is_value {
            continue;
        }
        // Can this be Literal too?
        if let Location::Address { address } = piece.location {
            if variable.address.is_some() {
                // Keep the first address found; only report conflicts.
                if address != variable.address {
                    // TODO: combine address ranges?
                    debug!(
                        "unsupported DW_AT_location with multiple addresses: {:?}",
                        pieces
                    );
                }
            } else {
                variable.address = address;
                // Size is derived from the piece's bit size, rounded up to
                // whole bytes, when the expression specifies one.
                if let Some(bit_size) = piece.bit_size.get() {
                    variable.size = Size::new((bit_size + 7) / 8);
                }
            }
        }
    }
    // Record every piece (value or memory) with the range it is valid for.
    variable
        .locations
        .extend(pieces.into_iter().map(|piece| (range, piece)));
}
/// Evaluates a parameter's location expression and appends the resulting
/// pieces (tagged with `range`) to `parameter.locations`.
fn evaluate_parameter_location<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    range: Range,
    expression: gimli::Expression<Reader<'input, Endian>>,
    parameter: &mut Parameter<'input>,
) where
    Endian: gimli::Endianity,
{
    // Simple evaluation fails frequently in practice (bugs or bad DWARF),
    // so failures are silently ignored rather than logged.
    if let Ok(pieces) = evaluate_simple(unit, expression, false) {
        for piece in pieces {
            parameter.locations.push((range, piece));
        }
    }
}
/// Symbolically interprets a DWARF expression without register or memory
/// state.
///
/// Unlike `evaluate`, this does not use gimli's evaluator; it walks the
/// operations directly and produces symbolic `Piece`s whose locations may be
/// register-, frame-, CFA- or TLS-relative. Operations that cannot be
/// modelled produce `Location::Other` so later pieces are still tracked.
///
/// `_object_address` is currently unused.
///
/// # Errors
///
/// Returns an error for malformed expressions (parse failures, stack
/// underflow, or a non-`Piece` operation following a location-terminating
/// operation).
fn evaluate_simple<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    expression: gimli::Expression<Reader<'input, Endian>>,
    _object_address: bool,
) -> Result<Vec<Piece>>
where
    Endian: gimli::Endianity + 'input,
{
    let encoding = unit.encoding();
    // Mask used to emulate wrapping arithmetic at the target address width.
    let addr_mask = if encoding.address_size == 8 {
        !0u64
    } else {
        (1 << (8 * u64::from(encoding.address_size))) - 1
    };
    let mut bytes = expression.0;
    let mut pieces = Vec::new();
    let mut next_bit_offset = 0;
    // Appends a piece, assigning consecutive bit offsets.
    let mut add_piece = |pieces: &mut Vec<Piece>,
                         location: Location,
                         location_offset: u64,
                         is_value: bool,
                         bit_size: Size| {
        let bit_offset = next_bit_offset;
        next_bit_offset += bit_size.get().unwrap_or(0);
        pieces.push(Piece {
            bit_offset,
            bit_size,
            location,
            location_offset,
            is_value,
        });
    };
    let mut stack = Vec::new();
    let pop = |stack: &mut Vec<Location>| match stack.pop() {
        Some(value) => Ok(value),
        None => Err(gimli::Error::NotEnoughStackItems),
    };
    // Set when an operation terminates the current location description
    // (register name, stack value, ...); it must be followed by a `Piece`
    // operation or the end of the expression.
    let mut location = None;
    while !bytes.is_empty() {
        match gimli::Operation::parse(&mut bytes, encoding)? {
            gimli::Operation::Nop => {}
            gimli::Operation::Register { register } => {
                location = Some((
                    Location::Register {
                        register: register.into(),
                    },
                    false,
                ));
            }
            gimli::Operation::ImplicitValue { .. } => {
                // Unimplemented.
                location = Some((Location::Other, true));
            }
            gimli::Operation::ImplicitPointer { .. } => {
                // Unimplemented.
                location = Some((Location::Other, false));
            }
            gimli::Operation::StackValue => {
                location = Some((pop(&mut stack)?, true));
            }
            gimli::Operation::EntryValue { .. }
            | gimli::Operation::ParameterRef { .. }
            | gimli::Operation::TypedLiteral { .. }
            | gimli::Operation::PushObjectAddress => {
                // Unimplemented.
                stack.push(Location::Other);
            }
            gimli::Operation::UnsignedConstant { value } => {
                stack.push(Location::Literal { value });
            }
            gimli::Operation::SignedConstant { value } => {
                stack.push(Location::Literal {
                    value: value as u64,
                });
            }
            gimli::Operation::RegisterOffset {
                register, offset, ..
            } => {
                // TODO: compare this against CFA, and push CfaOffset instead if it matches
                stack.push(Location::RegisterOffset {
                    register: register.into(),
                    offset,
                });
            }
            gimli::Operation::FrameOffset { offset } => {
                stack.push(Location::FrameOffset { offset });
            }
            gimli::Operation::CallFrameCFA => {
                stack.push(Location::CfaOffset { offset: 0 });
            }
            gimli::Operation::Address { address } => {
                stack.push(Location::Address {
                    address: Address::new(address),
                });
            }
            gimli::Operation::AddressIndex { .. } | gimli::Operation::ConstantIndex { .. } => {
                // Unimplemented.
                stack.push(Location::Other);
            }
            gimli::Operation::TLS => {
                // A literal on top of the stack becomes a TLS offset.
                let location = match pop(&mut stack)? {
                    Location::Literal { value } => Location::TlsOffset { offset: value },
                    Location::Other => Location::Other,
                    location => {
                        debug!("unsupported TLS: {:?}", location);
                        Location::Other
                    }
                };
                stack.push(location);
            }
            gimli::Operation::Piece {
                size_in_bits,
                bit_offset,
            } => {
                // An empty stack is valid here and means "piece is absent".
                let location = stack.pop().unwrap_or(Location::Empty);
                add_piece(
                    &mut pieces,
                    location,
                    bit_offset.unwrap_or(0),
                    false,
                    Size::new(size_in_bits),
                );
            }
            gimli::Operation::Drop => {
                pop(&mut stack)?;
            }
            gimli::Operation::Swap => {
                let one = pop(&mut stack)?;
                let two = pop(&mut stack)?;
                stack.push(one);
                stack.push(two);
            }
            gimli::Operation::Rot => {
                let one = pop(&mut stack)?;
                let two = pop(&mut stack)?;
                let three = pop(&mut stack)?;
                stack.push(one);
                stack.push(three);
                stack.push(two);
            }
            gimli::Operation::Pick { index } => {
                let index = index as usize;
                if index >= stack.len() {
                    return Err(gimli::Error::NotEnoughStackItems.into());
                }
                let location = stack[stack.len() - index - 1];
                stack.push(location);
            }
            gimli::Operation::PlusConstant { value: constant } => {
                // Addition wraps at the target's address width.
                let location = match pop(&mut stack)? {
                    Location::Literal { value } => {
                        let value = value.wrapping_add(constant) & addr_mask;
                        Location::Literal { value }
                    }
                    Location::RegisterOffset { register, offset } => {
                        let offset = ((offset as u64).wrapping_add(constant) & addr_mask) as i64;
                        Location::RegisterOffset { register, offset }
                    }
                    Location::FrameOffset { offset } => {
                        let offset = ((offset as u64).wrapping_add(constant) & addr_mask) as i64;
                        Location::FrameOffset { offset }
                    }
                    Location::CfaOffset { offset } => {
                        let offset = ((offset as u64).wrapping_add(constant) & addr_mask) as i64;
                        Location::CfaOffset { offset }
                    }
                    Location::Other => Location::Other,
                    location => {
                        debug!("unsupported PlusConstant: {:?}", location);
                        Location::Other
                    }
                };
                stack.push(location);
            }
            gimli::Operation::Plus => {
                let one = pop(&mut stack)?;
                let two = pop(&mut stack)?;
                // FIX: a binary operation must push its result; previously the
                // match result was computed and then discarded, leaving the
                // stack two entries short.
                let location = match (one, two) {
                    (Location::Other, _) | (_, Location::Other) => Location::Other,
                    (Location::RegisterOffset { .. }, Location::RegisterOffset { .. }) => {
                        // Seen in practice, but we can't handle this yet.
                        Location::Other
                    }
                    location => {
                        debug!("unsupported Plus: {:?}", location);
                        Location::Other
                    }
                };
                stack.push(location);
            }
            gimli::Operation::Minus => {
                let one = pop(&mut stack)?;
                let two = pop(&mut stack)?;
                // FIX: push the result (see `Plus` above); previously the
                // computed location — including the meaningful
                // literal-minus-frame-offset case — was discarded.
                let location = match (one, two) {
                    (Location::Other, _) | (_, Location::Other) => Location::Other,
                    (Location::RegisterOffset { .. }, Location::RegisterOffset { .. }) => {
                        // Seen in practice, but we can't handle this yet.
                        Location::Other
                    }
                    // DWARF minus is "second minus top": `one` is the top.
                    (Location::Literal { value }, Location::FrameOffset { offset }) => {
                        Location::FrameOffset {
                            offset: offset - value as i64,
                        }
                    }
                    location => {
                        debug!("unsupported Minus: {:?}", location);
                        Location::Other
                    }
                };
                stack.push(location);
            }
            gimli::Operation::Neg
            | gimli::Operation::Not
            | gimli::Operation::Abs
            | gimli::Operation::Convert { .. }
            | gimli::Operation::Reinterpret { .. } => {
                // Unimplemented unary operations.
                pop(&mut stack)?;
                stack.push(Location::Other);
            }
            gimli::Operation::Mul
            | gimli::Operation::Div
            | gimli::Operation::Mod
            | gimli::Operation::Shl
            | gimli::Operation::Shr
            | gimli::Operation::Shra
            | gimli::Operation::And
            | gimli::Operation::Or
            | gimli::Operation::Xor
            | gimli::Operation::Eq
            | gimli::Operation::Ne
            | gimli::Operation::Gt
            | gimli::Operation::Ge
            | gimli::Operation::Lt
            | gimli::Operation::Le => {
                // Unimplemented binary operations.
                pop(&mut stack)?;
                pop(&mut stack)?;
                stack.push(Location::Other);
            }
            gimli::Operation::Deref { space, .. } => {
                // Unimplemented.
                pop(&mut stack)?;
                if space {
                    pop(&mut stack)?;
                }
                stack.push(Location::Other);
            }
            gimli::Operation::Bra { .. }
            | gimli::Operation::Skip { .. }
            | gimli::Operation::Call { .. } => {
                // Unimplemented.
                // We can't even push Location::Other for Bra.
                // Skip and Call could be implemented if needed.
                return Ok(pieces);
            }
            gimli::Operation::WasmLocal { .. }
            | gimli::Operation::WasmGlobal { .. }
            | gimli::Operation::WasmStack { .. } => {
                // Unimplemented.
                location = Some((Location::Other, false));
            }
        }
        // A terminating location must be followed by a `Piece` operation or
        // by the end of the expression.
        if let Some((location, is_value)) = location {
            if bytes.is_empty() {
                if !pieces.is_empty() {
                    return Err(gimli::Error::InvalidPiece.into());
                }
                add_piece(&mut pieces, location, 0, is_value, Size::none());
            } else {
                match gimli::Operation::parse(&mut bytes, encoding)? {
                    gimli::Operation::Piece {
                        size_in_bits,
                        bit_offset,
                    } => {
                        add_piece(
                            &mut pieces,
                            location,
                            bit_offset.unwrap_or(0),
                            is_value,
                            Size::new(size_in_bits),
                        );
                    }
                    _ => {
                        return Err(gimli::Error::InvalidPiece.into());
                    }
                }
            }
        }
        location = None;
    }
    // An expression with no explicit pieces describes a single location left
    // on top of the stack.
    if pieces.is_empty() {
        if let Some(location) = stack.pop() {
            add_piece(&mut pieces, location, 0, false, Size::none());
        }
    }
    Ok(pieces)
}
/// Runs gimli's full expression evaluator, resolving relocated addresses
/// as their raw values.
///
/// Returns the resulting pieces, or an empty vector if evaluation fails or
/// needs information we cannot supply. When `object_address` is set, the
/// object address and initial stack value are both fixed to 0.
fn evaluate<'input, Endian>(
    unit: &gimli::UnitHeader<Reader<'input, Endian>>,
    expression: gimli::Expression<Reader<'input, Endian>>,
    object_address: bool,
) -> Vec<gimli::Piece<Reader<'input, Endian>>>
where
    Endian: gimli::Endianity + 'input,
{
    let mut evaluation = expression.evaluation(unit.encoding());
    if object_address {
        evaluation.set_object_address(0);
        evaluation.set_initial_value(0);
    }
    let mut state = evaluation.evaluate();
    loop {
        state = match state {
            Ok(gimli::EvaluationResult::Complete) => return evaluation.result(),
            Ok(gimli::EvaluationResult::RequiresRelocatedAddress(address)) => {
                // No relocation information here; use the address as-is.
                evaluation.resume_with_relocated_address(address)
            }
            Ok(other) => {
                debug!("incomplete evaluation: {:?}", other);
                return Vec::new();
            }
            Err(e) => {
                debug!("evaluation failed: {}", e);
                return Vec::new();
            }
        };
    }
}
impl From<gimli::Range> for Range {
    /// Converts a gimli address range into our `Range` field by field.
    #[inline]
    fn from(r: gimli::Range) -> Range {
        let gimli::Range { begin, end } = r;
        Range { begin, end }
    }
}
impl From<gimli::Register> for Register {
    /// Wraps the raw DWARF register number.
    #[inline]
    fn from(r: gimli::Register) -> Register {
        let gimli::Register(number) = r;
        Register(number)
    }
}
impl From<gimli::UnitSectionOffset> for FunctionOffset {
    /// Converts a `.debug_info` section offset; other sections are a bug.
    #[inline]
    fn from(offset: gimli::UnitSectionOffset) -> FunctionOffset {
        match offset {
            gimli::UnitSectionOffset::DebugInfoOffset(info) => FunctionOffset::new(info.0),
            _ => panic!("unexpected offset {:?}", offset),
        }
    }
}
impl From<gimli::UnitSectionOffset> for ParameterOffset {
    /// Converts a `.debug_info` section offset; other sections are a bug.
    #[inline]
    fn from(offset: gimli::UnitSectionOffset) -> ParameterOffset {
        match offset {
            gimli::UnitSectionOffset::DebugInfoOffset(info) => ParameterOffset::new(info.0),
            _ => panic!("unexpected offset {:?}", offset),
        }
    }
}
impl From<gimli::UnitSectionOffset> for MemberOffset {
    /// Converts a `.debug_info` section offset; other sections are a bug.
    #[inline]
    fn from(offset: gimli::UnitSectionOffset) -> MemberOffset {
        match offset {
            gimli::UnitSectionOffset::DebugInfoOffset(info) => MemberOffset::new(info.0),
            _ => panic!("unexpected offset {:?}", offset),
        }
    }
}
impl From<gimli::UnitSectionOffset> for TypeOffset {
    /// Converts a `.debug_info` section offset; other sections are a bug.
    #[inline]
    fn from(offset: gimli::UnitSectionOffset) -> TypeOffset {
        match offset {
            gimli::UnitSectionOffset::DebugInfoOffset(info) => TypeOffset::new(info.0),
            _ => panic!("unexpected offset {:?}", offset),
        }
    }
}
impl From<gimli::UnitSectionOffset> for VariableOffset {
    /// Converts a `.debug_info` section offset; other sections are a bug.
    #[inline]
    fn from(offset: gimli::UnitSectionOffset) -> VariableOffset {
        match offset {
            gimli::UnitSectionOffset::DebugInfoOffset(info) => VariableOffset::new(info.0),
            _ => panic!("unexpected offset {:?}", offset),
        }
    }
}
/// Resolves a reference attribute to a `.debug_info` section offset.
///
/// References may be unit-relative (`UnitRef`) or already section-relative
/// (`DebugInfoRef`); both are normalized to a section offset. Anything else
/// is logged and yields `None`.
fn parse_debug_info_offset<'input, Endian>(
    dwarf_unit: &DwarfUnit<'input, Endian>,
    attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<gimli::UnitSectionOffset>
where
    Endian: gimli::Endianity,
{
    match attr.value() {
        gimli::AttributeValue::DebugInfoRef(offset) => {
            Some(gimli::UnitSectionOffset::DebugInfoOffset(offset))
        }
        gimli::AttributeValue::UnitRef(offset) => {
            Some(offset.to_unit_section_offset(dwarf_unit))
        }
        value => {
            debug!("unknown offset: {:?}", value);
            None
        }
    }
}
fn parse_function_offset<'input, Endian>(
dwarf_unit: &DwarfUnit<'input, Endian>,
attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<FunctionOffset>
where
Endian: gimli::Endianity,
{
parse_debug_info_offset(dwarf_unit, attr).map(|x| x.into())
}
fn parse_parameter_offset<'input, Endian>(
dwarf_unit: &DwarfUnit<'input, Endian>,
attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<ParameterOffset>
where
Endian: gimli::Endianity,
{
parse_debug_info_offset(dwarf_unit, attr).map(|x| x.into())
}
fn parse_member_offset<'input, Endian>(
dwarf_unit: &DwarfUnit<'input, Endian>,
attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<MemberOffset>
where
Endian: gimli::Endianity,
{
parse_debug_info_offset(dwarf_unit, attr).map(|x| x.into())
}
fn parse_type_offset<'input, Endian>(
dwarf_unit: &DwarfUnit<'input, Endian>,
attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<TypeOffset>
where
Endian: gimli::Endianity,
{
parse_debug_info_offset(dwarf_unit, attr).map(|x| x.into())
}
fn parse_variable_offset<'input, Endian>(
dwarf_unit: &DwarfUnit<'input, Endian>,
attr: &gimli::Attribute<Reader<'input, Endian>>,
) -> Option<VariableOffset>
where
Endian: gimli::Endianity,
{
parse_debug_info_offset(dwarf_unit, attr).map(|x| x.into())
}
/// Fills `source.file` and `source.directory` from a `DW_AT_decl_file`
/// attribute, using the unit's line program file table.
///
/// A file index of 0 means "no file" and leaves `source` untouched.
fn parse_source_file<'input, Endian>(
    dwarf: &DwarfDebugInfo<'input, Endian>,
    dwarf_unit: &DwarfUnit<'input, Endian>,
    attr: &gimli::Attribute<Reader<'input, Endian>>,
    source: &mut Source<'input>,
) where
    Endian: gimli::Endianity,
{
    let index = match attr.value() {
        gimli::AttributeValue::FileIndex(index) => index,
        value => {
            debug!("unknown DW_AT_decl_file attribute value: {:?}", value);
            return;
        }
    };
    if index == 0 {
        // Index 0 is reserved for "unknown file".
        return;
    }
    let line = match dwarf_unit.line_program {
        Some(ref line) => line,
        None => return,
    };
    let entry = match line.header().file(index) {
        Some(entry) => entry,
        None => {
            debug!("invalid file index {}", index);
            return;
        }
    };
    source.file = dwarf.string(dwarf_unit, entry.path_name());
    match entry.directory(line.header()) {
        Some(directory) => source.directory = dwarf.string(dwarf_unit, directory),
        None => debug!("invalid directory index {}", entry.directory_index()),
    }
}
/// Fills `source.line` from a `DW_AT_decl_line` attribute.
///
/// A value of 0 means "no line" and leaves `source` untouched; values that
/// do not fit in `u32` are logged and ignored.
fn parse_source_line<Endian>(attr: &gimli::Attribute<Reader<Endian>>, source: &mut Source)
where
    Endian: gimli::Endianity,
{
    let val = match attr.value() {
        gimli::AttributeValue::Udata(val) => val,
        value => {
            debug!("unknown DW_AT_decl_line attribute value: {:?}", value);
            return;
        }
    };
    if val != 0 {
        if val > u64::from(u32::MAX) {
            debug!("large source line: {}", val);
        } else {
            source.line = val as u32;
        }
    }
}
/// Fills `source.column` from a `DW_AT_decl_column` attribute.
///
/// A value of 0 means "no column" and leaves `source` untouched; values that
/// do not fit in `u32` are logged and ignored.
fn parse_source_column<Endian>(attr: &gimli::Attribute<Reader<Endian>>, source: &mut Source)
where
    Endian: gimli::Endianity,
{
    let val = match attr.value() {
        gimli::AttributeValue::Udata(val) => val,
        value => {
            debug!("unknown DW_AT_decl_column attribute value: {:?}", value);
            return;
        }
    };
    if val != 0 {
        if val > u64::from(u32::MAX) {
            debug!("large source column: {}", val);
        } else {
            source.column = val as u32;
        }
    }
}
/// Combined call-frame information from both `.debug_frame` and `.eh_frame`.
struct DwarfFrame<R: gimli::Reader<Offset = usize>> {
    // CFI from the `.debug_frame` section.
    debug_frame: DebugFrameTable<R>,
    // CFI from the `.eh_frame` section.
    eh_frame: EhFrameTable<R>,
}
impl<R: gimli::Reader<Offset = usize>> DwarfFrame<R> {
    /// Builds lookup tables for both frame sections up front.
    fn new(
        debug_frame: gimli::DebugFrame<R>,
        eh_frame: gimli::EhFrame<R>,
        bases: gimli::BaseAddresses,
    ) -> Self {
        let debug_frame = DebugFrameTable::new(debug_frame);
        let eh_frame = EhFrameTable::new(eh_frame, bases);
        DwarfFrame {
            debug_frame,
            eh_frame,
        }
    }
    /// Looks up CFI for `range`, preferring `.eh_frame` over `.debug_frame`.
    fn get_cfi(&self, range: Range) -> Option<Vec<Cfi>> {
        let found = match self.eh_frame.get_cfi(range) {
            Some(cfi) => Some(cfi),
            None => self.debug_frame.get_cfi(range),
        };
        if found.is_none() {
            debug!("no FDE for 0x{:x}[0x{:x}]", range.begin, range.size());
        }
        found
    }
}
/// `.debug_frame` section together with a prebuilt FDE offset index.
struct DebugFrameTable<R: gimli::Reader<Offset = usize>> {
    debug_frame: gimli::DebugFrame<R>,
    // `.debug_frame` needs no base addresses; kept for the shared lookup path.
    bases: gimli::BaseAddresses,
    // Maps address ranges to FDE offsets for fast lookup.
    fdes: FdeOffsetTable,
}
impl<R: gimli::Reader<Offset = usize>> DebugFrameTable<R> {
    /// Indexes all FDEs in the section for later range lookups.
    fn new(debug_frame: gimli::DebugFrame<R>) -> Self {
        // `.debug_frame` does not require any base addresses.
        let bases = gimli::BaseAddresses::default();
        DebugFrameTable {
            fdes: FdeOffsetTable::new(&debug_frame, &bases),
            debug_frame,
            bases,
        }
    }
    /// Looks up the CFI directives covering `range`, if any.
    fn get_cfi(&self, range: Range) -> Option<Vec<Cfi>> {
        get_cfi(&self.debug_frame, &self.bases, &self.fdes, range)
    }
}
/// `.eh_frame` section together with a prebuilt FDE offset index.
struct EhFrameTable<R: gimli::Reader<Offset = usize>> {
    eh_frame: gimli::EhFrame<R>,
    // Base addresses needed to decode `.eh_frame` pointers.
    bases: gimli::BaseAddresses,
    // Maps address ranges to FDE offsets for fast lookup.
    fdes: FdeOffsetTable,
}
impl<R: gimli::Reader<Offset = usize>> EhFrameTable<R> {
    /// Indexes all FDEs in the section for later range lookups.
    fn new(eh_frame: gimli::EhFrame<R>, bases: gimli::BaseAddresses) -> Self {
        EhFrameTable {
            fdes: FdeOffsetTable::new(&eh_frame, &bases),
            eh_frame,
            bases,
        }
    }
    /// Looks up the CFI directives covering `range`, if any.
    fn get_cfi(&self, range: Range) -> Option<Vec<Cfi>> {
        get_cfi(&self.eh_frame, &self.bases, &self.fdes, range)
    }
}
/// Index from address ranges to FDE section offsets, sorted by range start.
struct FdeOffsetTable {
    // (function address range, FDE offset), sorted by range for binary search.
    offsets: Vec<(Range, usize)>,
}
impl FdeOffsetTable {
    /// Scans every FDE in an unwind section and records its address range
    /// and offset, sorted by range for binary search in `find`.
    ///
    /// Unparseable entries are skipped silently.
    fn new<R: gimli::Reader<Offset = usize>, S: gimli::UnwindSection<R>>(
        section: &S,
        bases: &gimli::BaseAddresses,
    ) -> Self
    where
        S::Offset: gimli::UnwindOffset,
    {
        let mut offsets = Vec::new();
        let mut entries = section.entries(bases);
        while let Ok(Some(entry)) = entries.next() {
            match entry {
                // CIEs carry no address range of their own; skip them.
                gimli::CieOrFde::Cie(_) => {}
                gimli::CieOrFde::Fde(partial) => {
                    if let Ok(fde) = partial.parse(S::cie_from_offset) {
                        let range = Range {
                            begin: fde.initial_address(),
                            end: fde.initial_address() + fde.len(),
                        };
                        offsets.push((range, fde.offset()));
                    }
                }
            }
        }
        offsets.sort_by_key(|x| x.0);
        FdeOffsetTable { offsets }
    }
    /// Returns the section offset of the FDE whose range contains `address`,
    /// or `None` if no recorded range covers it.
    fn find(&self, address: u64) -> Option<usize> {
        // FIXME: doesn't handle overlapping
        // Binary search by range start: an exact hit is the candidate; a miss
        // means the candidate is the last range starting before `address`.
        let index = match self.offsets.binary_search_by_key(&address, |x| x.0.begin) {
            Ok(x) => Some(x),
            Err(x) => {
                if x > 0 {
                    Some(x - 1)
                } else {
                    None
                }
            }
        };
        if let Some(index) = index {
            // Verify the candidate range actually contains the address
            // (end is exclusive).
            let (range, offset) = self.offsets[index];
            if range.begin <= address && range.end > address {
                return Some(offset);
            }
        }
        None
    }
}
/// Converts the FDE covering `range` into a list of assembler-style CFI
/// directives, each tagged with the address it takes effect at.
///
/// Returns `None` if no FDE is indexed for the range's start address or the
/// FDE fails to parse. CIE initial instructions are emitted with no address
/// (`Address::none()`); FDE instructions carry the advancing location.
fn get_cfi<R: gimli::Reader, S: gimli::UnwindSection<R>>(
    section: &S,
    bases: &gimli::BaseAddresses,
    fdes: &FdeOffsetTable,
    range: Range,
) -> Option<Vec<Cfi>>
where
    S::Offset: gimli::UnwindOffset,
{
    let address = range.begin;
    let size = range.size();
    let fde_offset = S::Offset::from(fdes.find(address)?);
    let fde = section
        .fde_from_offset(bases, fde_offset, S::cie_from_offset)
        .ok()?;
    // The FDE found by start address may not exactly match the requested
    // function extent; report but still use it.
    if (address, size) != (fde.initial_address(), fde.len()) {
        debug!(
            "FDE address mismatch: want function 0x{:x}[0x{:x}], found FDE 0x{:x}[0x{:x}]",
            address,
            size,
            fde.initial_address(),
            fde.len(),
        );
    }
    let mut cfi = Vec::new();
    cfi.push((Address::none(), CfiDirective::StartProc));
    if let Some(personality) = fde.personality() {
        // TODO: better handling of indirect
        let address = match personality {
            gimli::Pointer::Direct(x) => Address::new(x),
            gimli::Pointer::Indirect(x) => Address::new(x),
        };
        cfi.push((Address::none(), CfiDirective::Personality(address)));
    }
    if let Some(lsda) = fde.lsda() {
        // TODO: better handling of indirect
        let address = match lsda {
            gimli::Pointer::Direct(x) => Address::new(x),
            gimli::Pointer::Indirect(x) => Address::new(x),
        };
        cfi.push((Address::none(), CfiDirective::Lsda(address)));
    }
    if fde.is_signal_trampoline() {
        cfi.push((Address::none(), CfiDirective::SignalFrame));
    }
    // CIE initial instructions: emitted without an address.
    let cie = fde.cie();
    let mut address = 0;
    let mut instructions = cie.instructions(section, bases);
    while let Ok(Some(instruction)) = instructions.next() {
        if let Some(directive) = convert_cfi(cie, instruction, &mut address) {
            cfi.push((Address::none(), directive))
        }
    }
    // FDE instructions: the location advances via SetLoc/AdvanceLoc inside
    // convert_cfi, and each directive is tagged with the current location.
    let mut address = fde.initial_address();
    let mut instructions = fde.instructions(section, bases);
    while let Ok(Some(instruction)) = instructions.next() {
        if let Some(directive) = convert_cfi(cie, instruction, &mut address) {
            cfi.push((Address::new(address), directive))
        }
    }
    cfi.push((
        Address::new(fde.initial_address() + fde.len()),
        CfiDirective::EndProc,
    ));
    Some(cfi)
}
/// Converts a single gimli call-frame instruction into a `CfiDirective`.
///
/// Location instructions (`SetLoc`/`AdvanceLoc`) update `loc` in place and
/// yield no directive. Factored offsets are multiplied out using the CIE's
/// code/data alignment factors, matching the DWARF CFI encoding.
fn convert_cfi<R: gimli::Reader>(
    cie: &gimli::CommonInformationEntry<R>,
    instruction: gimli::CallFrameInstruction<R>,
    loc: &mut u64,
) -> Option<CfiDirective> {
    match instruction {
        gimli::CallFrameInstruction::SetLoc { address } => {
            *loc = address;
            None
        }
        gimli::CallFrameInstruction::AdvanceLoc { delta } => {
            // Deltas are scaled by the CIE code alignment factor.
            *loc += delta as u64 * cie.code_alignment_factor();
            None
        }
        gimli::CallFrameInstruction::DefCfa { register, offset } => {
            Some(CfiDirective::DefCfa(register.into(), offset as i64))
        }
        gimli::CallFrameInstruction::DefCfaSf {
            register,
            factored_offset,
        } => {
            // Signed factored form: scale by the data alignment factor.
            let offset = factored_offset * cie.data_alignment_factor();
            Some(CfiDirective::DefCfa(register.into(), offset as i64))
        }
        gimli::CallFrameInstruction::DefCfaRegister { register } => {
            Some(CfiDirective::DefCfaRegister(register.into()))
        }
        gimli::CallFrameInstruction::DefCfaOffset { offset } => {
            Some(CfiDirective::DefCfaOffset(offset as i64))
        }
        gimli::CallFrameInstruction::DefCfaOffsetSf { factored_offset } => {
            let offset = factored_offset * cie.data_alignment_factor();
            Some(CfiDirective::DefCfaOffset(offset as i64))
        }
        gimli::CallFrameInstruction::Offset {
            register,
            factored_offset,
        } => {
            let offset = factored_offset as i64 * cie.data_alignment_factor();
            Some(CfiDirective::Offset(register.into(), offset))
        }
        gimli::CallFrameInstruction::OffsetExtendedSf {
            register,
            factored_offset,
        } => {
            let offset = factored_offset * cie.data_alignment_factor();
            Some(CfiDirective::Offset(register.into(), offset))
        }
        gimli::CallFrameInstruction::ValOffset {
            register,
            factored_offset,
        } => {
            let offset = factored_offset as i64 * cie.data_alignment_factor();
            Some(CfiDirective::ValOffset(register.into(), offset))
        }
        gimli::CallFrameInstruction::ValOffsetSf {
            register,
            factored_offset,
        } => {
            let offset = factored_offset * cie.data_alignment_factor();
            Some(CfiDirective::ValOffset(register.into(), offset))
        }
        gimli::CallFrameInstruction::Register {
            dest_register,
            src_register,
        } => Some(CfiDirective::Register(
            dest_register.into(),
            src_register.into(),
        )),
        gimli::CallFrameInstruction::Undefined { register } => {
            Some(CfiDirective::Undefined(register.into()))
        }
        gimli::CallFrameInstruction::SameValue { register } => {
            Some(CfiDirective::SameValue(register.into()))
        }
        gimli::CallFrameInstruction::Restore { register } => {
            Some(CfiDirective::Restore(register.into()))
        }
        gimli::CallFrameInstruction::RememberState => Some(CfiDirective::RememberState),
        gimli::CallFrameInstruction::RestoreState => Some(CfiDirective::RestoreState),
        // Expression-based and args-size instructions are not converted;
        // emit a placeholder so the caller knows something was here.
        gimli::CallFrameInstruction::ArgsSize { .. }
        | gimli::CallFrameInstruction::DefCfaExpression { .. }
        | gimli::CallFrameInstruction::Expression { .. }
        | gimli::CallFrameInstruction::ValExpression { .. } => {
            debug!("Unhandled CFI: {:?}", instruction);
            Some(CfiDirective::Other)
        }
        gimli::CallFrameInstruction::Nop => None,
    }
}
|
use bit_vec::BitVec;
use rand::distributions::{Bernoulli, Distribution};
use std::convert::TryInto;
#[derive(Debug)]
pub struct BitMap {
    // Two buffers of identical length; `rule_step` writes into the inactive
    // one and then swaps, a classic double-buffering scheme.
    bit_vector: Box<[BitVec; 2]>,
    // Selects the active buffer (false -> index 0, true -> index 1).
    current_index: bool,
    // Number of bits in each buffer; always non-zero (checked at creation).
    len: usize,
}
// Arbitrary size bitmap where most significant bit is leftmost
impl BitMap {
    /// Creates an all-zero bitmap of `length` bits.
    ///
    /// Panics if `length` is 0 or does not fit in `usize`.
    pub fn new(length: u64) -> BitMap {
        if length == 0 {
            panic!("Cannot create 0-length bit-map");
        }
        let bit_vector = [
            BitVec::from_elem(length.try_into().unwrap(), false),
            BitVec::from_elem(length.try_into().unwrap(), false),
        ];
        BitMap {
            bit_vector: Box::new(bit_vector),
            current_index: false,
            len: length as usize,
        }
    }
    /// Creates a bitmap whose bits are independently 1 with probability
    /// `density`.
    ///
    /// Panics if `density` is outside [0, 1], if `length` is 0, or if
    /// `length` does not fit in `usize`.
    pub fn random(length: u64, density: f64) -> BitMap {
        // NOTE(review): the Bernoulli distribution is built before the
        // zero-length check, so an invalid density panics first.
        let d = Bernoulli::new(density).unwrap();
        if length == 0 {
            panic!("Cannot create 0-length bit-map");
        }
        let mut rng = rand::thread_rng();
        let bit_vector = [
            BitVec::from_fn(length.try_into().unwrap(), |_| d.sample(&mut rng)),
            BitVec::from_elem(length.try_into().unwrap(), false),
        ];
        BitMap {
            bit_vector: Box::new(bit_vector),
            current_index: false,
            len: length as usize,
        }
    }
    // if bit_num is less than len, then return bit at that position, otherwise panic
    pub fn get(&self, bit_num: usize) -> u8 {
        // Read from the active buffer only.
        let bit_vector = &self.bit_vector[self.current_index as usize];
        match bit_vector.get(bit_num) {
            Some(boolean) => {
                if boolean {
                    1
                } else {
                    0
                }
            }
            None => {
                panic!("Invalid bit index! Must be less than {}", self.len)
            }
        }
    }
    /// Sets the bit at `bit_num` to 1 in the active buffer.
    ///
    /// Panics if `bit_num >= len`.
    pub fn set(&mut self, bit_num: usize) {
        let bit_vector = &mut self.bit_vector[self.current_index as usize];
        if bit_num >= self.len {
            panic!("Invalid bit index! Must be less than {}", self.len);
        }
        bit_vector.set(bit_num, true);
    }
    /// Sets the bit at `bit_num` to 0 in the active buffer.
    ///
    /// Panics if `bit_num >= len`.
    pub fn unset(&mut self, bit_num: usize) {
        let bit_vector = &mut self.bit_vector[self.current_index as usize];
        if bit_num >= self.len {
            panic!("Invalid bit index! Must be less than {}", self.len);
        }
        bit_vector.set(bit_num, false);
    }
    /// Returns the number of bits in the bitmap.
    pub fn size(&self) -> usize {
        self.len
    }
    /// Returns the active buffer's bits as a `Vec<bool>`.
    pub fn get_vec(&self) -> Vec<bool> {
        let bit_vector = &self.bit_vector[self.current_index as usize];
        let mut rv: Vec<bool> = Vec::with_capacity(self.len);
        for v in bit_vector.iter() {
            rv.push(v);
        }
        rv
    }
    /// Returns the active buffer's bits as a `Vec<u8>` of 0s and 1s.
    pub fn to_bit_vec(&self) -> Vec<u8> {
        let mut bit_vec: Vec<u8> = Vec::with_capacity(self.len);
        for i in 0..self.len {
            bit_vec.push(self.get(i));
        }
        bit_vec
    }
    /// Sets every bit of the active buffer to 0 (length is unchanged).
    pub fn clear(&mut self) {
        let bit_vector = &mut self.bit_vector[self.current_index as usize];
        bit_vector.clear();
    }
    /// Advances one step of the elementary cellular automaton identified by
    /// `rule` (Wolfram rule number), with circular wraparound at both ends.
    ///
    /// The new generation is written into the inactive buffer, then the
    /// buffers are swapped.
    pub fn rule_step(&mut self, rule: u8) {
        // Reset the inactive buffer; it will receive the next generation.
        self.bit_vector[!self.current_index as usize].clear();
        enum Offset {
            PlusOne,
            Zero,
            MinusOne,
        }
        let len = self.size();
        for i in 0..len {
            // Build the 3-bit neighborhood: bit 2 = cell i+1 (wrapping),
            // bit 1 = cell i, bit 0 = cell i-1 (wrapping).
            let mut flags: u8 = 0;
            for offset in [Offset::PlusOne, Offset::Zero, Offset::MinusOne].iter() {
                let (flag_mask, index) = match offset {
                    Offset::PlusOne => (0b100, (i + 1) % self.size()),
                    Offset::Zero => (0b010, i),
                    Offset::MinusOne => {
                        if i == 0 {
                            (0b001, len - 1)
                        } else {
                            (0b001, i - 1)
                        }
                    }
                };
                if self.get(index) == 1 {
                    flags |= flag_mask;
                }
            }
            // The rule byte's bit at position `flags` decides the new cell.
            let new_val = rule & (1 << flags);
            if new_val != 0 {
                self.bit_vector[!self.current_index as usize].set(i, true);
            }
        }
        // Swap buffers: the freshly written generation becomes active.
        self.current_index = !self.current_index;
    }
}
|
fn main() {
    // Render the scene and display the resulting image.
    raytracer::show_image(raytracer::render());
}
|
pub mod login_controller;
pub mod posts_controller;
pub use login_controller::*;
pub use posts_controller::*;
|
use Result;
use cameras::Config;
use glacio::camera::Image;
/// A summary of information about an image.
///
/// Serialized (via serde) for API responses.
#[derive(Debug, Serialize)]
pub struct Summary {
    /// The image's date and time, as an RFC 3339 string.
    pub datetime: String,
    /// The image's url on a remote server.
    pub url: String,
}
impl Summary {
    /// Creates a new summary from a server and an `Image`.
    ///
    /// Fails if the server configuration cannot be loaded or the image's
    /// url cannot be resolved.
    pub fn new(image: &Image, config: &Config) -> Result<Summary> {
        let server = config.server()?;
        let url = server.url_for(image)?;
        let summary = Summary {
            datetime: image.datetime().to_rfc3339(),
            url: url.as_ref().to_string(),
        };
        Ok(summary)
    }
}
|
use alloc::boxed::Box;
use core::fmt::Debug;
use crate::ClientContext;
/// An asset resolver context allows clients to provide additional data
/// to the resolver for use during resolution. Clients may provide this
/// data via a context object of their own (subject to restrictions below).
/// An ArResolverContext is simply a wrapper around this object that
/// allows it to be treated as a single type.
///
/// A client-defined context object must implement the following traits:
/// - [`std::clone::Clone`]
/// - [`std::fmt::Debug`]
/// - [`std::cmp::PartialOrd`]
/// - [`std::cmp::PartialEq`]
/// - [`std::hash::Hash`]
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd)]
pub struct ResolverContext {
    // `None` means the resolver context is empty (no client context held).
    context: Option<Box<dyn ClientContext>>,
}
impl ResolverContext {
    /// Constructor
    ///
    /// # Examples
    /// ```
    /// use ar::{ClientContext, ResolverContext};
    ///
    /// #[derive(Clone, Debug, PartialEq, PartialOrd, Hash)]
    /// struct Context1 {
    ///     id: usize,
    /// }
    ///
    /// impl ClientContext for Context1 {}
    ///
    /// let client_ctx = Context1 { id: 1 };
    /// ResolverContext::new(client_ctx);
    /// ```
    pub fn new(context: impl ClientContext + 'static) -> Self {
        Self {
            context: Some(Box::new(context)),
        }
    }
    /// Return pointer to the context object held in this asset resolver
    /// context
    ///
    /// Returns `Some` only when the held client context downcasts to the
    /// requested `Context` type; otherwise `None`.
    ///
    /// # Examples
    /// ```
    /// use ar::{ClientContext, ResolverContext};
    ///
    /// #[derive(Clone, Debug, PartialOrd, PartialEq, Hash)]
    /// struct Context1 {
    ///     id: usize,
    /// }
    ///
    /// impl ClientContext for Context1 {}
    ///
    /// #[derive(Clone, Debug, PartialOrd, PartialEq, Hash)]
    /// struct Context2 {
    ///     id: usize,
    /// }
    ///
    /// impl ClientContext for Context2 {}
    ///
    /// let client_ctx = Context1 { id: 1 };
    /// let resolver = ResolverContext::new(client_ctx);
    /// assert!(resolver.get::<Context2>().is_none());
    /// assert!(resolver.get::<Context1>().is_some());
    /// ```
    pub fn get<Context>(&self) -> Option<&dyn ClientContext>
    where
        Context: ClientContext + 'static,
    {
        if let Some(context) = &self.context {
            // Use `as_any` downcasting to check the concrete type.
            if context
                .as_ref()
                .as_any()
                .downcast_ref::<Context>()
                .is_some()
            {
                return Some(context.as_ref());
            }
        }
        None
    }
    /// Returns whether this resolver context is empty.
    pub fn is_empty(&self) -> bool {
        // BUG FIX: this previously returned `is_some()`, the inverse of the
        // documented meaning. Empty means no client context is held.
        self.context.is_none()
    }
}
|
use std::fs::File;
use std::io::Write;
/// Emits an indented `mov des, src` instruction line into `f`.
pub fn mov(des: &str, src: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm mov: Unable to write to the file.");
    let instruction = writeln!(f, "mov {}, {}", des, src);
    instruction.expect("asm: Unable to write to the file.");
}
/// Emits an indented `ret` instruction line into `f`.
///
/// Panics if the file cannot be written.
pub fn ret(f: &mut File) {
    // FIX: the panic messages previously said "asm ret: stat_return:",
    // a leftover copy-paste label that misattributed the failure site.
    write!(f, " ").expect("asm ret: Unable to write to the file.");
    write!(f, "ret\n").expect("asm ret: Unable to write to the file.");
}
/// Emits an indented `neg des` instruction line into `f`.
pub fn neg(des: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm neg: Unable to write to the file.");
    let instruction = writeln!(f, "neg {}", des);
    instruction.expect("asm neg: Unable to write to the file.");
}
/// Emits an indented `not des` instruction line into `f`.
///
/// Panics if the file cannot be written.
pub fn not(des: &str, f: &mut File) {
    // FIX: the panic messages previously said "asm neg:", a copy-paste
    // error that misattributed failures to the wrong emitter.
    write!(f, " ").expect("asm not: Unable to write to the file.");
    write!(f, "not {}\n", des).expect("asm not: Unable to write to the file.");
}
/// Emits an indented `cmp des, src` instruction line into `f`.
///
/// Panics if the file cannot be written.
pub fn cmp(des: &str, src: &str, f: &mut File) {
    // FIX: the panic messages previously said "asm neg:", a copy-paste
    // error that misattributed failures to the wrong emitter.
    write!(f, " ").expect("asm cmp: Unable to write to the file.");
    write!(f, "cmp {}, {}\n", des, src).expect("asm cmp: Unable to write to the file.");
}
/// Emits an indented `sete des` instruction line into `f`.
///
/// Panics if the file cannot be written.
pub fn sete(des: &str, f: &mut File) {
    // FIX: the panic messages previously said "asm neg:", a copy-paste
    // error that misattributed failures to the wrong emitter.
    write!(f, " ").expect("asm sete: Unable to write to the file.");
    write!(f, "sete {}\n", des).expect("asm sete: Unable to write to the file.");
}
/// Emits an indented `push des` instruction line into `f`.
pub fn push(des: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm push: Unable to write to the file.");
    let instruction = writeln!(f, "push {}", des);
    instruction.expect("asm push: Unable to write to the file.");
}
/// Emits an indented `pop des` instruction line into `f`.
pub fn pop(des: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm pop: Unable to write to the file.");
    let instruction = writeln!(f, "pop {}", des);
    instruction.expect("asm pop: Unable to write to the file.");
}
/// Emits an indented `add des, src` instruction line into `f`.
pub fn add(des: &str, src: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm add: Unable to write to the file.");
    let instruction = writeln!(f, "add {}, {}", des, src);
    instruction.expect("asm add: Unable to write to the file.");
}
/// Emits an indented `sub des, src` instruction line into `f`.
pub fn sub(des: &str, src: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm sub: Unable to write to the file.");
    let instruction = writeln!(f, "sub {}, {}", des, src);
    instruction.expect("asm sub: Unable to write to the file.");
}
/// Emits an indented `imul des, src` instruction line into `f`.
pub fn imul(des: &str, src: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm imul: Unable to write to the file.");
    let instruction = writeln!(f, "imul {}, {}", des, src);
    instruction.expect("asm imul: Unable to write to the file.");
}
/// Emits an indented `idiv des` instruction line into `f`.
pub fn idiv(des: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm idiv: Unable to write to the file.");
    let instruction = writeln!(f, "idiv {}", des);
    instruction.expect("asm idiv: Unable to write to the file.");
}
/// Emits an indented `xor des, src` instruction line into `f`.
pub fn xor(des: &str, src: &str, f: &mut File) {
    let indent = write!(f, " ");
    indent.expect("asm xor: Unable to write to the file.");
    let instruction = writeln!(f, "xor {}, {}", des, src);
    instruction.expect("asm xor: Unable to write to the file.");
}
|
#![deny(missing_docs)]
#![allow(clippy::all)] // generated code is not clippy friendly
//! Flat OpenStreetMap (OSM) data format providing an efficient *random* data
//! access through [memory mapped files].
//!
//! The data format is described and implemented in [flatdata]. The [schema]
//! describes the fundamental OSM data structures: nodes, ways, relations and
//! tags as simple non-nested data structures. The relations between these are
//! expressed through indexes.
//!
//! ## Examples
//!
//! Open a flatdata archive (compiled from pbf with [`osmflatc`]) and iterate
//! through nodes:
//!
//! ```rust,no_run
//! use osmflat::{FileResourceStorage, Osm};
//!
//! fn main() {
//! let storage = FileResourceStorage::new("path/to/archive.osm.flatdata");
//! let archive = Osm::open(storage).unwrap();
//!
//! for node in archive.nodes().iter() {
//! println!("{:?}", node);
//! }
//! }
//! ```
//!
//! For more examples, see the [examples] directory.
//!
//! [flatdata]: https://github.com/heremaps/flatdata
//! [schema]: https://github.com/boxdot/osmflat-rs/blob/master/flatdata/osm.flatdata
//! [memory mapped files]: https://en.wikipedia.org/wiki/Memory-mapped_file
//! [`osmflatc`]: https://github.com/boxdot/osmflat-rs/tree/master/osmflatc
//! [examples]: https://github.com/boxdot/osmflat-rs/tree/master/osmflat/examples
// generated osm module
include!("osmflat_generated.rs");
mod tags;
pub use crate::osm::*;
pub use crate::tags::*;
// re-export what is needed from flatdata to use osmflat
pub use flatdata::FileResourceStorage;
#[cfg(feature = "tar")]
pub use flatdata::TarArchiveResourceStorage;
|
use std::time::Duration;
use std::time::Instant;
use std::time::SystemTime;
/// Runs `body`, logging its wall-clock duration under `tag` at trace level,
/// and returns whatever the closure produced.
pub fn timed<T>(tag: &str, body: impl FnOnce() -> T) -> T {
    let start = Instant::now();
    let result = body();
    let elapsed = Instant::now() - start;
    log::trace!("run of {} {:?}", tag, elapsed);
    result
}
/// Measures closures via [`Tracer::run`] and periodically logs a smoothed
/// run duration at trace level.
pub struct Tracer {
    // Wall-clock time of the last emitted report.
    last_time: SystemTime,
    // Minimum number of seconds between reports.
    report_every: u8,
    // Running average of observed run durations.
    counter: Duration,
    // Label included in every log line.
    tag: String,
}
impl Tracer {
    /// Creates a tracer that logs a smoothed duration under `tag`, at most
    /// once every `report_every` seconds.
    pub fn new(tag: String, report_every: u8) -> Self {
        // Fixed typo in the log message ("insance" -> "instance").
        log::trace!("instance of {}", std::any::type_name::<Self>());
        Self {
            last_time: SystemTime::now(),
            report_every,
            // Seed the running average with a tiny non-zero duration
            // ("Just because!" per the original author).
            counter: Duration::from_nanos(10),
            tag,
        }
    }

    /// Runs `body`, folds its wall-clock duration into the running average,
    /// and — when `report_every` seconds have elapsed since the last report —
    /// logs the latest sample and resets the average to it.
    ///
    /// Returns the closure's result unchanged.
    pub fn run<T>(&mut self, body: impl FnOnce() -> T) -> T {
        let start = Instant::now();
        let result = body();
        let end = Instant::now();
        let new_ts = SystemTime::now();
        let dur = end - start;
        // Simple smoothing: average the previous counter with the new sample.
        self.counter = (self.counter + dur) / 2;
        // `SystemTime` can go backwards; in that case we silently skip the
        // report (same behavior as the original `Err(_) => ()` arm).
        if let Ok(elapsed) = new_ts.duration_since(self.last_time) {
            if elapsed >= Duration::from_secs(self.report_every as u64) {
                self.last_time = new_ts;
                self.counter = dur;
                log::trace!("run of {} {:?}", self.tag, self.counter);
            }
        }
        result
    }
}
|
mod details;
mod index;
pub use details::*;
pub use index::*;
use crate::page::AppRoute;
use patternfly_yew::*;
use std::fmt::Formatter;
use std::str::FromStr;
use yew_router::prelude::*;
/// Top-level routes of this section: an index page and a per-application
/// details page addressed by application name.
#[derive(Switch, Debug, Clone, PartialEq, Eq)]
pub enum Pages {
    // Matches `/{name}/...`; the remaining path tail is parsed into a
    // `DetailsSection` by its own `Switch` implementation.
    #[to = "/{name}/{*:details}"]
    Details {
        name: String,
        details: DetailsSection,
    },
    #[to = "/"]
    Index,
}
/// Sub-routes (tabs) within the application details page.
#[derive(Switch, Debug, Clone, PartialEq, Eq)]
pub enum DetailsSection {
    #[to = "integrations"]
    Integrations,
    #[to = "yaml"]
    Yaml,
    // `#[end]` makes Overview the match when no other suffix applies.
    #[end]
    Overview,
}
/// Tab bar that routes between the details sections.
pub type ApplicationTabs = TabsRouter<AppRoute, DetailsSection>;
/// Selects which application a view operates on: either a single named
/// application or no restriction at all.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ApplicationContext {
    Any,
    Single(String),
}

impl Default for ApplicationContext {
    /// The default context places no restriction on the application.
    fn default() -> Self {
        ApplicationContext::Any
    }
}

impl core::fmt::Display for ApplicationContext {
    /// Renders the application name, or nothing for `Any`.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        match self {
            ApplicationContext::Single(name) => f.write_str(name),
            ApplicationContext::Any => Ok(()),
        }
    }
}

impl FromStr for ApplicationContext {
    type Err = ();

    /// Inverse of `Display`: the empty string maps to `Any`, any other
    /// string names a single application. Never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.is_empty() {
            Ok(ApplicationContext::Any)
        } else {
            Ok(ApplicationContext::Single(s.to_string()))
        }
    }
}
|
use std::fmt;
use actix::prelude::*;
use once_cell::sync::Lazy;
use regex::Regex;
use scraper::{ElementRef, Html};
use serde::{Deserialize, Serialize};
use crate::caltrain_status::Error::{HtmlError, InvalidIntError};
// Matches the first run of ASCII digits; used by the HTML scraper to pull
// a minute count out of an arrival-time cell.
static NUMERIC: Lazy<Regex> = Lazy::new(|| Regex::new("[0-9]+").unwrap());
/// Service class of a Caltrain run. The derived ordering follows
/// declaration order (`Local < Limited < BabyBullet`).
#[derive(Serialize, Deserialize, Clone, Copy, PartialOrd, Ord, Eq, PartialEq, Debug)]
pub enum TrainType {
    Local,
    Limited,
    BabyBullet,
}
impl<T: AsRef<str>> From<T> for TrainType {
    /// Classifies a train-description string by substring match.
    ///
    /// # Panics
    /// Panics when the string mentions no known train type.
    fn from(s: T) -> Self {
        let text = s.as_ref();
        if text.contains("Local") {
            TrainType::Local
        } else if text.contains("Limited") {
            TrainType::Limited
        } else if text.contains("Baby Bullet") {
            TrainType::BabyBullet
        } else {
            panic!("error, unknown train type: {}", text);
        }
    }
}
impl fmt::Display for TrainType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use TrainType::*;
match self {
Local => write!(f, "Local"),
Limited => write!(f, "Limited"),
BabyBullet => write!(f, "Baby Bullet"),
}
}
}
/// A single upcoming train as scraped from the status page.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct IncomingTrain {
    // Train number.
    id: u16,
    // Service class (Local / Limited / Baby Bullet).
    ttype: TrainType,
    // Minutes until departure; `from_html` stores a sentinel when the page
    // shows no numeric time.
    min_till_departure: u16,
}
impl IncomingTrain {
fn new(id: u16, ttype: TrainType, min_till_arrival: u16) -> Self {
IncomingTrain {
id,
ttype,
min_till_departure: min_till_arrival,
}
}
pub fn get_id(&self) -> u16 {
self.id
}
pub fn get_train_type(&self) -> TrainType {
self.ttype
}
pub fn get_min_till_departure(&self) -> u16 {
self.min_till_departure
}
}
/// Direction of travel on the line.
#[derive(Serialize, Deserialize, Clone, Copy, PartialOrd, Ord, Eq, PartialEq, Debug)]
pub enum Direction {
    Northbound,
    Southbound,
}
/// Parsed real-time departures for one station, grouped by direction.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct CaltrainStatus {
    northbound: Vec<IncomingTrain>,
    southbound: Vec<IncomingTrain>,
}
impl CaltrainStatus {
    /// Returns the parsed departures as `(northbound, southbound)` slices.
    pub fn get_status(&self) -> (&[IncomingTrain], &[IncomingTrain]) {
        (self.northbound.as_ref(), self.southbound.as_ref())
    }
    /// Scrapes a Caltrain real-time status HTML page into a `CaltrainStatus`.
    ///
    /// The page is expected to contain `ipf-st-ip-trains-subtable` tables —
    /// the first encountered feeds `southbound`, the second `northbound` —
    /// whose cells carry `...-td-id`, `...-td-type` and `...-td-arrivaltime`
    /// classes. Returns an error when a train id or minute count fails to
    /// parse as an integer.
    pub fn from_html<T: AsRef<str>>(text: T) -> Result<CaltrainStatus, Error> {
        // Mutable scratch state threaded through the recursive DOM walk.
        struct WalkerState {
            // Pending cell values; a train row is flushed once all three
            // are present.
            train_id: Option<String>,
            train_type: Option<String>,
            time_till_departure: Option<String>,
            // Most recent text node seen anywhere below the current element.
            last_text: Option<String>,
            // Number of subtables seen so far: 1 = southbound, 2 = northbound.
            current_table_no: i32,
            // Which cell class was seen last; gives meaning to `last_text`.
            last_read_class: Option<LastReadClass>,
            northbound: Vec<IncomingTrain>,
            southbound: Vec<IncomingTrain>,
        }
        let mut state = WalkerState {
            train_id: None,
            train_type: None,
            time_till_departure: None,
            last_text: None,
            current_table_no: 0,
            last_read_class: None,
            southbound: vec![],
            northbound: vec![],
        };
        #[derive(Clone, Copy)]
        enum LastReadClass {
            TrainId,
            TrainType,
            TimeTillArrival,
        }
        use LastReadClass::*;
        let dom = Html::parse_document(text.as_ref());
        // Builds one `IncomingTrain` from the three raw cell strings.
        fn make_incoming_train(tid: &str, ttype: &str, tta: &str) -> Result<IncomingTrain, Error> {
            let tid = tid.parse::<u16>()?;
            let ttype = ttype.into();
            let min_till_arrival = if let Some(m) = NUMERIC.find(&tta) {
                m.as_str().parse::<u16>()?
            } else {
                // Sentinel stored when the cell contains no digits at all.
                9001
            };
            Ok(IncomingTrain::new(tid, ttype, min_till_arrival))
        }
        // Depth-first walk; children are visited before this element's own
        // class/text bookkeeping is resolved, so the statement order below
        // is significant.
        fn walk(node: &ElementRef, state: &mut WalkerState) -> Result<(), Error> {
            // Classify this element by the suffix of its `class` attribute.
            for attr in &node.value().attrs {
                if &attr.0.local == "class" {
                    let val = attr.1.as_bytes();
                    if val.ends_with(b"ipf-st-ip-trains-subtable") {
                        state.current_table_no += 1;
                    }
                    if val.ends_with(b"ipf-st-ip-trains-subtable-td-id") {
                        state.last_read_class = Some(TrainId);
                    }
                    if val.ends_with(b"ipf-st-ip-trains-subtable-td-type") {
                        state.last_read_class = Some(TrainType);
                    }
                    if val.ends_with(b"ipf-st-ip-trains-subtable-td-arrivaltime") {
                        state.last_read_class = Some(TimeTillArrival);
                    }
                }
            }
            // Recurse into element children; remember the latest text node.
            for child in node.children() {
                if let Some(e) = ElementRef::wrap(child) {
                    walk(&e, state)?;
                } else {
                    if let Some(t) = child.value().as_text() {
                        state.last_text = Some(t.text.to_string());
                    }
                }
            }
            // If a recognized cell class and a text value are both pending,
            // stash the text into the matching slot and clear both; otherwise
            // carry them forward unchanged.
            let res = match (&state.last_read_class, &state.last_text) {
                (Some(ttype), Some(text)) => {
                    match ttype {
                        TrainId => state.train_id = Some(text.clone()),
                        TrainType => state.train_type = Some(text.clone()),
                        TimeTillArrival => state.time_till_departure = Some(text.clone()),
                    }
                    (None, None)
                }
                (a, b) => (*a, b.as_ref().cloned()),
            };
            state.last_read_class = res.0;
            state.last_text = res.1;
            // Once all three cell values are collected, flush a train row
            // into the table currently being parsed.
            let mut should_wipe = false;
            if let (Some(tid), Some(ttype), Some(tta)) = (
                &mut state.train_id,
                &mut state.train_type,
                &mut state.time_till_departure,
            ) {
                if state.current_table_no == 1 {
                    state.southbound.push(make_incoming_train(tid, ttype, tta)?);
                }
                if state.current_table_no == 2 {
                    state.northbound.push(make_incoming_train(tid, ttype, tta)?);
                }
                should_wipe = true;
            }
            if should_wipe {
                state.train_id = None;
                state.train_type = None;
                state.time_till_departure = None;
            }
            Ok(())
        }
        walk(&dom.root_element(), &mut state)?;
        Ok(CaltrainStatus {
            northbound: state.northbound,
            southbound: state.southbound,
        })
    }
}
/// Lets `CaltrainStatus` be delivered as an actix message; handlers
/// produce no reply value.
impl Message for CaltrainStatus {
    type Result = ();
}
/// Errors produced while scraping the status page.
#[derive(Debug)]
pub enum Error {
    // Underlying I/O problem while handling the HTML.
    HtmlError(std::io::Error),
    // A train id or minute count failed to parse as an integer.
    InvalidIntError(std::num::ParseIntError),
}
impl std::error::Error for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
HtmlError(e) => write!(f, "{:?}", e),
InvalidIntError(e) => write!(f, "{}", e),
}
}
}
impl From<std::io::Error> for Error {
fn from(e: std::io::Error) -> Self {
HtmlError(e)
}
}
impl From<std::num::ParseIntError> for Error {
fn from(e: std::num::ParseIntError) -> Self {
InvalidIntError(e)
}
}
#[cfg(test)]
mod test {
    use super::*;
    // Parses the checked-in fixture with both direction tables populated.
    #[test]
    fn from_html() {
        assert_eq!(
            CaltrainStatus::from_html(include_str!("test.html")).unwrap(),
            CaltrainStatus {
                northbound: vec![
                    IncomingTrain {
                        id: 429,
                        ttype: TrainType::Local,
                        min_till_departure: 59,
                    },
                    IncomingTrain {
                        id: 431,
                        ttype: TrainType::Local,
                        min_till_departure: 149,
                    },
                    IncomingTrain {
                        id: 433,
                        ttype: TrainType::Local,
                        min_till_departure: 239,
                    }
                ],
                southbound: vec![
                    IncomingTrain {
                        id: 802,
                        ttype: TrainType::BabyBullet,
                        min_till_departure: 6,
                    },
                    IncomingTrain {
                        id: 428,
                        ttype: TrainType::Local,
                        min_till_departure: 63,
                    },
                    IncomingTrain {
                        id: 430,
                        ttype: TrainType::Local,
                        min_till_departure: 153,
                    }
                ],
            }
        )
    }
    // Parses a fixture whose southbound table is empty.
    #[test]
    fn from_html_no_southbound() {
        assert_eq!(
            CaltrainStatus::from_html(include_str!("test2.html")).unwrap(),
            CaltrainStatus {
                northbound: vec![
                    IncomingTrain {
                        id: 803,
                        ttype: TrainType::BabyBullet,
                        min_till_departure: 69,
                    },
                    IncomingTrain {
                        id: 435,
                        ttype: TrainType::Local,
                        min_till_departure: 86,
                    },
                    IncomingTrain {
                        id: 437,
                        ttype: TrainType::Local,
                        min_till_departure: 176,
                    }
                ],
                southbound: vec![],
            }
        )
    }
}
|
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std::fmt;
use arrayref::array_ref;
use bincode::{deserialize, serialize, serialized_size};
use ordered_float::OrderedFloat;
use uuid::{Bytes, Uuid};
use crate::error::DataError;
/// We define a set of types, associated with simple integers, to annotate values stored
/// in LMDB. This is to avoid an accidental 'cast' from a value of one type to another.
/// For this reason we don't simply use `deserialize` from the `bincode` crate.
///
/// Tag values are written to storage (see `Value::to_bytes`), so existing
/// numbers must never be changed — only appended to.
#[repr(u8)]
#[derive(Debug, PartialEq, Eq)]
pub enum Type {
    Bool = 1,
    U64 = 2,
    I64 = 3,
    F64 = 4,
    Instant = 5, // Millisecond-precision timestamp.
    Uuid = 6,
    Str = 7,
    Json = 8,
    Blob = 9,
}
/// We use manual tagging, because <https://github.com/serde-rs/serde/issues/610>.
impl Type {
    /// Decodes a stored tag byte back into a `Type`, failing with
    /// `DataError::UnknownType` for unrecognized tags.
    pub fn from_tag(tag: u8) -> Result<Type, DataError> {
        // `ok_or` is appropriate here: constructing the error value is
        // trivial, so the lazy `ok_or_else` form (and the clippy allow it
        // required) is unnecessary.
        Type::from_primitive(tag).ok_or(DataError::UnknownType(tag))
    }
    /// Returns the on-disk tag byte for this type.
    #[allow(clippy::wrong_self_convention)]
    pub fn to_tag(self) -> u8 {
        self as u8
    }
    /// Maps a raw tag byte to its `Type`, if it is one of the known tags.
    fn from_primitive(p: u8) -> Option<Type> {
        match p {
            1 => Some(Type::Bool),
            2 => Some(Type::U64),
            3 => Some(Type::I64),
            4 => Some(Type::F64),
            5 => Some(Type::Instant),
            6 => Some(Type::Uuid),
            7 => Some(Type::Str),
            8 => Some(Type::Json),
            9 => Some(Type::Blob),
            _ => None,
        }
    }
}
impl fmt::Display for Type {
    /// Writes the lowercase name of the type tag.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let name = match self {
            Type::Bool => "bool",
            Type::U64 => "u64",
            Type::I64 => "i64",
            Type::F64 => "f64",
            Type::Instant => "instant",
            Type::Uuid => "uuid",
            Type::Str => "str",
            Type::Json => "json",
            Type::Blob => "blob",
        };
        f.write_str(name)
    }
}
/// A borrowed, tagged value. Non-primitive payloads borrow from the byte
/// slice they were decoded from (see `from_tagged_slice`).
#[derive(Debug, Eq, PartialEq)]
pub enum Value<'v> {
    Bool(bool),
    U64(u64),
    I64(i64),
    F64(OrderedFloat<f64>),
    Instant(i64), // Millisecond-precision timestamp.
    Uuid(&'v Bytes),
    Str(&'v str),
    Json(&'v str),
    Blob(&'v [u8]),
}
/// Owning counterpart of [`Value`]; the two convert in both directions
/// via the `From` impls below.
#[derive(Clone, Debug, PartialEq)]
pub enum OwnedValue {
    Bool(bool),
    U64(u64),
    I64(i64),
    F64(f64),
    Instant(i64), // Millisecond-precision timestamp.
    Uuid(Uuid),
    Str(String),
    Json(String), // TODO
    Blob(Vec<u8>),
}
/// Borrows `bytes` as a `Value::Uuid`, requiring exactly 16 bytes.
fn uuid(bytes: &[u8]) -> Result<Value, DataError> {
    if bytes.len() != 16 {
        return Err(DataError::InvalidUuid);
    }
    Ok(Value::Uuid(array_ref![bytes, 0, 16]))
}
impl<'v> Value<'v> {
    /// Decodes a `[tag byte | bincode payload]` slice — the format produced
    /// by `to_bytes` — into a borrowed `Value`.
    pub fn from_tagged_slice(slice: &'v [u8]) -> Result<Value<'v>, DataError> {
        let (tag, data) = slice.split_first().ok_or(DataError::Empty)?;
        let t = Type::from_tag(*tag)?;
        Value::from_type_and_data(t, data)
    }
    /// Deserializes `data` according to the already-decoded tag `t`.
    fn from_type_and_data(t: Type, data: &'v [u8]) -> Result<Value<'v>, DataError> {
        // Uuid is special-cased: its payload additionally goes through the
        // length-checking `uuid` helper, which has its own error path.
        if t == Type::Uuid {
            return deserialize(data)
                .map_err(|e| DataError::DecodingError {
                    value_type: t,
                    err: e,
                })
                .map(uuid)?;
        }
        match t {
            Type::Bool => deserialize(data).map(Value::Bool),
            Type::U64 => deserialize(data).map(Value::U64),
            Type::I64 => deserialize(data).map(Value::I64),
            Type::F64 => deserialize(data).map(OrderedFloat).map(Value::F64),
            Type::Instant => deserialize(data).map(Value::Instant),
            Type::Str => deserialize(data).map(Value::Str),
            Type::Json => deserialize(data).map(Value::Json),
            Type::Blob => deserialize(data).map(Value::Blob),
            Type::Uuid => {
                // Processed above to avoid verbose duplication of error transforms.
                unreachable!()
            }
        }
        .map_err(|e| DataError::DecodingError {
            value_type: t,
            err: e,
        })
    }
    /// Serializes `(tag, payload)` into the byte format accepted by
    /// `from_tagged_slice`.
    pub fn to_bytes(&self) -> Result<Vec<u8>, DataError> {
        match self {
            Value::Bool(v) => serialize(&(Type::Bool.to_tag(), *v)),
            Value::U64(v) => serialize(&(Type::U64.to_tag(), *v)),
            Value::I64(v) => serialize(&(Type::I64.to_tag(), *v)),
            Value::F64(v) => serialize(&(Type::F64.to_tag(), v.0)),
            Value::Instant(v) => serialize(&(Type::Instant.to_tag(), *v)),
            Value::Str(v) => serialize(&(Type::Str.to_tag(), v)),
            Value::Json(v) => serialize(&(Type::Json.to_tag(), v)),
            Value::Blob(v) => serialize(&(Type::Blob.to_tag(), v)),
            Value::Uuid(v) => serialize(&(Type::Uuid.to_tag(), v)),
        }
        .map_err(DataError::EncodingError)
    }
    /// Returns the number of bytes `to_bytes` would produce, without
    /// actually serializing.
    pub fn serialized_size(&self) -> Result<u64, DataError> {
        match self {
            Value::Bool(v) => serialized_size(&(Type::Bool.to_tag(), *v)),
            Value::U64(v) => serialized_size(&(Type::U64.to_tag(), *v)),
            Value::I64(v) => serialized_size(&(Type::I64.to_tag(), *v)),
            Value::F64(v) => serialized_size(&(Type::F64.to_tag(), v.0)),
            Value::Instant(v) => serialized_size(&(Type::Instant.to_tag(), *v)),
            Value::Str(v) => serialized_size(&(Type::Str.to_tag(), v)),
            Value::Json(v) => serialized_size(&(Type::Json.to_tag(), v)),
            Value::Blob(v) => serialized_size(&(Type::Blob.to_tag(), v)),
            Value::Uuid(v) => serialized_size(&(Type::Uuid.to_tag(), v)),
        }
        .map_err(DataError::EncodingError)
    }
}
impl<'v> From<&'v Value<'v>> for OwnedValue {
    /// Deep-copies a borrowed `Value` into its owning counterpart.
    fn from(value: &Value) -> OwnedValue {
        // Every `Value` payload is `Copy` (primitives or shared references),
        // so matching on `*value` is fine; ownership is only taken where
        // needed (strings, blobs, uuid bytes).
        match *value {
            Value::Bool(b) => OwnedValue::Bool(b),
            Value::U64(n) => OwnedValue::U64(n),
            Value::I64(n) => OwnedValue::I64(n),
            Value::F64(f) => OwnedValue::F64(f.0),
            Value::Instant(ts) => OwnedValue::Instant(ts),
            Value::Uuid(bytes) => OwnedValue::Uuid(Uuid::from_bytes(*bytes)),
            Value::Str(s) => OwnedValue::Str(s.to_string()),
            Value::Json(s) => OwnedValue::Json(s.to_string()),
            Value::Blob(b) => OwnedValue::Blob(b.to_vec()),
        }
    }
}
impl<'v> From<&'v OwnedValue> for Value<'v> {
fn from(value: &OwnedValue) -> Value {
match value {
OwnedValue::Bool(v) => Value::Bool(*v),
OwnedValue::U64(v) => Value::U64(*v),
OwnedValue::I64(v) => Value::I64(*v),
OwnedValue::F64(v) => Value::F64(OrderedFloat::from(*v)),
OwnedValue::Instant(v) => Value::Instant(*v),
OwnedValue::Uuid(v) => Value::Uuid(v.as_bytes()),
OwnedValue::Str(v) => Value::Str(v),
OwnedValue::Json(v) => Value::Json(v),
OwnedValue::Blob(v) => Value::Blob(v),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Pins the on-disk size of each tagged encoding so format changes are
    // caught by CI.
    #[test]
    fn test_value_serialized_size() {
        // | Value enum    | tag: 1 byte | value_payload            |
        // |----------------------------------------------------------|
        // | I64           |      1      |            8             |
        // | U64           |      1      |            8             |
        // | Bool          |      1      |            1             |
        // | Instant       |      1      |            8             |
        // | F64           |      1      |            8             |
        // | Uuid          |      1      |            16            |
        // | Str/Blob/Json |      1      |(8: len + sizeof(payload))|
        assert_eq!(Value::I64(-1000).serialized_size().unwrap(), 9);
        assert_eq!(Value::U64(1000u64).serialized_size().unwrap(), 9);
        assert_eq!(Value::Bool(true).serialized_size().unwrap(), 2);
        assert_eq!(
            Value::Instant(1_558_020_865_224).serialized_size().unwrap(),
            9
        );
        assert_eq!(
            Value::F64(OrderedFloat(10000.1)).serialized_size().unwrap(),
            9
        );
        assert_eq!(Value::Str("hello!").serialized_size().unwrap(), 15);
        assert_eq!(Value::Str("¡Hola").serialized_size().unwrap(), 15);
        assert_eq!(Value::Blob(b"hello!").serialized_size().unwrap(), 15);
        assert_eq!(
            uuid(b"\x9f\xe2\xc4\xe9\x3f\x65\x4f\xdb\xb2\x4c\x02\xb1\x52\x59\x71\x6c")
                .unwrap()
                .serialized_size()
                .unwrap(),
            17
        );
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Filter over deployments by provisioning state.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentExtendedFilter {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
}
/// Filter over generic resources by type and/or a single tag name/value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenericResourceFilter {
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tagname: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tagvalue: Option<String>,
}
/// Filter over resource groups by a tag name/value pair.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroupFilter {
    #[serde(rename = "tagName", default, skip_serializing_if = "Option::is_none")]
    pub tag_name: Option<String>,
    #[serde(rename = "tagValue", default, skip_serializing_if = "Option::is_none")]
    pub tag_value: Option<String>,
}
/// Reference to an externally stored deployment template.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TemplateLink {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "relativePath", default, skip_serializing_if = "Option::is_none")]
    pub relative_path: Option<String>,
    #[serde(rename = "contentVersion", default, skip_serializing_if = "Option::is_none")]
    pub content_version: Option<String>,
    #[serde(rename = "queryString", default, skip_serializing_if = "Option::is_none")]
    pub query_string: Option<String>,
}
/// Reference to an externally stored parameters file.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ParametersLink {
    pub uri: String,
    #[serde(rename = "contentVersion", default, skip_serializing_if = "Option::is_none")]
    pub content_version: Option<String>,
}
/// Inputs controlling how a deployment runs: the template and parameters
/// (inline or linked) plus mode and error/debug settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub template: Option<serde_json::Value>,
    #[serde(rename = "templateLink", default, skip_serializing_if = "Option::is_none")]
    pub template_link: Option<TemplateLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
    #[serde(rename = "parametersLink", default, skip_serializing_if = "Option::is_none")]
    pub parameters_link: Option<ParametersLink>,
    pub mode: deployment_properties::Mode,
    #[serde(rename = "debugSetting", default, skip_serializing_if = "Option::is_none")]
    pub debug_setting: Option<DebugSetting>,
    #[serde(rename = "onErrorDeployment", default, skip_serializing_if = "Option::is_none")]
    pub on_error_deployment: Option<OnErrorDeployment>,
    #[serde(rename = "expressionEvaluationOptions", default, skip_serializing_if = "Option::is_none")]
    pub expression_evaluation_options: Option<ExpressionEvaluationOptions>,
}
/// Nested enum namespace for `DeploymentProperties::mode`.
pub mod deployment_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        Incremental,
        Complete,
    }
}
/// Debug-logging detail level for a deployment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DebugSetting {
    #[serde(rename = "detailLevel", default, skip_serializing_if = "Option::is_none")]
    pub detail_level: Option<String>,
}
/// A deployment request; `location` is optional at this scope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Deployment {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    pub properties: DeploymentProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// A deployment request at a scope where `location` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopedDeployment {
    pub location: String,
    pub properties: DeploymentProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Result of exporting a deployment's template.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentExportResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub template: Option<serde_json::Value>,
}
/// What-if (dry-run) deployment request; `location` optional at this scope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentWhatIf {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    pub properties: DeploymentWhatIfProperties,
}
/// What-if deployment request at a scope where `location` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScopedDeploymentWhatIf {
    pub location: String,
    pub properties: DeploymentWhatIfProperties,
}
/// Deployment properties plus what-if-specific settings (flattened on wire).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentWhatIfProperties {
    #[serde(flatten)]
    pub deployment_properties: DeploymentProperties,
    #[serde(rename = "whatIfSettings", default, skip_serializing_if = "Option::is_none")]
    pub what_if_settings: Option<DeploymentWhatIfSettings>,
}
/// Settings controlling the shape of a what-if result.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentWhatIfSettings {
    #[serde(rename = "resultFormat", default, skip_serializing_if = "Option::is_none")]
    pub result_format: Option<deployment_what_if_settings::ResultFormat>,
}
/// Nested enum namespace for `DeploymentWhatIfSettings::result_format`.
pub mod deployment_what_if_settings {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ResultFormat {
        ResourceIdOnly,
        FullResourcePayloads,
    }
}
/// Top-level error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Association between a profile version and an API version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiProfile {
    #[serde(rename = "profileVersion", default, skip_serializing_if = "Option::is_none")]
    pub profile_version: Option<String>,
    #[serde(rename = "apiVersion", default, skip_serializing_if = "Option::is_none")]
    pub api_version: Option<String>,
}
/// Type and attribute metadata for an alias path token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AliasPathMetadata {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<alias_path_metadata::Type>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub attributes: Option<alias_path_metadata::Attributes>,
}
/// Nested enum namespace for `AliasPathMetadata` fields.
pub mod alias_path_metadata {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        NotSpecified,
        Any,
        String,
        Object,
        Array,
        Integer,
        Number,
        Boolean,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Attributes {
        None,
        Modifiable,
    }
}
/// One path an alias can resolve to, with applicable API versions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AliasPath {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
    #[serde(rename = "apiVersions", default, skip_serializing_if = "Vec::is_empty")]
    pub api_versions: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub pattern: Option<AliasPattern>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<AliasPathMetadata>,
}
/// Pattern used to extract or match an alias value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AliasPattern {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub phrase: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub variable: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<alias_pattern::Type>,
}
/// Nested enum namespace for `AliasPattern::type_`.
pub mod alias_pattern {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        NotSpecified,
        Extract,
    }
}
/// A named alias with the paths, defaults and metadata it maps to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Alias {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub paths: Vec<AliasPath>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<alias::Type>,
    #[serde(rename = "defaultPath", default, skip_serializing_if = "Option::is_none")]
    pub default_path: Option<String>,
    #[serde(rename = "defaultPattern", default, skip_serializing_if = "Option::is_none")]
    pub default_pattern: Option<AliasPattern>,
    #[serde(rename = "defaultMetadata", default, skip_serializing_if = "Option::is_none")]
    pub default_metadata: Option<AliasPathMetadata>,
}
/// Nested enum namespace for `Alias::type_`.
pub mod alias {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        NotSpecified,
        PlainText,
        Mask,
    }
}
/// A provider location together with its extended locations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProviderExtendedLocation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "extendedLocations", default, skip_serializing_if = "Vec::is_empty")]
    pub extended_locations: Vec<String>,
}
/// A resource type offered by a provider: locations, aliases, API versions
/// and capabilities.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProviderResourceType {
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub locations: Vec<String>,
    #[serde(rename = "locationMappings", default, skip_serializing_if = "Vec::is_empty")]
    pub location_mappings: Vec<ProviderExtendedLocation>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub aliases: Vec<Alias>,
    #[serde(rename = "apiVersions", default, skip_serializing_if = "Vec::is_empty")]
    pub api_versions: Vec<String>,
    #[serde(rename = "defaultApiVersion", default, skip_serializing_if = "Option::is_none")]
    pub default_api_version: Option<String>,
    #[serde(rename = "zoneMappings", default, skip_serializing_if = "Vec::is_empty")]
    pub zone_mappings: Vec<ZoneMapping>,
    #[serde(rename = "apiProfiles", default, skip_serializing_if = "Vec::is_empty")]
    pub api_profiles: Vec<ApiProfile>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capabilities: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// A resource provider and the resource types it registers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Provider {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub namespace: Option<String>,
    #[serde(rename = "registrationState", default, skip_serializing_if = "Option::is_none")]
    pub registration_state: Option<String>,
    #[serde(rename = "registrationPolicy", default, skip_serializing_if = "Option::is_none")]
    pub registration_policy: Option<String>,
    #[serde(rename = "resourceTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub resource_types: Vec<ProviderResourceType>,
}
/// A resource referenced as a dependency (id, type and name only).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BasicDependency {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(rename = "resourceName", default, skip_serializing_if = "Option::is_none")]
    pub resource_name: Option<String>,
}
/// A deployed resource together with the resources it depends on.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Dependency {
    #[serde(rename = "dependsOn", default, skip_serializing_if = "Vec::is_empty")]
    pub depends_on: Vec<BasicDependency>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
    #[serde(rename = "resourceName", default, skip_serializing_if = "Option::is_none")]
    pub resource_name: Option<String>,
}
/// Read-only deployment state returned by the service: provisioning status,
/// outputs, dependencies and the inputs the deployment ran with.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentPropertiesExtended {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<deployment_properties_extended::ProvisioningState>,
    #[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")]
    pub correlation_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub duration: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub outputs: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub providers: Vec<Provider>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dependencies: Vec<Dependency>,
    #[serde(rename = "templateLink", default, skip_serializing_if = "Option::is_none")]
    pub template_link: Option<TemplateLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
    #[serde(rename = "parametersLink", default, skip_serializing_if = "Option::is_none")]
    pub parameters_link: Option<ParametersLink>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mode: Option<deployment_properties_extended::Mode>,
    #[serde(rename = "debugSetting", default, skip_serializing_if = "Option::is_none")]
    pub debug_setting: Option<DebugSetting>,
    #[serde(rename = "onErrorDeployment", default, skip_serializing_if = "Option::is_none")]
    pub on_error_deployment: Option<OnErrorDeploymentExtended>,
    #[serde(rename = "templateHash", default, skip_serializing_if = "Option::is_none")]
    pub template_hash: Option<String>,
    #[serde(rename = "outputResources", default, skip_serializing_if = "Vec::is_empty")]
    pub output_resources: Vec<ResourceReference>,
    #[serde(rename = "validatedResources", default, skip_serializing_if = "Vec::is_empty")]
    pub validated_resources: Vec<ResourceReference>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Nested enum namespace for `DeploymentPropertiesExtended` fields.
pub mod deployment_properties_extended {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        NotSpecified,
        Accepted,
        Running,
        Ready,
        Creating,
        Created,
        Deleting,
        Deleted,
        Canceled,
        Failed,
        Succeeded,
        Updating,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        Incremental,
        Complete,
    }
}
/// Reference to a resource by id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceReference {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Fallback deployment to run when this deployment fails.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OnErrorDeployment {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<on_error_deployment::Type>,
    #[serde(rename = "deploymentName", default, skip_serializing_if = "Option::is_none")]
    pub deployment_name: Option<String>,
}
/// Nested enum namespace for `OnErrorDeployment::type_`.
pub mod on_error_deployment {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        LastSuccessful,
        SpecificDeployment,
    }
}
/// On-error deployment state as reported back by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OnErrorDeploymentExtended {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<on_error_deployment_extended::Type>,
    #[serde(rename = "deploymentName", default, skip_serializing_if = "Option::is_none")]
    pub deployment_name: Option<String>,
}
/// Nested enum namespace for `OnErrorDeploymentExtended::type_`.
pub mod on_error_deployment_extended {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        LastSuccessful,
        SpecificDeployment,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentValidateResult {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<ErrorResponse>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DeploymentPropertiesExtended>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentExtended {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub location: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<DeploymentPropertiesExtended>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tags: Option<serde_json::Value>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<DeploymentExtended>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProviderListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<Provider>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProviderResourceTypeListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<ProviderResourceType>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
/// Resource of any type: base `Resource` fields flattened into this struct,
/// plus commercial (plan/sku) and identity metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenericResource {
    // `flatten` inlines the base Resource fields at this level of the JSON.
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub plan: Option<Plan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<Identity>,
}
/// `GenericResource` extended with server-side timestamps and state
/// (as returned when listing with `$expand`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GenericResourceExpanded {
    #[serde(flatten)]
    pub generic_resource: GenericResource,
    #[serde(rename = "createdTime", default, skip_serializing_if = "Option::is_none")]
    pub created_time: Option<String>,
    #[serde(rename = "changedTime", default, skip_serializing_if = "Option::is_none")]
    pub changed_time: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
}
/// Marketplace plan attached to a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Plan {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub product: Option<String>,
    #[serde(rename = "promotionCode", default, skip_serializing_if = "Option::is_none")]
    pub promotion_code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// SKU (pricing/capacity tier) of a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub size: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub family: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i32>,
}
/// Managed identity attached to a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Identity {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<identity::Type>,
    #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
    pub user_assigned_identities: Option<serde_json::Value>,
}
pub mod identity {
    use super::*;
    /// Identity kind; note the wire value for the combined variant is the
    /// literal string "SystemAssigned, UserAssigned".
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
        UserAssigned,
        #[serde(rename = "SystemAssigned, UserAssigned")]
        SystemAssignedUserAssigned,
        None,
    }
}
/// One page of expanded resources plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<GenericResourceExpanded>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A resource group. `location` is the only required field on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroup {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ResourceGroupProperties>,
    pub location: String,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Patch (partial update) payload for a resource group; every field optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroupPatchable {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ResourceGroupProperties>,
    #[serde(rename = "managedBy", default, skip_serializing_if = "Option::is_none")]
    pub managed_by: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Server-reported state of a resource group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroupProperties {
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
}
/// One page of resource groups.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroupListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceGroup>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Request body for moving resources (by id) into another resource group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourcesMoveInfo {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<String>,
    #[serde(rename = "targetResourceGroup", default, skip_serializing_if = "Option::is_none")]
    pub target_resource_group: Option<String>,
}
/// Request body for exporting a resource-group template.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExportTemplateRequest {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<String>,
}
/// Count of resources carrying a tag, with the count's own type string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagCount {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<i64>,
}
/// A single tag value and how many resources use it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagValue {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "tagValue", default, skip_serializing_if = "Option::is_none")]
    pub tag_value: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<TagCount>,
}
/// A tag name with its usage count and the values seen for it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "tagName", default, skip_serializing_if = "Option::is_none")]
    pub tag_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub count: Option<TagCount>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub values: Vec<TagValue>,
}
/// One page of tag details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<TagDetails>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Resource targeted by a deployment operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TargetResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "resourceName", default, skip_serializing_if = "Option::is_none")]
    pub resource_name: Option<String>,
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
}
/// Opaque HTTP request/response payload captured for an operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HttpMessage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub content: Option<serde_json::Value>,
}
/// Detailed record of one step performed during a deployment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentOperationProperties {
    #[serde(rename = "provisioningOperation", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_operation: Option<deployment_operation_properties::ProvisioningOperation>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub duration: Option<String>,
    #[serde(rename = "serviceRequestId", default, skip_serializing_if = "Option::is_none")]
    pub service_request_id: Option<String>,
    #[serde(rename = "statusCode", default, skip_serializing_if = "Option::is_none")]
    pub status_code: Option<String>,
    #[serde(rename = "statusMessage", default, skip_serializing_if = "Option::is_none")]
    pub status_message: Option<StatusMessage>,
    #[serde(rename = "targetResource", default, skip_serializing_if = "Option::is_none")]
    pub target_resource: Option<TargetResource>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub request: Option<HttpMessage>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub response: Option<HttpMessage>,
}
pub mod deployment_operation_properties {
    use super::*;
    /// Kind of step the deployment engine performed.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningOperation {
        NotSpecified,
        Create,
        Delete,
        Waiting,
        AzureAsyncOperationWaiting,
        ResourceCacheWaiting,
        Action,
        Read,
        EvaluateDeploymentOutput,
        DeploymentCleanup,
    }
}
/// One deployment operation, identified by `operationId`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "operationId", default, skip_serializing_if = "Option::is_none")]
    pub operation_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DeploymentOperationProperties>,
}
/// One page of deployment operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeploymentOperationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DeploymentOperation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Human-readable description of a resource-provider operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceProviderOperationDisplayProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub publisher: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Common envelope shared by all resources (id/name/type/location/tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Minimal reference to a child resource by `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Result of a template export: the template JSON or an error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceGroupExportResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub template: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// One API operation with its display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
pub mod operation {
    use super::*;
    /// Display strings for an `Operation`.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// One page of API operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Result of hashing a template: the minified text and its hash.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TemplateHashResult {
    #[serde(rename = "minifiedTemplate", default, skip_serializing_if = "Option::is_none")]
    pub minified_template: Option<String>,
    #[serde(rename = "templateHash", default, skip_serializing_if = "Option::is_none")]
    pub template_hash: Option<String>,
}
/// Predicted change to a single property in a what-if run.
/// `path` and `propertyChangeType` are required on the wire;
/// `children` nests changes for composite properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WhatIfPropertyChange {
    pub path: String,
    #[serde(rename = "propertyChangeType")]
    pub property_change_type: what_if_property_change::PropertyChangeType,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub before: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub after: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub children: Vec<WhatIfPropertyChange>,
}
pub mod what_if_property_change {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PropertyChangeType {
        Create,
        Delete,
        Modify,
        Array,
    }
}
/// Predicted change to a whole resource in a what-if run, with the
/// per-property `delta` that explains a `Modify`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WhatIfChange {
    #[serde(rename = "resourceId")]
    pub resource_id: String,
    #[serde(rename = "changeType")]
    pub change_type: what_if_change::ChangeType,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub before: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub after: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub delta: Vec<WhatIfPropertyChange>,
}
pub mod what_if_change {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ChangeType {
        Create,
        Delete,
        Ignore,
        Deploy,
        NoChange,
        Modify,
    }
}
/// Payload of a what-if result: the list of predicted changes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WhatIfOperationProperties {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub changes: Vec<WhatIfChange>,
}
/// Envelope for a completed what-if operation: status plus either
/// the properties or an error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WhatIfOperationResult {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WhatIfOperationProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Free-form tag dictionary.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Tags {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Patch request for tags: which merge operation to apply and the tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsPatchResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<tags_patch_resource::Operation>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<Tags>,
}
pub mod tags_patch_resource {
    use super::*;
    /// How the supplied tags combine with the existing ones.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Operation {
        Replace,
        Merge,
        Delete,
    }
}
/// Tags wrapped as a resource; `properties` is required on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TagsResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub properties: Tags,
}
/// Operation status paired with an optional error payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatusMessage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Controls the scope in which template expressions are evaluated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExpressionEvaluationOptions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scope: Option<expression_evaluation_options::Scope>,
}
pub mod expression_evaluation_options {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Scope {
        NotSpecified,
        Outer,
        Inner,
    }
}
/// Availability zones supported in a given location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ZoneMapping {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
}
/// Standard error body; `details` nests further `ErrorResponse`s recursively.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorResponse>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Typed extra diagnostic info attached to an error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
|
use anyhow::{Context, Result};
use regex::Regex;
use std::collections::HashSet;
use structopt::{
clap::crate_authors, clap::crate_description, clap::crate_version, clap::AppSettings, StructOpt,
};
use uvm_cli::{options::ColorOption, set_colors_enabled, set_loglevel};
use uvm_core::unity::VersionType;
use uvm_versions;
/// Clap settings applied to the CLI: colored help output and keeping each
/// argument spelled out (not collapsed to `[OPTIONS]`) in the usage line.
/// Note: references in `const` items are implicitly `'static`, so the
/// explicit lifetime was redundant (clippy: redundant_static_lifetimes).
const SETTINGS: &[AppSettings] = &[
    AppSettings::ColoredHelp,
    AppSettings::DontCollapseArgsInUsage,
];
/// Command-line options; doc comments below double as `--help` text
/// (structopt derives the help strings from them).
#[derive(StructOpt, Debug)]
#[structopt(version = crate_version!(), author = crate_authors!(), about = crate_description!(), settings = SETTINGS)]
struct Opts {
    /// list all available versions for the selected version types
    #[structopt(short, long)]
    all: bool,
    /// list final versions
    #[structopt(short = "f", long = "final")]
    list_final: bool,
    /// list beta versions
    #[structopt(short = "b", long = "beta")]
    list_beta: bool,
    /// list alpha versions
    #[structopt(long = "alpha")]
    list_alpha: bool,
    /// list patch versions
    #[structopt(short = "p", long = "patch")]
    list_patch: bool,
    /// a regex pattern to filter results
    #[structopt()]
    pattern: Option<Regex>,
    /// print debug output
    #[structopt(short, long)]
    debug: bool,
    /// print more output
    #[structopt(short, long, parse(from_occurrences))]
    verbose: i32,
    /// control colored output
    #[structopt(short, long, possible_values = &ColorOption::variants(), case_insensitive = true, default_value)]
    color: ColorOption,
}
impl uvm_versions::VersionListOptions for Opts {
    /// Set of version types to list: only the flagged ones when any
    /// type flag was passed, otherwise every variant.
    fn list_variants(&self) -> HashSet<VersionType> {
        let explicit = self.has_variant_flags();
        let mut variants = HashSet::with_capacity(4);
        if !explicit || self.list_alpha {
            variants.insert(VersionType::Alpha);
        }
        if !explicit || self.list_beta {
            variants.insert(VersionType::Beta);
        }
        if !explicit || self.list_patch {
            variants.insert(VersionType::Patch);
        }
        if !explicit || self.list_final {
            variants.insert(VersionType::Final);
        }
        variants
    }
    /// True when at least one version-type flag was passed on the CLI.
    fn has_variant_flags(&self) -> bool {
        [self.list_alpha, self.list_beta, self.list_patch, self.list_final]
            .iter()
            .any(|&flag| flag)
    }
    /// Results are filtered unless `--all` was requested.
    fn filter_versions(&self) -> bool {
        !self.all
    }
    /// Optional regex restricting which versions are printed.
    fn pattern(&self) -> &Option<Regex> {
        &self.pattern
    }
    fn debug(&self) -> bool {
        self.debug
    }
}
/// CLI entry point: parse arguments, configure color/log output, and
/// print the matching Unity versions.
fn main() -> Result<()> {
    let opt = Opts::from_args();
    set_colors_enabled(&opt.color);
    // --debug forces verbosity level 2; otherwise honor the -v count.
    let loglevel = if opt.debug { 2 } else { opt.verbose };
    set_loglevel(loglevel);
    // Context previously said "modules", but this command lists versions.
    uvm_versions::list_versions(&opt).context("failed to list Unity versions")?;
    Ok(())
}
|
pub mod state;
pub mod utils; |
// Create the Error, ErrorKind, ResultExt, and Result types via the
// `error_chain` macro; each foreign link gets an automatic `From` impl.
error_chain!{
    foreign_links {
        // NOTE(review): a leading `::` resolves from the crate root, so
        // `::io::Error` only compiles if the crate (re-)exports `io`;
        // this is usually written `::std::io::Error` — confirm it builds.
        Io(::io::Error);
        Hyper(::hyper::Error);
        SerdeJSON(::serde_json::Error);
    }
}
|
#![feature(str_split_once)]
#[macro_use]
extern crate lazy_static;
mod day01;
mod day02;
mod day03;
mod day04;
mod day05;
mod day06;
mod day07;
mod day08;
mod day09;
mod day10;
mod day11;
mod day12;
mod day13;
/// Runs part 1 and part 2 of every implemented Advent of Code day, in order.
fn main() {
    // Expands to `dayNN::part1(); dayNN::part2();` for each listed module,
    // preserving the original invocation order exactly.
    macro_rules! run_days {
        ($($day:ident),* $(,)?) => {
            $(
                $day::part1();
                $day::part2();
            )*
        };
    }
    run_days!(
        day01, day02, day03, day04, day05, day06, day07, day08, day09, day10,
        day11, day12, day13,
    );
}
|
// svd2rust-generated accessor aliases for the FCR3 (flag clear) register.
// Each register field gets a `*_R` single-bit reader and a `*_W` single-bit
// writer; the writer's const generic `O` is the bit offset in the register.
#[doc = "Register `FCR3` reader"]
pub type R = crate::R<FCR3_SPEC>;
#[doc = "Register `FCR3` writer"]
pub type W = crate::W<FCR3_SPEC>;
#[doc = "Field `TZSCFC` reader - TZSCFC"]
pub type TZSCFC_R = crate::BitReader;
#[doc = "Field `TZSCFC` writer - TZSCFC"]
pub type TZSCFC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TZICFC` reader - TZICFC"]
pub type TZICFC_R = crate::BitReader;
#[doc = "Field `TZICFC` writer - TZICFC"]
pub type TZICFC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCWM1FC` reader - MPCWM1FC"]
pub type MPCWM1FC_R = crate::BitReader;
#[doc = "Field `MPCWM1FC` writer - MPCWM1FC"]
pub type MPCWM1FC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCWM2FC` reader - MPCWM2FC"]
pub type MPCWM2FC_R = crate::BitReader;
#[doc = "Field `MPCWM2FC` writer - MPCWM2FC"]
pub type MPCWM2FC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCBB1FC` reader - MPCBB1FC"]
pub type MPCBB1FC_R = crate::BitReader;
#[doc = "Field `MPCBB1FC` writer - MPCBB1FC"]
pub type MPCBB1FC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCBB1_REGFC` reader - MPCBB1_REGFC"]
pub type MPCBB1_REGFC_R = crate::BitReader;
#[doc = "Field `MPCBB1_REGFC` writer - MPCBB1_REGFC"]
pub type MPCBB1_REGFC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCBB2FC` reader - MPCBB2FC"]
pub type MPCBB2FC_R = crate::BitReader;
#[doc = "Field `MPCBB2FC` writer - MPCBB2FC"]
pub type MPCBB2FC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPCBB2_REGFC` reader - MPCBB2_REGFC"]
pub type MPCBB2_REGFC_R = crate::BitReader;
#[doc = "Field `MPCBB2_REGFC` writer - MPCBB2_REGFC"]
pub type MPCBB2_REGFC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts one flag bit from the cached register value
// (`self.bits`) by shifting to the field's offset and masking the low bit.
impl R {
    #[doc = "Bit 0 - TZSCFC"]
    #[inline(always)]
    pub fn tzscfc(&self) -> TZSCFC_R {
        TZSCFC_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TZICFC"]
    #[inline(always)]
    pub fn tzicfc(&self) -> TZICFC_R {
        TZICFC_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - MPCWM1FC"]
    #[inline(always)]
    pub fn mpcwm1fc(&self) -> MPCWM1FC_R {
        MPCWM1FC_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - MPCWM2FC"]
    #[inline(always)]
    pub fn mpcwm2fc(&self) -> MPCWM2FC_R {
        MPCWM2FC_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - MPCBB1FC"]
    #[inline(always)]
    pub fn mpcbb1fc(&self) -> MPCBB1FC_R {
        MPCBB1FC_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - MPCBB1_REGFC"]
    #[inline(always)]
    pub fn mpcbb1_regfc(&self) -> MPCBB1_REGFC_R {
        MPCBB1_REGFC_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - MPCBB2FC"]
    #[inline(always)]
    pub fn mpcbb2fc(&self) -> MPCBB2FC_R {
        MPCBB2FC_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - MPCBB2_REGFC"]
    #[inline(always)]
    pub fn mpcbb2_regfc(&self) -> MPCBB2_REGFC_R {
        MPCBB2_REGFC_R::new(((self.bits >> 7) & 1) != 0)
    }
}
// Write accessors: each returns a single-bit writer proxy whose const
// generic parameter encodes the field's bit offset.
impl W {
    #[doc = "Bit 0 - TZSCFC"]
    #[inline(always)]
    #[must_use]
    pub fn tzscfc(&mut self) -> TZSCFC_W<FCR3_SPEC, 0> {
        TZSCFC_W::new(self)
    }
    #[doc = "Bit 1 - TZICFC"]
    #[inline(always)]
    #[must_use]
    pub fn tzicfc(&mut self) -> TZICFC_W<FCR3_SPEC, 1> {
        TZICFC_W::new(self)
    }
    #[doc = "Bit 2 - MPCWM1FC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcwm1fc(&mut self) -> MPCWM1FC_W<FCR3_SPEC, 2> {
        MPCWM1FC_W::new(self)
    }
    #[doc = "Bit 3 - MPCWM2FC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcwm2fc(&mut self) -> MPCWM2FC_W<FCR3_SPEC, 3> {
        MPCWM2FC_W::new(self)
    }
    #[doc = "Bit 4 - MPCBB1FC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcbb1fc(&mut self) -> MPCBB1FC_W<FCR3_SPEC, 4> {
        MPCBB1FC_W::new(self)
    }
    #[doc = "Bit 5 - MPCBB1_REGFC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcbb1_regfc(&mut self) -> MPCBB1_REGFC_W<FCR3_SPEC, 5> {
        MPCBB1_REGFC_W::new(self)
    }
    #[doc = "Bit 6 - MPCBB2FC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcbb2fc(&mut self) -> MPCBB2FC_W<FCR3_SPEC, 6> {
        MPCBB2FC_W::new(self)
    }
    #[doc = "Bit 7 - MPCBB2_REGFC"]
    #[inline(always)]
    #[must_use]
    pub fn mpcbb2_regfc(&mut self) -> MPCBB2_REGFC_W<FCR3_SPEC, 7> {
        MPCBB2_REGFC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Marked unsafe by the generator: caller takes responsibility for the
    // raw value written (here every bit pattern is accepted by hardware).
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TZIC interrupt clear register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fcr3::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fcr3::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FCR3_SPEC;
// 32-bit register.
impl crate::RegisterSpec for FCR3_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fcr3::R`](R) reader structure"]
impl crate::Readable for FCR3_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fcr3::W`](W) writer structure"]
impl crate::Writable for FCR3_SPEC {
    // No fields are cleared/set by writing the opposite value during modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FCR3 to value 0"]
impl crate::Resettable for FCR3_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Teaching example: Rust control flow (if/else, loop, while, for).
// Several bindings are intentionally unused, hence the allow below.
#![allow(unused_variables)]
fn main() {
    let number = 3;
    // if / else
    // ---------
    if number < 5 {
        println!("lower than five");
    } else if number == 5 {
        println!("equal to five");
    } else {
        println!("greater than five");
    }
    // condition must always evaluate to boolean
    // no falsy / truthy values
    let result = if number < 5 {
        "lower"
    } else {
        // 5 - ERROR: expected reference, found integer
        // (all values returning from all branches must
        // be the same type)
        "not lower"
    };
    // lack of semicolons make the block being executed an
    // expression returning that value which is then assigned
    // to the variable
    // loops
    // -----
    // loop {
    //     println!("infinite loop")
    // }
    // loop with no exit condition will be stuck forever
    let mut counter = 0;
    let result = loop {
        counter += 1;
        if counter == 10 {
            break counter * 2; // loop will stop and the value of the
                               // expression to the right will be
                               // returned from it
        }
    };
    // while
    // -----
    let mut number = 3; // shadows the earlier `number` binding
    while number != 0 {
        // loop will keep going until
        // condition is not true anymore
        println!("{}!", number);
        number -= 1;
    }
    println!("LIFTOFF!!!");
    // for
    // ---
    let a = [10, 20, 30, 40, 50];
    for element in a.iter() {
        println!("the value is: {}", element)
    }
    // using a Range
    for number in (1..4).rev() {
        // rev() reverses the range
        println!("{}!", number);
    }
    println!("LIFTOFF!!!");
}
|
use std::collections::BTreeSet;
use std::fs::read_dir;
use std::process::Command;
use insta::assert_snapshot;
/// Bit of a jank test, runs `cargo run -p hydroflow --example <EXAMPLE>` for all the
/// `example_*.rs` examples and uses `insta` to snapshot tests the stdout.
#[test]
fn test_all() {
    // Collect example_*.rs file names; BTreeSet keeps the iteration
    // (and therefore snapshot) order deterministic.
    let examples_files = read_dir("examples/")
        .unwrap()
        .flat_map(Result::ok)
        .filter(|entry| {
            entry
                .file_type()
                .map_or(false, |file_type| file_type.is_file())
        })
        .map(|entry| entry.file_name())
        .map(|filename| filename.into_string().unwrap())
        .filter(|filename| filename.starts_with("example_") && filename.ends_with(".rs"))
        .collect::<BTreeSet<_>>();
    for example_file in examples_files {
        let name = example_file.strip_suffix(".rs").unwrap();
        let output = Command::new("cargo")
            .args(["run", "-p", "hydroflow", "--example"])
            .arg(name)
            .output()
            .expect("Failed to run example.");
        // Fail loudly if the example itself failed, instead of silently
        // snapshotting the (likely empty) stdout of a broken run.
        assert!(
            output.status.success(),
            "example `{}` exited with {}; stderr:\n{}",
            name,
            output.status,
            String::from_utf8_lossy(&output.stderr),
        );
        let stdout = String::from_utf8_lossy(&output.stdout);
        assert_snapshot!(name, stdout);
    }
}
|
/// Piecewise-linear interpolator over a list of `[x, y]` control points,
/// assumed sorted by ascending x (unsorted points are matched first-segment-
/// wins, as before — TODO confirm callers always pass sorted points).
pub struct Interpolator {
    pub pts : Vec<[f32;2]>,
}
impl Interpolator {
    /// Linearly interpolates `input` over the first segment whose x-range
    /// contains it; returns `0.0` when `input` lies outside every segment,
    /// including when `pts` has fewer than two points.
    ///
    /// Fix: the original `0..(self.pts.len() - 1)` underflowed (and
    /// panicked) on an empty `pts`; `windows(2)` simply yields nothing
    /// for 0 or 1 points.
    pub fn at(&self, input : f32) -> f32 {
        for pair in self.pts.windows(2) {
            let [x0, y0] = pair[0];
            let [x1, y1] = pair[1];
            if x0 <= input && input <= x1 {
                // Fractional position within the segment, then lerp.
                // (x1 == x0 still yields NaN, matching the original.)
                let frac = (input - x0) / (x1 - x0);
                return frac * (y1 - y0) + y0;
            }
        }
        0f32
    }
}
|
#[doc = "Register `DEACHINTMSK` reader"]
pub type R = crate::R<DEACHINTMSK_SPEC>;
#[doc = "Register `DEACHINTMSK` writer"]
pub type W = crate::W<DEACHINTMSK_SPEC>;
#[doc = "Field `IEP1INTM` reader - IN Endpoint 1 interrupt mask bit"]
pub type IEP1INTM_R = crate::BitReader;
#[doc = "Field `IEP1INTM` writer - IN Endpoint 1 interrupt mask bit"]
pub type IEP1INTM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OEP1INTM` reader - OUT Endpoint 1 interrupt mask bit"]
pub type OEP1INTM_R = crate::BitReader;
#[doc = "Field `OEP1INTM` writer - OUT Endpoint 1 interrupt mask bit"]
pub type OEP1INTM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
#[doc = "Bit 1 - IN Endpoint 1 interrupt mask bit"]
#[inline(always)]
pub fn iep1intm(&self) -> IEP1INTM_R {
IEP1INTM_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 17 - OUT Endpoint 1 interrupt mask bit"]
#[inline(always)]
pub fn oep1intm(&self) -> OEP1INTM_R {
OEP1INTM_R::new(((self.bits >> 17) & 1) != 0)
}
}
impl W {
    #[doc = "Bit 1 - IN Endpoint 1 interrupt mask bit"]
    #[inline(always)]
    #[must_use]
    pub fn iep1intm(&mut self) -> IEP1INTM_W<DEACHINTMSK_SPEC, 1> {
        IEP1INTM_W::new(self)
    }
    #[doc = "Bit 17 - OUT Endpoint 1 interrupt mask bit"]
    #[inline(always)]
    #[must_use]
    pub fn oep1intm(&mut self) -> OEP1INTM_W<DEACHINTMSK_SPEC, 17> {
        OEP1INTM_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: passes the raw value straight through; the caller is
        // responsible for supplying a bit pattern valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "OTG_HS device each endpoint interrupt register mask\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`deachintmsk::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`deachintmsk::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type describing the DEACHINTMSK register: 32-bit, resets to 0.
pub struct DEACHINTMSK_SPEC;
impl crate::RegisterSpec for DEACHINTMSK_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`deachintmsk::R`](R) reader structure"]
impl crate::Readable for DEACHINTMSK_SPEC {}
#[doc = "`write(|w| ..)` method takes [`deachintmsk::W`](W) writer structure"]
impl crate::Writable for DEACHINTMSK_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DEACHINTMSK to value 0"]
impl crate::Resettable for DEACHINTMSK_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate regex;
use std::io::{stdin,stdout,Write};
use std::process::Command;
pub mod mods;
pub use mods::parser::Node;
/// Demo driver: runs a built-in `fibonacci` sample through the lexer,
/// parser, and both back ends (TypeScript and native codegen), then
/// compiles a single line read interactively from stdin.
fn main() {
    /*main = { [];const x = "Hello World!";println!("{}",x);mut y = "Hello World!";println!("{}",y);y = "Bye!";println!("{}",y);}*/
    let big = "
fibonacci = { [(n:u32)->u128];
mut a = 0;
mut b = 1;
for { [i in 0..n-2];
const c = a + b;
a = b;
b = c;
};
return|b;
}
";
    let vec5 = mods::lex(big.to_string());
    let ast5 = mods::parse(vec5);
    // Keep a second copy: `tsgen` and `codegen` each consume an AST.
    let ast5d = ast5.clone();
    let ts_types = "type u8 = number;type u16 = number;type u32 = number;type u64 = number;type u128 = number;type i8 = number;type i16 = number;type i32 = number;type i64 = number;type i128 = number;";
    println!("{}{}", ts_types, mods::tsgen(ast5));
    println!("-----------------------");
    println!("{}", mods::codegen(ast5d));
    let mut s = String::new();
    print!("> ");
    // Best-effort flush so the prompt appears before blocking on stdin.
    let _ = stdout().flush();
    stdin()
        .read_line(&mut s)
        .expect("Did not enter a correct string");
    // Strip the trailing "\n" or "\r\n" left by read_line.
    if let Some('\n') = s.chars().next_back() {
        s.pop();
    }
    if let Some('\r') = s.chars().next_back() {
        s.pop();
    }
    // `s` is already an owned String — the former `s.to_string()` cloned
    // it for nothing.
    let vec = mods::lex(s);
    let ast = mods::parse(vec);
    println!("{}", mods::codegen(ast));
}
|
impl Solution {
pub fn check_possibility(nums: Vec<i32>) -> bool {
let n = nums.len();
let mut cnt = 0;
//让数组可变
let mut nums = nums;
for i in 0..n-1{
let x = nums[i];
let y = nums[i+1];
if x > y{
cnt += 1 ;
if cnt > 1{
return false;
}
if i > 0 && nums[i+1] < nums[i-1]{
nums[i+1] = nums[i];
}
}
}
true
}
} |
use proconio::{fastout, input};
const MOD: i64 = 1_000_000_000 + 7;
/// Chooses `k` of the `n` numbers maximizing their product, printed mod 1e9+7.
///
/// `s` holds the non-negative inputs, `t` the negatives; `_ok` records
/// whether a non-negative product is achievable at all.
#[fastout]
fn main() {
    input! {
        n: usize,
        k: usize,
        mut a_vec: [i64; n],
    };
    let (mut s, mut t): (Vec<i64>, Vec<i64>) = (Vec::new(), Vec::new());
    a_vec.iter().for_each(|a| {
        if *a >= 0 {
            s.push(*a);
        } else {
            t.push(*a);
        }
    });
    let s_len = s.len();
    let t_len = t.len();
    let mut _ok: bool = false;
    if s_len > 0 {
        if n == k {
            // Forced to take everything: sign is fixed by the negatives.
            _ok = t_len % 2 == 0;
        } else {
            _ok = true;
        }
    } else {
        // All negative: an even count of factors gives a positive product.
        _ok = k % 2 == 0;
    }
    let mut ans = 1;
    if !_ok {
        // The product is necessarily negative, so the best we can do is
        // the product of the k smallest absolute values.
        a_vec.sort_by_key(|a| a.abs());
        for i in 0..k {
            // Reduce on every step: the previous unreduced product of up
            // to k ~1e9-magnitude values overflowed i64 almost at once.
            ans = ans * (a_vec[i] % MOD) % MOD;
        }
        // Normalize the (possibly negative) residue into [0, MOD); the
        // previous `(ans % MOD) + MOD` could print MOD instead of 0.
        ans = (ans % MOD + MOD) % MOD;
    } else {
        s.sort();
        t.sort_by(|a, b| b.cmp(a));
        if k % 2 == 1 {
            // An odd pick needs one lone non-negative factor.
            ans *= s.pop().expect("");
            ans %= MOD;
        }
        // Candidate pairs: both same-sign products are non-negative.
        let mut p: Vec<i64> = Vec::new();
        while s.len() >= 2 {
            let x = s.pop().expect("") * s.pop().expect("") % MOD;
            p.push(x);
        }
        while t.len() >= 2 {
            let x = t.pop().expect("") * t.pop().expect("") % MOD;
            p.push(x);
        }
        p.sort_by(|a, b| b.cmp(a));
        for i in 0..(k / 2) {
            ans *= p[i];
            ans %= MOD;
        }
    }
    println!("{}", ans);
}
|
use itertools::Itertools;
use std::collections::HashSet;
use std::env;
use std::iter::FromIterator;
use std::str::FromStr;
/// Expands a comma-separated wire description ("R8,U5,...") into the
/// ordered list of grid points the wire passes through (origin excluded,
/// each segment's endpoint included).
fn parse_path(path_str: &str) -> Vec<(i32, i32)> {
    let mut point = (0, 0);
    path_str
        .split(',')
        .flat_map(|segment| {
            let start = point;
            let (dir, count) = segment.split_at(1);
            let count = i32::from_str(count).expect("Couldn't parse count path");
            match dir {
                "U" => {
                    point.1 += count;
                    (start.1..start.1 + count)
                        .map(|y| (start.0, y + 1))
                        .collect::<Vec<(i32, i32)>>()
                }
                "D" => {
                    point.1 -= count;
                    // Walk downwards through y-1, y-2, ..., y-count. The
                    // previous version mapped `y + 1`, which re-included
                    // the starting cell and missed the segment endpoint.
                    (start.1 - count..start.1)
                        .rev()
                        .map(|y| (start.0, y))
                        .collect::<Vec<(i32, i32)>>()
                }
                "L" => {
                    point.0 -= count;
                    // Same off-by-one fix as "D", applied to the x axis.
                    (start.0 - count..start.0)
                        .rev()
                        .map(|x| (x, start.1))
                        .collect::<Vec<(i32, i32)>>()
                }
                "R" => {
                    point.0 += count;
                    (start.0..start.0 + count)
                        .map(|x| (x + 1, start.1))
                        .collect::<Vec<(i32, i32)>>()
                }
                c => panic!("Unknown direction {} encountered", c),
            }
        })
        .collect()
}
/// Returns the smallest Manhattan distance from the origin to any point
/// shared by both paths, or `None` when the paths never intersect.
fn find_smallest_intersection_distance(
    path1: Vec<(i32, i32)>,
    path2: Vec<(i32, i32)>,
) -> Option<i32> {
    HashSet::<(i32, i32)>::from_iter(path1)
        .intersection(&HashSet::<(i32, i32)>::from_iter(path2))
        // Manhattan distance is |x| + |y|; the previous raw `x + y`
        // undercounted (or went negative) for points with negative
        // coordinates.
        .map(|(x, y)| x.abs() + y.abs())
        .min()
}
/// Solves part 1: prints the closest (Manhattan) wire intersection.
fn part1(input: &str) {
    let mut lines = input.lines();
    let first = parse_path(lines.next().expect("First path missing from input"));
    let second = parse_path(lines.next().expect("Second path missing from input"));
    let dist = find_smallest_intersection_distance(first, second)
        .expect("could not find an intersection");
    println!("Smallest intersection is {}", dist);
}
// NOTE(review): part 2 is unfinished — the paths are parsed but the
// path-length computation is still commented out below.
fn part2(input: &str) {
    let mut input_lines = input.lines();
    let path1 = parse_path(input_lines.next().expect("First path missing from input"));
    let path2 = parse_path(input_lines.next().expect("Second path missing from input"));
    // let dist = find_smallest_intersection_path_length(&path1, &path2)
    //     .expect("could not find an intersection");
    //
    // println!("Shortest sum of distances to an intersection is {}", dist);
}
/// Dispatches to part1/part2 based on the first CLI argument.
fn main() {
    let input = include_str!("../../input/day03/input");
    let mut args = env::args();
    let prog_name = args.next().expect("unable to get program name");
    match args.next() {
        Some(mode) if mode == "part1" => part1(input),
        Some(mode) if mode == "part2" => part2(input),
        _ => {
            eprintln!("usage: {} (part1|part2)", prog_name);
            std::process::exit(1);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn aoc19_day3_part1_example_1() {
        let path1 = parse_path("R8,U5,L5,D3");
        let path2 = parse_path("U7,R6,D4,L4");
        println!("Path 1: {:?}, Path 2: {:?}", &path1, &path2);
        let dist = find_smallest_intersection_distance(path1, path2)
            .expect("could not find an intersection");
        // The stray `panic!("bad")` debugging leftover that followed this
        // assertion made the test fail unconditionally; it is removed.
        assert_eq!(dist, 6);
    }
    #[test]
    fn aoc19_day3_part1_example_2() {
        let path1 = parse_path("R75,D30,R83,U83,L12,D49,R71,U7,L72");
        let path2 = parse_path("U62,R66,U55,R34,D71,R55,D58,R83");
        let dist = find_smallest_intersection_distance(path1, path2)
            .expect("could not find an intersection");
        assert_eq!(dist, 159);
    }
    #[test]
    fn aoc19_day3_part1_example_3() {
        let path1 = parse_path("R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51");
        let path2 = parse_path("U98,R91,D20,R16,D67,R40,U7,R15,U6,R7");
        let dist = find_smallest_intersection_distance(path1, path2)
            .expect("could not find an intersection");
        assert_eq!(dist, 135);
    }
    #[test]
    fn aoc19_day3_part2_example_1() {
        let path1 = parse_path("R8,U5,L5,D3");
        let path2 = parse_path("U7,R6,D4,L4");
        // let dist = find_smallest_intersection_path_length(&path1, &path2)
        //     .expect("could not find an intersection");
        //
        // assert_eq!(dist, 30);
    }
    #[test]
    fn aoc19_day3_part2_example_2() {
        let path1 = parse_path("R75,D30,R83,U83,L12,D49,R71,U7,L72");
        let path2 = parse_path("U62,R66,U55,R34,D71,R55,D58,R83");
        // let dist = find_smallest_intersection_path_length(&path1, &path2)
        //     .expect("could not find an intersection");
        //
        // assert_eq!(dist, 610);
    }
    #[test]
    fn aoc19_day3_part2_example_3() {
        let path1 = parse_path("R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51");
        let path2 = parse_path("U98,R91,D20,R16,D67,R40,U7,R15,U6,R7");
        // let dist = find_smallest_intersection_path_length(&path1, &path2)
        //     .expect("could not find an intersection");
        //
        // assert_eq!(dist, 410);
    }
}
|
use lazy_static::lazy_static;
use pulldown_cmark::{Event, Tag};
use regex::Regex;
use std::iter::Peekable;
/// Markdown post-processor that replaces a paragraph consisting solely of
/// a bare YouTube watch URL with embedded-player HTML.
pub struct EmbedYoutube<'a, I: Iterator<Item = Event<'a>>> {
    // Underlying event stream; peekable so a lone URL can be detected
    // before committing to consume the paragraph.
    parent: Peekable<I>,
}
impl<'a, I: Iterator<Item = Event<'a>>> EmbedYoutube<'a, I> {
pub fn new(parent: I) -> Self {
Self {
parent: parent.peekable(),
}
}
}
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for EmbedYoutube<'a, I> {
    type Item = Event<'a>;
    /// Passes events through unchanged, except that a paragraph whose only
    /// content is a bare YouTube watch URL becomes an HTML embed event.
    fn next(&mut self) -> Option<Self::Item> {
        // Only a paragraph start can open an embeddable link.
        let start = match self.parent.next()? {
            start @ Event::Start(Tag::Paragraph) => start,
            other => return Some(other),
        };
        // Peek (don't consume) the next event so the paragraph start can
        // still be emitted untouched when this is not a bare link.
        let text = match self.parent.peek()? {
            Event::Text(ref text) => text,
            _ => return Some(start),
        };
        lazy_static! {
            static ref RE: Regex =
                Regex::new(r"^https?://www\.youtube\.com/watch\?v=([A-Za-z0-9_-]+)$").unwrap();
        }
        let captures = match RE.captures(text) {
            Some(captures) => captures,
            _ => return Some(start),
        };
        let video_ref = &captures[1];
        let embedded = format!(
            r#"
<div class="video-wrapper">
<div class="video-container">
<iframe src="//www.youtube.com/embed/{video_ref}" frameborder="0" allowfullscreen="1">
</iframe>
</div>
</div>"#
        );
        // Consume the text and ending paragraph, which we should replace with the embedded video.
        self.parent.next(); // Skip the Text
        // NOTE(review): a matching URL followed by anything other than the
        // paragraph end (e.g. a soft break plus more text) panics here —
        // confirm site content never triggers this.
        if self.parent.next() != Some(Event::End(Tag::Paragraph)) {
            panic!("Bare Youtube link not ending with a paragraph");
        }
        Some(Event::Html(embedded.into()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pulldown_cmark::{html, Options, Parser};
    // Renders markdown through the EmbedYoutube transformer into HTML.
    fn convert(s: &str) -> String {
        let parser = Parser::new_ext(s, Options::all());
        let transformed = EmbedYoutube::new(parser);
        let mut body = String::new();
        html::push_html(&mut body, transformed);
        body
    }
    // A URL-only paragraph between two text paragraphs becomes an iframe.
    #[test]
    fn test_embed_youtube() {
        let s = "Before
http://www.youtube.com/watch?v=eoKDyhxCVm0
After";
        assert_eq!(
            convert(s).trim_end(),
            r#"<p>Before</p>
<div class="video-wrapper">
<div class="video-container">
<iframe src="//www.youtube.com/embed/eoKDyhxCVm0" frameborder="0" allowfullscreen="1">
</iframe>
</div>
</div>
<p>After</p>"#
        );
    }
}
|
use crate::vec::{vec3, Vec3};
use rand::prelude::*;
use rand::rngs::SmallRng;
/// Converts an angle from degrees to radians.
#[allow(dead_code)]
pub fn degrees_to_radians(degrees: f64) -> f64 {
    // Same expression and operation order as before, expressed as a tail
    // expression instead of an explicit `return`.
    degrees * std::f64::consts::PI / 180.0
}
/// Restricts `x` to the closed interval [`min`, `max`].
/// (NaN compares false on both branches and is returned unchanged,
/// matching the original early-return version.)
pub fn clamp(x: f64, min: f64, max: f64) -> f64 {
    if x < min {
        min
    } else if x > max {
        max
    } else {
        x
    }
}
/// Draws a uniformly distributed `f64` from `rng`.
///
/// NOTE(review): `rng.gen::<f64>()` yields values in [0, 1) under the
/// `rand` crate's `Standard` distribution — confirm against the crate
/// version in use.
pub fn random_double(rng: &mut SmallRng) -> f64 {
    rng.gen()
}
/// Samples a direction toward a sphere of `radius` whose center lies at
/// squared distance `dist_squared`, expressed in the local frame.
pub fn random_to_sphere(radius: f64, dist_squared: f64, rng: &mut SmallRng) -> Vec3 {
    // Draw the two uniforms in the same order as before so RNG
    // consumption (and thus downstream sampling) is unchanged.
    let u = random_double(rng);
    let v = random_double(rng);
    let phi = 2.0 * std::f64::consts::PI * u;
    let z = 1.0 + v * (f64::sqrt(1.0 - radius * radius / dist_squared) - 1.0);
    // Shared factor of the x and y components (was computed twice before;
    // same operations, identical results).
    let sin_theta = f64::sqrt(1.0 - z * z);
    vec3(f64::cos(phi) * sin_theta, f64::sin(phi) * sin_theta, z)
}
|
use std::env;
use bson::doc;
use crate::Client;
type Result<T> = anyhow::Result<T>;
/// RAII guard that overrides a set of environment variables and restores
/// their previous values (or removes them) when dropped.
struct TempVars {
    // (name, value-before-override); `None` means the variable was unset.
    restore: Vec<(&'static str, Option<std::ffi::OsString>)>,
}
impl TempVars {
    /// Applies every `(name, value)` override, remembering prior state.
    #[must_use]
    fn set(vars: &[(&'static str, &str)]) -> TempVars {
        let restore = vars
            .iter()
            .map(|(name, value)| {
                let previous = env::var_os(name);
                env::set_var(name, value);
                (*name, previous)
            })
            .collect();
        Self { restore }
    }
}
impl Drop for TempVars {
    fn drop(&mut self) {
        // Restore in the same (insertion) order as the original loop.
        for (name, previous) in self.restore.drain(..) {
            match previous {
                Some(value) => env::set_var(name, value),
                None => env::remove_var(name),
            }
        }
    }
}
/// Sets the given env vars for the duration of the call, builds a test
/// client, and runs one operation so the connection handshake executes
/// under the simulated FaaS environment.
async fn check_faas_handshake(vars: &[(&'static str, &str)]) -> Result<()> {
    // Guard must stay alive until after the operation so the overridden
    // variables are visible during the handshake.
    let _tv = TempVars::set(vars);
    let client = Client::test_builder().build().await;
    client.list_database_names(doc! {}, None).await?;
    Ok(())
}
// Each test below simulates a single FaaS provider's environment variables
// and verifies the handshake still succeeds.
/// Simulated AWS Lambda environment.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn valid_aws() -> Result<()> {
    check_faas_handshake(&[
        ("AWS_EXECUTION_ENV", "AWS_Lambda_java8"),
        ("AWS_REGION", "us-east-2"),
        ("AWS_LAMBDA_FUNCTION_MEMORY_SIZE", "1024"),
    ])
    .await
}
/// Simulated Azure Functions environment.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn valid_azure() -> Result<()> {
    check_faas_handshake(&[("FUNCTIONS_WORKER_RUNTIME", "node")]).await
}
/// Simulated Google Cloud Functions environment.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn valid_gcp() -> Result<()> {
    check_faas_handshake(&[
        ("K_SERVICE", "servicename"),
        ("FUNCTION_MEMORY_MB", "1024"),
        ("FUNCTION_TIMEOUT_SEC", "60"),
        ("FUNCTION_REGION", "us-central1"),
    ])
    .await
}
/// Simulated Vercel environment.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn valid_vercel() -> Result<()> {
    check_faas_handshake(&[
        ("VERCEL", "1"),
        ("VERCEL_URL", "*.vercel.app"),
        ("VERCEL_REGION", "cdg1"),
    ])
    .await
}
// Invalid/conflicting FaaS metadata: the handshake must still complete
// (presumably the bad metadata is simply omitted — these only assert the
// operation succeeds, not what metadata was sent).
/// Two providers' variables set at once.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn invalid_multiple_providers() -> Result<()> {
    check_faas_handshake(&[
        ("AWS_EXECUTION_ENV", "AWS_Lambda_java8"),
        ("FUNCTIONS_WORKER_RUNTIME", "node"),
    ])
    .await
}
/// Oversized (512-char) region string.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn invalid_long_string() -> Result<()> {
    check_faas_handshake(&[
        ("AWS_EXECUTION_ENV", "AWS_Lambda_java8"),
        ("AWS_REGION", &"a".repeat(512)),
    ])
    .await
}
/// Non-numeric memory size.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn invalid_wrong_type() -> Result<()> {
    check_faas_handshake(&[
        ("AWS_EXECUTION_ENV", "AWS_Lambda_java8"),
        ("AWS_LAMBDA_FUNCTION_MEMORY_SIZE", "big"),
    ])
    .await
}
/// AWS execution env that is not a Lambda runtime.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn invalid_aws_not_lambda() -> Result<()> {
    check_faas_handshake(&[("AWS_EXECUTION_ENV", "EC2")]).await
}
|
use schema::TextOptions;
use schema::U32Options;
use rustc_serialize::json::Json;
use schema::Value;
/// Possible error that may occur while parsing a field value
/// At this point the JSON is known to be valid.
#[derive(Debug)]
pub enum ValueParsingError {
    /// Encountered a numerical value that overflows or underflows its integer type.
    OverflowError(String),
    /// The json node is not of the correct type. (e.g. 3 for a `Str` type or `"abc"` for a u32 type)
    /// Tantivy will try to autocast values.
    TypeError(String),
}
/// A `FieldType` describes the type (text, u32) of a field as well as
/// how it should be handled by tantivy.
#[derive(Clone, Debug, RustcDecodable, RustcEncodable)]
pub enum FieldType {
    /// String field type configuration
    Str(TextOptions),
    /// U32 field type configuration
    U32(U32Options),
}
impl FieldType {
/// Parses a field value from json, given the target FieldType.
///
/// Tantivy will not try to cast values.
/// For instance, If the json value is the integer `3` and the
/// target field is a `Str`, this method will return an Error.
pub fn value_from_json(&self, json: &Json) -> Result<Value, ValueParsingError> {
match *json {
Json::String(ref field_text) => {
match *self {
FieldType::Str(_) => {
Ok(Value::Str(field_text.clone()))
}
FieldType::U32(_) => {
Err(ValueParsingError::TypeError(format!("Expected a u32 int, got {:?}", json)))
}
}
}
Json::U64(ref field_val_u64) => {
match *self {
FieldType::U32(_) => {
if *field_val_u64 > (u32::max_value() as u64) {
Err(ValueParsingError::OverflowError(format!("Expected u32, but value {:?} overflows.", field_val_u64)))
}
else {
Ok(Value::U32(*field_val_u64 as u32))
}
}
_ => {
Err(ValueParsingError::TypeError(format!("Expected a string, got {:?}", json)))
}
}
},
_ => {
Err(ValueParsingError::TypeError(format!("Expected a string or a u32, got {:?}", json)))
}
}
}
} |
use super::*;
/// 15-bit color (5 bits per channel) packed into a `u16`.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct Color(pub u16);
impl Color {
    const_new!();
    // Channel layout: red bits 0..=4, green 5..=9, blue 10..=14
    // (bit 15 unused).
    bitfield_int!(u16; 0..=4: u8, red, with_red, set_red);
    bitfield_int!(u16; 5..=9: u8, green, with_green, set_green);
    bitfield_int!(u16; 10..=14: u8, blue, with_blue, set_blue);
}
impl Color {
    /// Packs three channel intensities into the 15-bit layout
    /// (red low, blue high).
    ///
    /// NOTE(review): inputs are not masked, so values above 31 spill into
    /// the neighboring channel — callers presumably pass 5-bit values.
    pub const fn from_rgb(red: u8, green: u8, blue: u8) -> Self {
        let packed = ((blue as u16) << 10) | ((green as u16) << 5) | (red as u16);
        Self(packed)
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Implementation of the trait instance and the instance structures implementing it.
//! (For not instantiable traits there is still the inherent instance implemented).
use super::DeclStorageDefExt;
use proc_macro2::{Span, TokenStream};
use quote::quote;
// Number of well-known `Instance0..Instance15` structs generated below.
const NUMBER_OF_INSTANCE: usize = 16;
pub(crate) const INHERENT_INSTANCE_NAME: &str = "__InherentHiddenInstance";
// Used to generate an instance implementation.
struct InstanceDef {
    // Storage-prefix string baked into the generated `Instance` impl.
    prefix: String,
    // Identifier of the generated unit struct.
    instance_struct: syn::Ident,
    // Doc attribute(s) attached to the generated struct.
    doc: TokenStream,
}
/// Declares the 16 well-known `Instance0..Instance15` structs (plus the
/// optional default instance and the hidden inherent instance) and
/// implements the instance trait for each.
pub fn decl_and_impl(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStream {
    let mut impls = TokenStream::new();
    impls.extend(reexport_instance_trait(scrate, def));
    // Implementation of instances.
    if let Some(module_instance) = &def.module_instance {
        let instance_defs = (0..NUMBER_OF_INSTANCE)
            .map(|i| {
                let name = format!("Instance{}", i);
                InstanceDef {
                    instance_struct: syn::Ident::new(&name, proc_macro2::Span::call_site()),
                    prefix: name,
                    doc: quote!(#[doc=r"Module instance"]),
                }
            })
            // The default instance (when declared) reuses the caller's own
            // ident and an empty storage prefix.
            .chain(module_instance.instance_default.as_ref().map(|ident| InstanceDef {
                prefix: String::new(),
                instance_struct: ident.clone(),
                doc: quote!(#[doc=r"Default module instance"]),
            }));
        for instance_def in instance_defs {
            impls.extend(create_and_impl_instance_struct(scrate, &instance_def, def));
        }
    }
    // The name of the inherently available instance.
    let inherent_instance = syn::Ident::new(INHERENT_INSTANCE_NAME, Span::call_site());
    // Implementation of inherent instance.
    if let Some(default_instance) =
        def.module_instance.as_ref().and_then(|i| i.instance_default.as_ref())
    {
        // A declared default instance makes the hidden inherent instance a
        // plain type alias for it.
        impls.extend(quote! {
            /// Hidden instance generated to be internally used when module is used without
            /// instance.
            #[doc(hidden)]
            pub type #inherent_instance = #default_instance;
        });
    } else {
        // Otherwise generate a dedicated hidden struct for it.
        let instance_def = InstanceDef {
            prefix: String::new(),
            instance_struct: inherent_instance,
            doc: quote!(
                /// Hidden instance generated to be internally used when module is used without
                /// instance.
                #[doc(hidden)]
            ),
        };
        impls.extend(create_and_impl_instance_struct(scrate, &instance_def, def));
    }
    impls
}
/// Re-exports `frame_support::traits::Instance` under the module's own
/// instance-trait name, when the module is instantiable.
fn reexport_instance_trait(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStream {
    match def.module_instance.as_ref() {
        Some(instance) => {
            let instance_trait = &instance.instance_trait;
            quote!(
                /// Local import of frame_support::traits::Instance
                // This import is not strictly needed but made in order not to have breaking change.
                use #scrate::traits::Instance as #instance_trait;
            )
        }
        None => quote!(),
    }
}
/// Emits one instance unit struct plus its `Instance` trait impl, with the
/// storage prefix `"{instance prefix}{crate name}"`.
fn create_and_impl_instance_struct(
    scrate: &TokenStream,
    instance_def: &InstanceDef,
    def: &DeclStorageDefExt,
) -> TokenStream {
    let instance_trait = quote!( #scrate::traits::Instance );
    let instance_struct = &instance_def.instance_struct;
    // `format!` renders `crate_name` via `Display` already; the previous
    // explicit `.to_string()` allocated an intermediate string for nothing.
    let prefix = format!("{}{}", instance_def.prefix, def.crate_name);
    let doc = &instance_def.doc;
    quote! {
        // Those trait are derived because of wrong bounds for generics
        #[derive(
            Clone, Eq, PartialEq,
            #scrate::codec::Encode,
            #scrate::codec::Decode,
            #scrate::RuntimeDebug,
        )]
        #doc
        pub struct #instance_struct;
        impl #instance_trait for #instance_struct {
            const PREFIX: &'static str = #prefix;
        }
    }
}
|
use rocket::fairing::{Fairing, Info, Kind};
use rocket::http::Header;
use rocket::{http::Method, http::Status, Request, Response};
use std::io::Cursor;
use crate::config::CONFIG;
/// Rocket fairing that attaches CORS headers to every response.
pub struct CorsFairing;
impl Fairing for CorsFairing {
    fn info(&self) -> Info {
        Info {
            name: "Add CORS headers",
            // Only response callbacks are needed; requests pass through.
            kind: Kind::Response,
        }
    }
    fn on_response(&self, request: &Request, response: &mut Response) {
        // Allowed origin comes from the application configuration.
        response.set_header(Header::new(
            "Access-Control-Allow-Origin",
            &CONFIG.allowed_origin,
        ));
        response.set_header(Header::new(
            "Access-Control-Allow-Methods",
            "DELETE, GET, HEAD, OPTIONS, PATCH, POST, PUT",
        ));
        response.set_header(Header::new("Access-Control-Allow-Headers", "*"));
        response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
        // NOTE: replace status code and body if not found
        // An unmatched OPTIONS request (no explicit route) is rewritten
        // from 404 into an empty 204 — presumably so browser CORS
        // preflights succeed; confirm against the route table.
        if response.status() == Status::NotFound && request.method() == Method::Options {
            response.set_status(Status::NoContent);
            response.set_sized_body(Cursor::new(""));
        }
    }
}
|
/// Compass headings plus the three relative commands from the input.
#[derive(Debug, PartialEq)]
pub enum Direction {
    North,
    South,
    East,
    West,
    Left,
    Right,
    Forward,
}
/// One parsed input line: a direction letter plus a magnitude.
#[derive(Debug, PartialEq)]
pub struct Instruction {
    direction: Direction,
    units: i32,
}
/// Ship state shared by both puzzle parts.
#[derive(Debug, PartialEq)]
pub struct Ship {
    instructions: Vec<Instruction>,
    // Heading used by part 1's Forward moves.
    curr_direction: Direction,
    curr_position: (i32, i32), //north-south, east-west
    // Part 2 waypoint magnitudes along the two axis directions below.
    waypoint: (i32, i32),
    waypoint_directions: (Direction, Direction),
}
impl Ship {
    /// Part 1: N/S/E/W translate the ship, L/R rotate `curr_direction`,
    /// F advances along `curr_direction`.
    pub fn process_instructions(&mut self) {
        for i in self.instructions.iter() {
            match i.direction {
                Direction::North => self.curr_position.0 += i.units,
                Direction::South => self.curr_position.0 -= i.units,
                Direction::East => self.curr_position.1 += i.units,
                Direction::West => self.curr_position.1 -= i.units,
                Direction::Left => {
                    self.curr_direction =
                        Ship::change_direction(&self.curr_direction, Direction::Left, i.units)
                }
                Direction::Right => {
                    self.curr_direction =
                        Ship::change_direction(&self.curr_direction, Direction::Right, i.units)
                }
                Direction::Forward => match self.curr_direction {
                    Direction::North => self.curr_position.0 += i.units,
                    Direction::South => self.curr_position.0 -= i.units,
                    Direction::East => self.curr_position.1 += i.units,
                    Direction::West => self.curr_position.1 -= i.units,
                    _ => panic!("Invalid current direction"),
                },
            }
        }
    }
    /// Rotates `original_direction` by `units` degrees (90/180/270)
    /// to the left or right; panics for any other `turn_direction`.
    pub fn change_direction(
        original_direction: &Direction,
        turn_direction: Direction,
        units: i32,
    ) -> Direction {
        match turn_direction {
            Direction::Left => {
                return Ship::turn_left(original_direction, units);
            }
            Direction::Right => {
                return Ship::turn_right(original_direction, units);
            }
            _ => panic!("Only L and R change directions!"),
        }
    }
    /// Part 2: N/S/E/W move the waypoint, L/R rotate the waypoint's axis
    /// directions, F moves the ship toward the waypoint `units` times.
    pub fn process_instructions2(&mut self) {
        for i in self.instructions.iter() {
            match i.direction {
                Direction::North => {
                    match self.waypoint_directions.0 {
                        Direction::North => self.waypoint.0 += i.units,
                        Direction::South => self.waypoint.0 -= i.units,
                        _ => (),
                    }
                    match self.waypoint_directions.1 {
                        Direction::North => self.waypoint.1 += i.units,
                        Direction::South => self.waypoint.1 -= i.units,
                        _ => (),
                    }
                }
                Direction::South => {
                    match self.waypoint_directions.0 {
                        Direction::North => self.waypoint.0 -= i.units,
                        Direction::South => self.waypoint.0 += i.units,
                        _ => (),
                    }
                    match self.waypoint_directions.1 {
                        Direction::North => self.waypoint.1 -= i.units,
                        Direction::South => self.waypoint.1 += i.units,
                        _ => (),
                    }
                }
                Direction::East => {
                    match self.waypoint_directions.0 {
                        Direction::East => self.waypoint.0 += i.units,
                        Direction::West => self.waypoint.0 -= i.units,
                        _ => (),
                    }
                    match self.waypoint_directions.1 {
                        Direction::East => self.waypoint.1 += i.units,
                        Direction::West => self.waypoint.1 -= i.units,
                        _ => (),
                    }
                }
                Direction::West => {
                    match self.waypoint_directions.0 {
                        Direction::East => self.waypoint.0 -= i.units,
                        Direction::West => self.waypoint.0 += i.units,
                        _ => (),
                    }
                    match self.waypoint_directions.1 {
                        Direction::East => self.waypoint.1 -= i.units,
                        Direction::West => self.waypoint.1 += i.units,
                        _ => (),
                    }
                }
                Direction::Left => {
                    // Rotating the waypoint = rotating both axis labels.
                    self.waypoint_directions.0 = Ship::change_direction(
                        &self.waypoint_directions.0,
                        Direction::Left,
                        i.units,
                    );
                    self.waypoint_directions.1 = Ship::change_direction(
                        &self.waypoint_directions.1,
                        Direction::Left,
                        i.units,
                    );
                }
                Direction::Right => {
                    self.waypoint_directions.0 = Ship::change_direction(
                        &self.waypoint_directions.0,
                        Direction::Right,
                        i.units,
                    );
                    self.waypoint_directions.1 = Ship::change_direction(
                        &self.waypoint_directions.1,
                        Direction::Right,
                        i.units,
                    );
                }
                Direction::Forward => {
                    // Step toward the waypoint one waypoint-offset at a time.
                    for _ in 0..i.units {
                        match self.waypoint_directions.0 {
                            Direction::North => self.curr_position.0 += self.waypoint.0,
                            Direction::South => self.curr_position.0 -= self.waypoint.0,
                            Direction::East => self.curr_position.1 += self.waypoint.0,
                            Direction::West => self.curr_position.1 -= self.waypoint.0,
                            _ => panic!("Waypoint direction 0 should be in {N, S, E, W}"),
                        }
                        match self.waypoint_directions.1 {
                            Direction::North => self.curr_position.0 += self.waypoint.1,
                            Direction::South => self.curr_position.0 -= self.waypoint.1,
                            Direction::East => self.curr_position.1 += self.waypoint.1,
                            Direction::West => self.curr_position.1 -= self.waypoint.1,
                            _ => panic!("Waypoint direction 1 should be in {N, S, E, W}"),
                        }
                    }
                }
            }
        }
    }
    // Counter-clockwise rotation table for 90/180/270 degrees.
    fn turn_left(original_direction: &Direction, units: i32) -> Direction {
        match (original_direction, units) {
            (Direction::North, 90) => Direction::West,
            (Direction::North, 180) => Direction::South,
            (Direction::North, 270) => Direction::East,
            (Direction::South, 90) => Direction::East,
            (Direction::South, 180) => Direction::North,
            (Direction::South, 270) => Direction::West,
            (Direction::East, 90) => Direction::North,
            (Direction::East, 180) => Direction::West,
            (Direction::East, 270) => Direction::South,
            (Direction::West, 90) => Direction::South,
            (Direction::West, 180) => Direction::East,
            (Direction::West, 270) => Direction::North,
            (Direction::Left, _) | (Direction::Right, _) | (Direction::Forward, _) => {
                panic!("curr_direction needs to be one of {N, S, E, W}")
            }
            _ => panic!("Turn left failed"),
        }
    }
    // Clockwise rotation table for 90/180/270 degrees.
    fn turn_right(original_direction: &Direction, units: i32) -> Direction {
        match (original_direction, units) {
            (Direction::North, 90) => Direction::East,
            (Direction::North, 180) => Direction::South,
            (Direction::North, 270) => Direction::West,
            (Direction::South, 90) => Direction::West,
            (Direction::South, 180) => Direction::North,
            (Direction::South, 270) => Direction::East,
            (Direction::East, 90) => Direction::South,
            (Direction::East, 180) => Direction::West,
            (Direction::East, 270) => Direction::North,
            (Direction::West, 90) => Direction::North,
            (Direction::West, 180) => Direction::East,
            (Direction::West, 270) => Direction::South,
            (Direction::Left, _) | (Direction::Right, _) | (Direction::Forward, _) => {
                panic!("curr_direction needs to be one of {N, S, E, W}")
            }
            // Bug fix: this failure message previously said "Turn left failed".
            _ => panic!("Turn right failed"),
        }
    }
}
/// Parses puzzle input ("F10", "N3", ...) into a `Ship` with the puzzle's
/// starting state (facing East; waypoint 1 north / 10 east).
pub fn input_generator(input: &str) -> Ship {
    let instructions = input
        .lines()
        .map(|line| {
            // First character selects the direction; the rest is the count.
            let direction = match line.chars().next().unwrap() {
                'N' => Direction::North,
                'S' => Direction::South,
                'E' => Direction::East,
                'W' => Direction::West,
                'L' => Direction::Left,
                'R' => Direction::Right,
                'F' => Direction::Forward,
                _ => panic!("Invalid input!"),
            };
            let units = line[1..].parse::<i32>().unwrap();
            Instruction { direction, units }
        })
        .collect();
    Ship {
        instructions,
        curr_direction: Direction::East,
        curr_position: (0, 0),
        waypoint: (1, 10),
        waypoint_directions: (Direction::North, Direction::East),
    }
}
/// Part 1 answer: Manhattan distance |north-south| + |east-west| of the
/// ship's final position after direct moves.
#[aoc(day12, part1)]
pub fn part1(input: &str) -> usize {
    let mut ship = input_generator(input);
    ship.process_instructions();
    (ship.curr_position.0.abs() + ship.curr_position.1.abs()) as usize
}
/// Part 2 answer: same distance metric, but moves are interpreted via the
/// waypoint rules.
#[aoc(day12, part2)]
pub fn part2(input: &str) -> usize {
    let mut ship = input_generator(input);
    ship.process_instructions2();
    (ship.curr_position.0.abs() + ship.curr_position.1.abs()) as usize
}
#[cfg(test)]
mod tests {
    use super::*;
    // Parsing produces the expected instruction list and starting state.
    #[test]
    fn test_input() {
        assert_eq!(
            input_generator("F10\nN3"),
            Ship {
                instructions: vec![
                    Instruction {
                        direction: Direction::Forward,
                        units: 10
                    },
                    Instruction {
                        direction: Direction::North,
                        units: 3
                    }
                ],
                curr_direction: Direction::East,
                curr_position: (0, 0),
                waypoint: (1, 10),
                waypoint_directions: (Direction::North, Direction::East),
            }
        );
    }
    // AoC 2020 day 12 part-1 worked example.
    #[test]
    fn test_part1() {
        assert_eq!(part1("F10\nN3\nF7\nR90\nF11"), 25);
    }
    // Part-2 worked example plus a longer regression input.
    #[test]
    fn test_part2() {
        assert_eq!(part2("F10\nN3\nF7\nR90\nF11"), 286);
        assert_eq!(
            part2(
                "N3\nL90\nF63\nW5\nF46\nE3\nF22\nN2\nR90\nF68\nE4\nW3\nR90\nW4\nR180\nE1\nS5\nF90"
            ),
            2510
        );
    }
}
|
/// Part A: counts trees ('#') met on the slope right 3 / down 1,
/// with the pattern repeating horizontally.
pub fn a(input: String) -> String {
    let trees = input
        .lines()
        .enumerate()
        .skip(1)
        .filter(|&(row, line)| line.chars().cycle().nth(row * 3).unwrap() == '#')
        .count();
    format!("{}", trees)
}
/// One of the toboggan trajectories to evaluate.
struct Slope {
    right: usize,
    down: usize,
}
/// Part B: multiplies the tree counts of the five required slopes.
pub fn b(input: String) -> String {
    let slopes = &[
        Slope { right: 1, down: 1 },
        Slope { right: 3, down: 1 },
        Slope { right: 5, down: 1 },
        Slope { right: 7, down: 1 },
        Slope { right: 1, down: 2 },
    ];
    slopes
        .iter()
        .map(|slope| {
            // Step k of the trajectory visits input line (k + 1) * down at
            // column (k + 1) * right; `cycle()` repeats the row pattern.
            input
                .lines()
                .skip(slope.down)
                .step_by(slope.down)
                .enumerate()
                .filter(|&(k, line)| {
                    line.chars().cycle().nth((k + 1) * slope.right).unwrap() == '#'
                })
                .count()
        })
        .product::<usize>()
        .to_string()
}
|
// Generated reader for the DDRPHYC BISTBER0 register; the whole 32-bit
// word is exposed as the `ABER` field.
#[doc = "Reader of register DDRPHYC_BISTBER0"]
pub type R = crate::R<u32, super::DDRPHYC_BISTBER0>;
#[doc = "Reader of field `ABER`"]
pub type ABER_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:31 - ABER"]
    #[inline(always)]
    pub fn aber(&self) -> ABER_R {
        // The mask covers all 32 bits, so this is effectively the raw value.
        ABER_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
|
// svd2rust-style generated API for the DDRCTRL DFIUPD2 register: a single
// DFI_PHYUPD_EN bit at position 31; the register resets to 0x8000_0000.
#[doc = "Register `DDRCTRL_DFIUPD2` reader"]
pub type R = crate::R<DDRCTRL_DFIUPD2_SPEC>;
#[doc = "Register `DDRCTRL_DFIUPD2` writer"]
pub type W = crate::W<DDRCTRL_DFIUPD2_SPEC>;
#[doc = "Field `DFI_PHYUPD_EN` reader - DFI_PHYUPD_EN"]
pub type DFI_PHYUPD_EN_R = crate::BitReader;
#[doc = "Field `DFI_PHYUPD_EN` writer - DFI_PHYUPD_EN"]
pub type DFI_PHYUPD_EN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 31 - DFI_PHYUPD_EN"]
    #[inline(always)]
    pub fn dfi_phyupd_en(&self) -> DFI_PHYUPD_EN_R {
        DFI_PHYUPD_EN_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 31 - DFI_PHYUPD_EN"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_phyupd_en(&mut self) -> DFI_PHYUPD_EN_W<DDRCTRL_DFIUPD2_SPEC, 31> {
        DFI_PHYUPD_EN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: passes the raw value straight through; the caller is
        // responsible for supplying a bit pattern valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "DDRCTRL DFI update register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrctrl_dfiupd2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ddrctrl_dfiupd2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRCTRL_DFIUPD2_SPEC;
impl crate::RegisterSpec for DDRCTRL_DFIUPD2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrctrl_dfiupd2::R`](R) reader structure"]
impl crate::Readable for DDRCTRL_DFIUPD2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ddrctrl_dfiupd2::W`](W) writer structure"]
impl crate::Writable for DDRCTRL_DFIUPD2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DDRCTRL_DFIUPD2 to value 0x8000_0000"]
impl crate::Resettable for DDRCTRL_DFIUPD2_SPEC {
    const RESET_VALUE: Self::Ux = 0x8000_0000;
}
|
use std::rc::Rc;
use dprint_core::formatting::*;
use dprint_core::formatting::{parser_helpers::*,condition_resolvers, conditions::*};
use swc_ecmascript::ast::*;
use swc_common::{comments::{Comment, CommentKind}, BytePos, Span, Spanned};
use swc_ecmascript::parser::{token::{TokenAndSpan}};
use super::*;
use super::swc::*;
use super::super::configuration::*;
use super::super::swc::ParsedSourceFile;
use super::super::utils;
use super::swc::{get_flattened_bin_expr};
/// Parses the entire source file's module into print items, appending a
/// trailing newline condition so the output ends with a newline whenever
/// anything was written.
pub fn parse<'a>(source_file: &'a ParsedSourceFile, config: &Configuration) -> PrintItems {
    let module_node = Node::Module(&source_file.module);
    let mut context = Context::new(
        config,
        &source_file.leading_comments,
        &source_file.trailing_comments,
        &source_file.tokens,
        &source_file.file_bytes,
        module_node,
        &source_file.info,
    );
    let mut items = parse_node(Node::Module(&source_file.module), &mut context);
    // Emit a final newline unless the writer never left position (0, 0).
    items.push_condition(if_true(
        "endOfFileNewLine",
        |condition_context| {
            let writer_info = &condition_context.writer_info;
            Some(writer_info.column_number > 0 || writer_info.line_number > 0)
        },
        Signal::NewLine.into(),
    ));
    #[cfg(debug_assertions)]
    context.assert_end_of_file_state();
    items
}
/// Parses a node with the default (identity) inner-parse hook.
fn parse_node<'a>(node: Node<'a>, context: &mut Context<'a>) -> PrintItems {
    parse_node_with_inner_parse(node, context, |parsed_items, _context| parsed_items)
}
/// Parses a node while letting `inner_parse` post-process the parsed items.
///
/// Handles all per-node bookkeeping around the actual parse:
/// maintaining the parent stack, pre-parsing decorators whose spans start
/// before their parent, attaching leading/trailing comments, and honoring
/// ignore comments (which cause the node's raw text to be emitted verbatim).
fn parse_node_with_inner_parse<'a>(node: Node<'a>, context: &mut Context<'a>, inner_parse: impl FnOnce(PrintItems, &mut Context<'a>) -> PrintItems) -> PrintItems {
    // println!("Node kind: {:?}", node.kind());
    // println!("Text: {:?}", node.text(context));

    // store info — make this node current and remember its parent
    let past_current_node = std::mem::replace(&mut context.current_node, node.clone());
    let parent_hi = past_current_node.hi();
    context.parent_stack.push(past_current_node);

    // handle decorators (since their starts can come before their parent)
    let mut items = handle_decorators_if_necessary(&node, context);

    // now that decorators might have been parsed, assert the node order to ensure comments are parsed correctly
    #[cfg(debug_assertions)]
    assert_parsed_in_order(&node, context);

    // parse item
    let node_span_data = node.span_data();
    let node_hi = node_span_data.hi;
    let node_lo = node_span_data.lo;
    let has_ignore_comment: bool;

    // get the leading comments
    if get_first_child_owns_leading_comments_on_same_line(&node, context) {
        // Some block comments should belong to the first child rather than the
        // parent node because their first child may end up on the next line.
        let leading_comments = context.comments.leading_comments(node_lo);
        has_ignore_comment = get_has_ignore_comment(&leading_comments, &node_lo, context);
        let node_start_line = node.start_line(context);
        // only take comments that sit on lines before the node itself
        let leading_comments_on_previous_lines = leading_comments
            .take_while(|c| c.kind == CommentKind::Line || c.start_line(context) < node_start_line)
            .collect::<Vec<&'a Comment>>();
        items.extend(parse_comment_collection(leading_comments_on_previous_lines.into_iter(), None, None, context));
    } else {
        let leading_comments = context.comments.leading_comments_with_previous(node_lo);
        has_ignore_comment = get_has_ignore_comment(&leading_comments, &node_lo, context);
        items.extend(parse_comments_as_leading(&node_span_data, leading_comments, context));
    }

    // parse the node
    if has_ignore_comment {
        // ignored nodes are emitted as their original source text
        items.push_str(""); // force the current line indentation
        items.extend(inner_parse(parser_helpers::parse_raw_string(&node.text(context)), context));
        // mark any previous comments as handled
        for comment in context.comments.trailing_comments_with_previous(node_hi) {
            if comment.lo() < node_hi {
                context.mark_comment_handled(comment);
            }
        }
    } else {
        items.extend(inner_parse(parse_node_inner(node, context), context));
    }

    // get the trailing comments
    // (skipped when the node ends where its parent ends, except at module level,
    // so the parent gets to own those comments instead)
    if node_hi != parent_hi || context.parent().kind() == NodeKind::Module {
        let trailing_comments = context.comments.trailing_comments_with_previous(node_hi);
        items.extend(parse_comments_as_trailing(&node_span_data, trailing_comments, context));
    }

    // if a "before comments" start info was registered for this node,
    // it must resolve before everything parsed above
    let items = if let Some(info) = context.take_current_before_comments_start_info() {
        let mut new_items = PrintItems::new();
        new_items.push_info(info);
        new_items.extend(items);
        new_items
    } else {
        items
    };

    // pop info — restore the previous current node
    context.current_node = context.parent_stack.pop();

    return items;

    // Dispatches each node kind to its dedicated parse function.
    fn parse_node_inner<'a>(node: Node<'a>, context: &mut Context<'a>) -> PrintItems {
        match node {
            /* class */
            Node::ClassMethod(node) => parse_class_method(node, context),
            Node::ClassProp(node) => parse_class_prop(node, context),
            Node::Constructor(node) => parse_constructor(node, context),
            Node::Decorator(node) => parse_decorator(node, context),
            Node::TsParamProp(node) => parse_parameter_prop(node, context),
            Node::PrivateMethod(node) => parse_private_method(node, context),
            Node::PrivateName(node) => parse_private_name(node, context),
            Node::PrivateProp(node) => parse_private_prop(node, context),
            /* clauses */
            Node::CatchClause(node) => parse_catch_clause(node, context),
            /* common */
            Node::ComputedPropName(node) => parse_computed_prop_name(node, context),
            Node::Ident(node) => parse_identifier(node, context),
            /* declarations */
            Node::ClassDecl(node) => parse_class_decl(node, context),
            Node::ExportDecl(node) => parse_export_decl(node, context),
            Node::ExportDefaultDecl(node) => parse_export_default_decl(node, context),
            Node::ExportDefaultExpr(node) => parse_export_default_expr(node, context),
            Node::FnDecl(node) => parse_function_decl(node, context),
            Node::ImportDecl(node) => parse_import_decl(node, context),
            Node::NamedExport(node) => parse_export_named_decl(node, context),
            Node::Param(node) => parse_param(node, context),
            Node::TsEnumDecl(node) => parse_enum_decl(node, context),
            Node::TsEnumMember(node) => parse_enum_member(node, context),
            Node::TsImportEqualsDecl(node) => parse_import_equals_decl(node, context),
            Node::TsInterfaceDecl(node) => parse_interface_decl(node, context),
            Node::TsModuleDecl(node) => parse_module_decl(node, context),
            Node::TsNamespaceDecl(node) => parse_namespace_decl(node, context),
            Node::TsTypeAliasDecl(node) => parse_type_alias(node, context),
            /* expressions */
            Node::ArrayLit(node) => parse_array_expr(node, context),
            Node::ArrowExpr(node) => parse_arrow_func_expr(node, context),
            Node::AssignExpr(node) => parse_assignment_expr(node, context),
            Node::AwaitExpr(node) => parse_await_expr(node, context),
            Node::BinExpr(node) => parse_binary_expr(node, context),
            Node::CallExpr(node) => parse_call_expr(node, context),
            Node::ClassExpr(node) => parse_class_expr(node, context),
            Node::CondExpr(node) => parse_conditional_expr(node, context),
            Node::ExprOrSpread(node) => parse_expr_or_spread(node, context),
            Node::FnExpr(node) => parse_fn_expr(node, context),
            Node::GetterProp(node) => parse_getter_prop(node, context),
            Node::KeyValueProp(node) => parse_key_value_prop(node, context),
            Node::MemberExpr(node) => parse_member_expr(node, context),
            Node::MetaPropExpr(node) => parse_meta_prop_expr(node, context),
            Node::NewExpr(node) => parse_new_expr(node, context),
            Node::ObjectLit(node) => parse_object_lit(node, context),
            // optional chaining wrapper parses straight through to its inner expression
            Node::OptChainExpr(node) => parse_node((&node.expr).into(), context),
            Node::ParenExpr(node) => parse_paren_expr(node, context),
            Node::SeqExpr(node) => parse_sequence_expr(node, context),
            Node::SetterProp(node) => parse_setter_prop(node, context),
            Node::SpreadElement(node) => parse_spread_element(node, context),
            Node::Super(_) => "super".into(),
            Node::TaggedTpl(node) => parse_tagged_tpl(node, context),
            Node::ThisExpr(_) => "this".into(),
            Node::Tpl(node) => parse_tpl(node, context),
            Node::TplElement(node) => parse_tpl_element(node, context),
            Node::TsAsExpr(node) => parse_as_expr(node, context),
            Node::TsConstAssertion(node) => parse_const_assertion(node, context),
            Node::TsExprWithTypeArgs(node) => parse_expr_with_type_args(node, context),
            Node::TsNonNullExpr(node) => parse_non_null_expr(node, context),
            Node::TsTypeAssertion(node) => parse_type_assertion(node, context),
            Node::UnaryExpr(node) => parse_unary_expr(node, context),
            Node::UpdateExpr(node) => parse_update_expr(node, context),
            Node::YieldExpr(node) => parse_yield_expr(node, context),
            /* exports */
            Node::ExportNamedSpecifier(node) => parse_export_named_specifier(node, context),
            Node::ExportNamespaceSpecifier(node) => parse_namespace_export_specifier(node, context),
            /* imports */
            Node::ImportNamedSpecifier(node) => parse_import_named_specifier(node, context),
            Node::ImportStarAsSpecifier(node) => parse_import_namespace_specifier(node, context),
            // a default import specifier prints as its local identifier
            Node::ImportDefaultSpecifier(node) => parse_node((&node.local).into(), context),
            Node::TsExternalModuleRef(node) => parse_external_module_ref(node, context),
            /* interface / type element */
            Node::TsCallSignatureDecl(node) => parse_call_signature_decl(node, context),
            Node::TsConstructSignatureDecl(node) => parse_construct_signature_decl(node, context),
            Node::TsIndexSignature(node) => parse_index_signature(node, context),
            Node::TsInterfaceBody(node) => parse_interface_body(node, context),
            Node::TsMethodSignature(node) => parse_method_signature(node, context),
            Node::TsPropertySignature(node) => parse_property_signature(node, context),
            Node::TsTypeLit(node) => parse_type_lit(node, context),
            /* jsx */
            Node::JSXAttr(node) => parse_jsx_attribute(node, context),
            Node::JSXClosingElement(node) => parse_jsx_closing_element(node, context),
            Node::JSXClosingFragment(node) => parse_jsx_closing_fragment(node, context),
            Node::JSXElement(node) => parse_jsx_element(node, context),
            Node::JSXEmptyExpr(node) => parse_jsx_empty_expr(node, context),
            Node::JSXExprContainer(node) => parse_jsx_expr_container(node, context),
            Node::JSXFragment(node) => parse_jsx_fragment(node, context),
            Node::JSXMemberExpr(node) => parse_jsx_member_expr(node, context),
            Node::JSXNamespacedName(node) => parse_jsx_namespaced_name(node, context),
            Node::JSXOpeningElement(node) => parse_jsx_opening_element(node, context),
            Node::JSXOpeningFragment(node) => parse_jsx_opening_fragment(node, context),
            Node::JSXSpreadChild(node) => parse_jsx_spread_child(node, context),
            Node::JSXText(node) => parse_jsx_text(node, context),
            /* literals */
            Node::BigInt(node) => parse_big_int_literal(node, context),
            Node::Bool(node) => parse_bool_literal(node),
            Node::Null(_) => "null".into(),
            Node::Number(node) => parse_num_literal(node, context),
            Node::Regex(node) => parse_reg_exp_literal(node, context),
            Node::Str(node) => parse_string_literal(node, context),
            /* module */
            Node::Module(node) => parse_module(node, context),
            /* patterns */
            Node::ArrayPat(node) => parse_array_pat(node, context),
            Node::AssignPat(node) => parse_assign_pat(node, context),
            Node::AssignPatProp(node) => parse_assign_pat_prop(node, context),
            Node::KeyValuePatProp(node) => parse_key_value_pat_prop(node, context),
            Node::RestPat(node) => parse_rest_pat(node, context),
            Node::ObjectPat(node) => parse_object_pat(node, context),
            /* properties */
            Node::MethodProp(node) => parse_method_prop(node, context),
            /* statements */
            Node::BlockStmt(node) => parse_block_stmt(node, context),
            Node::BreakStmt(node) => parse_break_stmt(node, context),
            Node::ContinueStmt(node) => parse_continue_stmt(node, context),
            Node::DebuggerStmt(node) => parse_debugger_stmt(node, context),
            Node::DoWhileStmt(node) => parse_do_while_stmt(node, context),
            Node::ExportAll(node) => parse_export_all(node, context),
            Node::ExprStmt(node) => parse_expr_stmt(node, context),
            Node::EmptyStmt(node) => parse_empty_stmt(node, context),
            Node::ForInStmt(node) => parse_for_in_stmt(node, context),
            Node::ForOfStmt(node) => parse_for_of_stmt(node, context),
            Node::ForStmt(node) => parse_for_stmt(node, context),
            Node::IfStmt(node) => parse_if_stmt(node, context),
            Node::LabeledStmt(node) => parse_labeled_stmt(node, context),
            Node::ReturnStmt(node) => parse_return_stmt(node, context),
            Node::SwitchStmt(node) => parse_switch_stmt(node, context),
            Node::SwitchCase(node) => parse_switch_case(node, context),
            Node::ThrowStmt(node) => parse_throw_stmt(node, context),
            Node::TryStmt(node) => parse_try_stmt(node, context),
            Node::TsExportAssignment(node) => parse_export_assignment(node, context),
            Node::TsNamespaceExportDecl(node) => parse_namespace_export(node, context),
            Node::VarDecl(node) => parse_var_decl(node, context),
            Node::VarDeclarator(node) => parse_var_declarator(node, context),
            Node::WhileStmt(node) => parse_while_stmt(node, context),
            /* types */
            Node::TsArrayType(node) => parse_array_type(node, context),
            Node::TsConditionalType(node) => parse_conditional_type(node, context),
            Node::TsConstructorType(node) => parse_constructor_type(node, context),
            Node::TsFnType(node) => parse_function_type(node, context),
            Node::TsImportType(node) => parse_import_type(node, context),
            Node::TsIndexedAccessType(node) => parse_indexed_access_type(node, context),
            Node::TsInferType(node) => parse_infer_type(node, context),
            Node::TsIntersectionType(node) => parse_intersection_type(node, context),
            Node::TsLitType(node) => parse_lit_type(node, context),
            Node::TsMappedType(node) => parse_mapped_type(node, context),
            Node::TsOptionalType(node) => parse_optional_type(node, context),
            Node::TsQualifiedName(node) => parse_qualified_name(node, context),
            Node::TsParenthesizedType(node) => parse_parenthesized_type(node, context),
            Node::TsRestType(node) => parse_rest_type(node, context),
            Node::TsThisType(_) => "this".into(),
            Node::TsTupleType(node) => parse_tuple_type(node, context),
            Node::TsTupleElement(node) => parse_tuple_element(node, context),
            Node::TsTypeAnn(node) => parse_type_ann(node, context),
            Node::TsTypeParam(node) => parse_type_param(node, context),
            Node::TsTypeParamDecl(node) => parse_type_parameters(TypeParamNode::Decl(node), context),
            Node::TsTypeParamInstantiation(node) => parse_type_parameters(TypeParamNode::Instantiation(node), context),
            Node::TsTypeOperator(node) => parse_type_operator(node, context),
            Node::TsTypePredicate(node) => parse_type_predicate(node, context),
            Node::TsTypeQuery(node) => parse_type_query(node, context),
            Node::TsTypeRef(node) => parse_type_reference(node, context),
            Node::TsUnionType(node) => parse_union_type(node, context),
            /* unknown */
            // fall back to emitting the node's raw source text
            _ => parse_raw_string(node.text(context).into()),
        }
    }

    // Pre-parses decorators for nodes whose decorator spans start before
    // the node itself, so comment attachment stays in source order.
    #[inline]
    fn handle_decorators_if_necessary<'a>(node: &Node<'a>, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        // decorators in these cases will have starts before their parent so they need to be handled specially
        if let Node::ExportDecl(decl) = node {
            if let Decl::Class(class_decl) = &decl.decl {
                items.extend(parse_decorators(&class_decl.class.decorators, false, context));
            }
        } else if let Node::ExportDefaultDecl(decl) = node {
            if let DefaultDecl::Class(class_expr) = &decl.decl {
                items.extend(parse_decorators(&class_expr.class.decorators, false, context));
            }
        }
        return items;
    }

    // Returns true when the node's same-line leading block comments should be
    // owned by its first child (currently only union/intersection types).
    #[inline]
    fn get_first_child_owns_leading_comments_on_same_line(node: &Node, context: &mut Context) -> bool {
        match node {
            Node::TsUnionType(_) | Node::TsIntersectionType(_) => {
                let node_start_line = node.start_line(context);
                node.leading_comments(context)
                    .filter(|c| c.kind == CommentKind::Block && c.start_line(context) == node_start_line)
                    .next().is_some()
            },
            _ => false,
        }
    }

    // Checks whether the last leading comment is an ignore directive
    // (per the configured ignore-node comment text).
    #[inline]
    fn get_has_ignore_comment<'a>(leading_comments: &CommentsIterator<'a>, node_lo: &BytePos, context: &mut Context<'a>) -> bool {
        return if let Some(last_comment) = get_last_comment(leading_comments, node_lo, context) {
            parser_helpers::text_has_dprint_ignore(&last_comment.text, &context.config.ignore_node_comment_text)
        } else {
            false
        };

        // Finds the comment that immediately precedes the node; JSX children need
        // special handling because their comments live in expression containers.
        #[inline]
        fn get_last_comment<'a>(leading_comments: &CommentsIterator<'a>, node_lo: &BytePos, context: &mut Context<'a>) -> Option<&'a Comment> {
            return match context.parent() {
                Node::JSXElement(jsx_element) => get_last_comment_for_jsx_children(&jsx_element.children, node_lo, context),
                Node::JSXFragment(jsx_fragment) => get_last_comment_for_jsx_children(&jsx_fragment.children, node_lo, context),
                _ => leading_comments.get_last_comment(),
            };

            // Walks backwards from the node over whitespace-only JSX text to find a
            // preceding `{/* ... */}` container and returns its last comment.
            fn get_last_comment_for_jsx_children<'a>(children: &Vec<JSXElementChild>, node_lo: &BytePos, context: &mut Context<'a>) -> Option<&'a Comment> {
                let index = children.binary_search_by_key(node_lo, |child| child.lo()).ok()?;
                for i in (0..index).rev() {
                    match children.get(i)? {
                        JSXElementChild::JSXExprContainer(expr_container) => {
                            return match expr_container.expr {
                                JSXExpr::JSXEmptyExpr(empty_expr) => {
                                    get_jsx_empty_expr_comments(&empty_expr, context).last()
                                },
                                _ => None,
                            };
                        },
                        JSXElementChild::JSXText(jsx_text) => {
                            // skip whitespace-only text between the container and the node
                            if !jsx_text.text(context).trim().is_empty() { return None; }
                        }
                        _ => return None,
                    }
                }
                None
            }
        }
    }

    // Debug-only guard: nodes must be parsed in source order or comment
    // ownership silently shifts to the wrong node.
    #[cfg(debug_assertions)]
    fn assert_parsed_in_order(node: &Node, context: &mut Context) {
        let node_pos = node.lo().0;
        if context.last_parsed_node_pos > node_pos {
            // When this panic happens it means that a node with a start further
            // along in the file has been "parsed" before this current node. When
            // that occurs, comments that this node "owns" might have been shifted
            // over to the further along node since "forgotten" comments get
            // prepended when a node is being parsed.
            //
            // Do the following steps to solve:
            //
            // 1. Uncomment the lines in `parse_node_with_inner_parse` in order to
            //    display the node kinds.
            // 2. Add a test that reproduces the issue then run the tests and see
            //    where it panics and how that node looks. Ensure the node widths
            //    are correct. If not, that's a bug in swc, so go fix it in swc.
            // 3. If it's not a bug in swc, then check the parsing code to ensure
            //    the nodes are being parsed in order.
            panic!("Debug panic! Node comments retrieved out of order!");
        }
        context.last_parsed_node_pos = node_pos;
    }
}
/* class */
/// Parses a class method declaration via the shared class/object method printer.
fn parse_class_method<'a>(node: &'a ClassMethod, context: &mut Context<'a>) -> PrintItems {
    // todo: consolidate with private method
    let func = &node.function;
    parse_class_or_object_method(
        ClassOrObjectMethod {
            parameters_span_data: node.get_parameters_span_data(context),
            decorators: Some(&func.decorators),
            accessibility: node.accessibility,
            is_static: node.is_static,
            is_abstract: node.is_abstract,
            is_async: func.is_async,
            is_generator: func.is_generator,
            is_optional: node.is_optional,
            kind: node.kind.into(),
            key: (&node.key).into(),
            type_params: func.type_params.as_ref().map(|p| p.into()),
            params: func.params.iter().map(|p| p.into()).collect(),
            return_type: func.return_type.as_ref().map(|t| t.into()),
            body: func.body.as_ref().map(|b| b.into()),
        },
        context,
    )
}
/// Parses a private class method (`#name() {}`) via the shared method printer.
fn parse_private_method<'a>(node: &'a PrivateMethod, context: &mut Context<'a>) -> PrintItems {
    let func = &node.function;
    parse_class_or_object_method(
        ClassOrObjectMethod {
            parameters_span_data: node.get_parameters_span_data(context),
            decorators: Some(&func.decorators),
            accessibility: node.accessibility,
            is_static: node.is_static,
            is_abstract: node.is_abstract,
            is_async: func.is_async,
            is_generator: func.is_generator,
            is_optional: node.is_optional,
            kind: node.kind.into(),
            key: (&node.key).into(),
            type_params: func.type_params.as_ref().map(|p| p.into()),
            params: func.params.iter().map(|p| p.into()).collect(),
            return_type: func.return_type.as_ref().map(|t| t.into()),
            body: func.body.as_ref().map(|b| b.into()),
        },
        context,
    )
}
/// Parses a class property declaration via the shared class-property printer.
fn parse_class_prop<'a>(node: &'a ClassProp, context: &mut Context<'a>) -> PrintItems {
    let common = ParseClassPropCommon {
        key: (&node.key).into(),
        decorators: &node.decorators,
        value: &node.value,
        type_ann: &node.type_ann,
        accessibility: &node.accessibility,
        is_static: node.is_static,
        is_declare: node.declare,
        is_abstract: node.is_abstract,
        is_optional: node.is_optional,
        computed: node.computed,
        readonly: node.readonly,
        definite: node.definite,
    };
    parse_class_prop_common(common, context)
}
/// Parses a class constructor; constructors carry no modifiers other than
/// accessibility and optionality, so the remaining method flags are fixed.
fn parse_constructor<'a>(node: &'a Constructor, context: &mut Context<'a>) -> PrintItems {
    let method = ClassOrObjectMethod {
        parameters_span_data: node.get_parameters_span_data(context),
        decorators: None,
        accessibility: node.accessibility,
        is_static: false,
        is_async: false,
        is_abstract: false,
        kind: ClassOrObjectMethodKind::Constructor,
        is_generator: false,
        is_optional: node.is_optional,
        key: (&node.key).into(),
        type_params: None,
        params: node.params.iter().map(|p| p.into()).collect(),
        return_type: None,
        body: node.body.as_ref().map(|b| b.into()),
    };
    parse_class_or_object_method(method, context)
}
/// Parses a decorator: `@` followed by its expression.
fn parse_decorator<'a>(node: &'a Decorator, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("@");
    result.extend(parse_node((&node.expr).into(), context));
    result
}
/// Parses a TypeScript parameter property (constructor parameter with
/// modifiers, e.g. `constructor(private readonly x: number)`).
fn parse_parameter_prop<'a>(node: &'a TsParamProp, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.extend(parse_decorators(&node.decorators, true, context));
    if let Some(accessibility) = node.accessibility {
        result.push_str(&format!("{} ", accessibility_to_str(&accessibility)));
    }
    if node.readonly {
        result.push_str("readonly ");
    }
    result.extend(parse_node((&node.param).into(), context));
    result
}
/// Parses a private name: `#` followed by its identifier.
fn parse_private_name<'a>(node: &'a PrivateName, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("#");
    result.extend(parse_node((&node.id).into(), context));
    result
}
/// Parses a private class property (`#name = value`); private props can never
/// be `declare`, hence the fixed `is_declare: false`.
fn parse_private_prop<'a>(node: &'a PrivateProp, context: &mut Context<'a>) -> PrintItems {
    let common = ParseClassPropCommon {
        key: (&node.key).into(),
        decorators: &node.decorators,
        value: &node.value,
        type_ann: &node.type_ann,
        accessibility: &node.accessibility,
        is_static: node.is_static,
        is_declare: false,
        is_abstract: node.is_abstract,
        is_optional: node.is_optional,
        computed: node.computed,
        readonly: node.readonly,
        definite: node.definite,
    };
    parse_class_prop_common(common, context)
}
/// Shared inputs for printing a class property; used by both
/// `parse_class_prop` (ClassProp) and `parse_private_prop` (PrivateProp).
struct ParseClassPropCommon<'a> {
    // property name node (identifier, private name, computed key, etc.)
    pub key: Node<'a>,
    // initializer expression, rendered as `= value` when present
    pub value: &'a Option<Box<Expr>>,
    // explicit type annotation, rendered after a colon when present
    pub type_ann: &'a Option<TsTypeAnn>,
    pub is_static: bool,
    pub decorators: &'a Vec<Decorator>,
    // whether the key is a computed property name (`[expr]`)
    pub computed: bool,
    pub is_declare: bool,
    pub accessibility: &'a Option<Accessibility>,
    pub is_abstract: bool,
    // renders as a trailing `?` on the key
    pub is_optional: bool,
    pub readonly: bool,
    // definite assignment assertion; renders as a trailing `!` on the key
    pub definite: bool,
}
/// Prints a class property: decorators, modifiers (in declaration order),
/// the key (optionally computed), `?`/`!` markers, type annotation,
/// initializer, and the configured semicolon.
fn parse_class_prop_common<'a>(node: ParseClassPropCommon<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.extend(parse_decorators(node.decorators, false, context));
    // modifiers must appear in this fixed order
    if node.is_declare {
        result.push_str("declare ");
    }
    if let Some(accessibility) = node.accessibility {
        result.push_str(&format!("{} ", accessibility_to_str(accessibility)));
    }
    if node.is_static {
        result.push_str("static ");
    }
    if node.is_abstract {
        result.push_str("abstract ");
    }
    if node.readonly {
        result.push_str("readonly ");
    }
    // parse the key, wrapping it in brackets when it is a computed name
    let key_span_data = node.key.span_data();
    let parsed_key = parse_node(node.key, context);
    let key_items = if node.computed {
        parse_computed_prop_like(
            ParseComputedPropLikeOptions {
                inner_node_span_data: key_span_data,
                inner_items: parsed_key,
            },
            context,
        )
    } else {
        parsed_key
    };
    result.extend(key_items);
    if node.is_optional {
        result.push_str("?");
    }
    if node.definite {
        result.push_str("!");
    }
    result.extend(parse_type_ann_with_colon_if_exists(node.type_ann, context));
    if let Some(value) = node.value {
        result.extend(parse_assignment(value.into(), "=", context));
    }
    if context.config.semi_colons.is_true() {
        result.push_str(";");
    }
    result
}
/* clauses */
/// Parses a `catch` clause: the header (`catch` plus optional parameter)
/// followed by its always-braced body.
fn parse_catch_clause<'a>(node: &'a CatchClause, context: &mut Context<'a>) -> PrintItems {
    // a bit overkill since the param will currently always just be an identifier
    let header_start = Info::new("catchClauseHeaderStart");
    let header_end = Info::new("catchClauseHeaderEnd");
    let mut result = PrintItems::new();
    result.push_info(header_start);
    result.push_str("catch");
    if let Some(param) = &node.param {
        result.push_str(" (");
        result.extend(parse_node(param.into(), context));
        result.push_str(")");
    }
    result.push_info(header_end);
    // when the try statement has a finalizer, a single-statement body moves to the next line
    let single_body_position = match context.parent() {
        Node::TryStmt(try_stmt) if try_stmt.finalizer.is_some() => Some(SingleBodyPosition::NextLine),
        _ => None,
    };
    // not conditional... required
    let body = parse_conditional_brace_body(
        ParseConditionalBraceBodyOptions {
            parent: node.span,
            body_node: (&node.body).into(),
            use_braces: UseBraces::Always,
            brace_position: context.config.try_statement_brace_position,
            single_body_position,
            requires_braces_condition_ref: None,
            header_start_token: None,
            start_header_info: Some(header_start),
            end_header_info: Some(header_end),
        },
        context,
    );
    result.extend(body.parsed_node);
    result
}
/* common */
/// Parses a computed property name (`[expr]`) by delegating to the shared
/// computed-property printer.
fn parse_computed_prop_name<'a>(node: &'a ComputedPropName, context: &mut Context<'a>) -> PrintItems {
    let inner_node_span_data = node.expr.span_data();
    let inner_items = parse_node((&node.expr).into(), context);
    parse_computed_prop_like(
        ParseComputedPropLikeOptions { inner_node_span_data, inner_items },
        context,
    )
}
/// Parses an identifier, including its optional marker (`?`), the definite
/// assignment assertion (`!`) when it names a variable declarator, and any
/// type annotation.
fn parse_identifier<'a>(node: &'a Ident, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str(&node.sym as &str);
    if node.optional {
        result.push_str("?");
    }
    // `let x!: T` — the declarator's definite marker renders on the identifier
    if let Node::VarDeclarator(declarator) = context.parent() {
        if declarator.definite {
            result.push_str("!");
        }
    }
    result.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    result
}
/* declarations */
/// Parses a class declaration via the shared class decl/expr printer.
fn parse_class_decl<'a>(node: &'a ClassDecl, context: &mut Context<'a>) -> PrintItems {
    let class = &node.class;
    parse_class_decl_or_expr(
        ClassDeclOrExpr {
            span_data: class.span,
            decorators: &class.decorators,
            is_class_expr: false,
            is_declare: node.declare,
            is_abstract: class.is_abstract,
            ident: Some((&node.ident).into()),
            type_params: class.type_params.as_ref().map(|p| p.into()),
            super_class: class.super_class.as_ref().map(|c| c.into()),
            super_type_params: class.super_type_params.as_ref().map(|p| p.into()),
            implements: class.implements.iter().map(|i| i.into()).collect(),
            members: class.body.iter().map(|m| m.into()).collect(),
            brace_position: context.config.class_declaration_brace_position,
        },
        context,
    )
}
/// Shared inputs for printing either a class declaration or a class
/// expression (see `parse_class_decl_or_expr`).
struct ClassDeclOrExpr<'a> {
    // span of the whole class, used for the membered body
    span_data: Span,
    decorators: &'a Vec<Decorator>,
    // class expressions get an extra conditional-indent wrapper
    is_class_expr: bool,
    is_declare: bool,
    is_abstract: bool,
    // `None` for anonymous class expressions
    ident: Option<Node<'a>>,
    type_params: Option<Node<'a>>,
    // `extends` clause expression, if any
    super_class: Option<Node<'a>>,
    // type arguments on the `extends` clause, if any
    super_type_params: Option<Node<'a>>,
    // `implements` clause entries
    implements: Vec<Node<'a>>,
    members: Vec<Node<'a>>,
    brace_position: BracePosition,
}
/// Prints a class declaration or expression: decorators (unless an export
/// wrapper already printed them), modifiers, name, type parameters,
/// `extends`/`implements` clauses, and the braced member body. For class
/// expressions, the whole output is conditionally indented when the parsed
/// header spans multiple lines.
fn parse_class_decl_or_expr<'a>(node: ClassDeclOrExpr<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_before_owned_comments_info = context.get_or_create_current_before_comments_start_info();
    // parse decorators
    // (skipped when inside an export decl — parse_node already emitted them
    // because decorator spans start before the export keyword)
    let parent_kind = context.parent().kind();
    if parent_kind != NodeKind::ExportDecl && parent_kind != NodeKind::ExportDefaultDecl {
        items.extend(parse_decorators(node.decorators, node.is_class_expr, context));
    }
    // parse header and body
    let start_header_info = Info::new("startHeader");
    items.push_info(start_header_info);
    if node.is_declare { items.push_str("declare "); }
    if node.is_abstract { items.push_str("abstract "); }
    items.push_str("class");
    if let Some(ident) = node.ident {
        items.push_str(" ");
        items.extend(parse_node(ident, context));
    }
    if let Some(type_params) = node.type_params {
        items.extend(parse_node(type_params, context));
    }
    if let Some(super_class) = node.super_class {
        // the extends clause hangs or wraps depending on the header width
        items.push_condition(conditions::new_line_if_hanging_space_otherwise(conditions::NewLineIfHangingSpaceOtherwiseOptions {
            start_info: start_header_info,
            end_info: None,
            space_char: Some(conditions::if_above_width_or(context.config.indent_width, Signal::SpaceOrNewLine.into(), " ".into()).into()),
        }));
        items.push_condition(conditions::indent_if_start_of_line({
            let mut items = PrintItems::new();
            items.push_str("extends ");
            items.extend(new_line_group({
                let mut items = PrintItems::new();
                items.extend(parse_node(super_class, context));
                if let Some(super_type_params) = node.super_type_params {
                    items.extend(parse_node(super_type_params, context));
                }
                items
            }));
            items
        }));
    }
    items.extend(parse_extends_or_implements(ParseExtendsOrImplementsOptions {
        text: "implements",
        type_items: node.implements,
        start_header_info,
        prefer_hanging: context.config.implements_clause_prefer_hanging,
    }, context));
    items.extend(parse_membered_body(ParseMemberedBodyOptions {
        span_data: node.span_data,
        members: node.members,
        start_header_info: Some(start_header_info),
        brace_position: node.brace_position,
        should_use_blank_line: move |previous, next, context| {
            node_helpers::has_separating_blank_line(previous, next, context)
        },
        separator: Separator::none(),
    }, context));
    if node.is_class_expr {
        // class expressions indent when they do not start the line and the
        // region from the owned comments to the header spans multiple lines
        let items = items.into_rc_path();
        if_true_or(
            "classExprConditionalIndent",
            move |context| {
                let resolved_start_before_comments_info = context.get_resolved_info(&start_before_owned_comments_info)?;
                if resolved_start_before_comments_info.is_start_of_line() {
                    Some(false)
                } else {
                    condition_resolvers::is_multiple_lines(context, &start_before_owned_comments_info, &start_header_info)
                }
            },
            with_indent(items.clone().into()),
            items.into(),
        ).into()
    } else {
        items
    }
}
/// Parses an `export` declaration wrapper around an inner declaration.
fn parse_export_decl<'a>(node: &'a ExportDecl, context: &mut Context<'a>) -> PrintItems {
    // decorators are handled in parse_node because their starts come before the ExportDecl
    let mut result = PrintItems::new();
    result.push_str("export ");
    result.extend(parse_node((&node.decl).into(), context));
    result
}
/// Parses an `export default` declaration wrapper.
fn parse_export_default_decl<'a>(node: &'a ExportDefaultDecl, context: &mut Context<'a>) -> PrintItems {
    // decorators are handled in parse_node because their starts come before the ExportDefaultDecl
    let mut result = PrintItems::new();
    result.push_str("export default ");
    result.extend(parse_node((&node.decl).into(), context));
    result
}
/// Parses an `export default <expr>` statement, with the configured semicolon.
fn parse_export_default_expr<'a>(node: &'a ExportDefaultExpr, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("export default ");
    result.extend(parse_node((&node.expr).into(), context));
    if context.config.semi_colons.is_true() {
        result.push_str(";");
    }
    result
}
/// Parses a TypeScript enum declaration: the `declare`/`const` modifiers,
/// the name, and the braced member body with configured member spacing.
fn parse_enum_decl<'a>(node: &'a TsEnumDecl, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    let header_start_info = Info::new("startHeader");
    // header
    result.push_info(header_start_info);
    if node.declare {
        result.push_str("declare ");
    }
    if node.is_const {
        result.push_str("const ");
    }
    result.push_str("enum ");
    result.extend(parse_node((&node.id).into(), context));
    // body
    let member_spacing = context.config.enum_declaration_member_spacing;
    result.extend(parse_membered_body(
        ParseMemberedBodyOptions {
            span_data: node.span,
            members: node.members.iter().map(|m| m.into()).collect(),
            start_header_info: Some(header_start_info),
            brace_position: context.config.enum_declaration_brace_position,
            should_use_blank_line: move |previous, next, context| match member_spacing {
                MemberSpacing::Maintain => node_helpers::has_separating_blank_line(previous, next, context),
                MemberSpacing::BlankLine => true,
                MemberSpacing::NewLine => false,
            },
            separator: context.config.enum_declaration_trailing_commas.into(),
        },
        context,
    ));
    result
}
/// Parses an enum member: its name and, when present, the `= initializer`.
fn parse_enum_member<'a>(node: &'a TsEnumMember, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.id).into(), context);
    if let Some(init) = &node.init {
        result.extend(parse_assignment(init.into(), "=", context));
    }
    result
}
/// Parses a named export declaration (`export { a, b } from "..."`,
/// `export * as ns from "..."`, etc.). A single named specifier that was
/// written on one line is kept on one line.
fn parse_export_named_decl<'a>(node: &'a NamedExport, context: &mut Context<'a>) -> PrintItems {
    // bucket the specifiers by kind
    let mut default_export: Option<&ExportDefaultSpecifier> = None;
    let mut namespace_export: Option<&ExportNamespaceSpecifier> = None;
    let mut named_exports: Vec<&ExportNamedSpecifier> = Vec::new();
    for specifier in &node.specifiers {
        match specifier {
            ExportSpecifier::Named(named) => named_exports.push(named),
            ExportSpecifier::Default(default) => default_export = Some(default),
            ExportSpecifier::Namespace(namespace) => namespace_export = Some(namespace),
        }
    }
    let should_single_line = default_export.is_none()
        && namespace_export.is_none()
        && named_exports.len() <= 1
        && node.start_line(context) == node.end_line(context);
    // parse
    let mut result = PrintItems::new();
    result.push_str("export ");
    if node.type_only {
        result.push_str("type ");
    }
    if let Some(default_export) = default_export {
        result.extend(parse_node(default_export.into(), context));
    } else if !named_exports.is_empty() {
        result.extend(parse_named_import_or_export_specifiers(
            &node.into(),
            named_exports.into_iter().map(|x| x.into()).collect(),
            context,
        ));
    } else if let Some(namespace_export) = namespace_export {
        result.extend(parse_node(namespace_export.into(), context));
    } else {
        result.push_str("{}");
    }
    if let Some(src) = &node.src {
        result.push_str(" from ");
        result.extend(parse_node(src.into(), context));
    }
    if context.config.semi_colons.is_true() {
        result.push_str(";");
    }
    match should_single_line {
        true => with_no_new_lines(result),
        false => result,
    }
}
/// Parses a function declaration by delegating to the shared
/// declaration/expression parser.
fn parse_function_decl<'a>(node: &'a FnDecl, context: &mut Context<'a>) -> PrintItems {
    let decl_node = FunctionDeclOrExprNode {
        is_func_decl: true,
        declare: node.declare,
        ident: Some(&node.ident),
        func: &node.function,
    };
    parse_function_decl_or_expr(decl_node, context)
}
/// Shared input for parsing either a function declaration or a function
/// expression (see `parse_function_decl_or_expr`).
struct FunctionDeclOrExprNode<'a> {
    // true when parsing a `FnDecl`; false for a `FnExpr`
    is_func_decl: bool,
    // the function's name, when present (function expressions may be anonymous)
    ident: Option<&'a Ident>,
    // whether the declaration has a `declare` modifier
    declare: bool,
    // the underlying function node (params, body, return type, etc.)
    func: &'a Function,
}
/// Parses either a function declaration or a function expression; the two
/// share almost all of their formatting logic and differ only in which
/// configuration values are consulted and in keyword spacing rules.
fn parse_function_decl_or_expr<'a>(node: FunctionDeclOrExprNode<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_header_info = Info::new("functionHeaderStart");
    let func = node.func;
    // only function *expressions* may be configured with a space after the keyword (e.g. `function ()`)
    let space_after_function_keyword = !node.is_func_decl && context.config.function_expression_space_after_function_keyword;
    items.push_info(start_header_info);
    if node.declare { items.push_str("declare "); }
    if func.is_async { items.push_str("async "); }
    items.push_str("function");
    if func.is_generator { items.push_str("*"); }
    if space_after_function_keyword {
        items.push_str(" ");
    }
    if let Some(ident) = node.ident {
        // avoid a double space when one was already emitted after the keyword
        if !space_after_function_keyword {
            items.push_str(" ");
        }
        items.extend(parse_node(ident.into(), context));
    }
    if let Some(type_params) = &func.type_params { items.extend(parse_node(type_params.into(), context)); }
    // collapsed the previously nested `if`s (clippy::collapsible_if); behavior unchanged
    if get_use_space_before_parens(node.is_func_decl, context)
        && (node.ident.is_some() || func.type_params.is_some() || !space_after_function_keyword)
    {
        items.push_str(" ");
    }
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        nodes: func.params.iter().map(|node| node.into()).collect(),
        span_data: func.get_parameters_span_data(context),
        custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info: start_header_info,
            type_node: func.return_type.as_ref().map(|x| x.into()),
            type_node_separator: None,
            param_count: func.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    if let Some(body) = &func.body {
        let brace_position = if node.is_func_decl {
            context.config.function_declaration_brace_position
        } else {
            context.config.function_expression_brace_position
        };
        let open_brace_token = context.token_finder.get_first_open_brace_token_within(body);
        items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
            // field init shorthand (was `brace_position: brace_position`, etc. — clippy::redundant_field_names)
            brace_position,
            open_brace_token,
            start_header_info: Some(start_header_info),
        }, context));
        items.extend(parse_node(body.into(), context));
    } else if context.config.semi_colons.is_true() {
        // bodyless signature (overload or ambient declaration) — terminate with a semicolon when configured
        items.push_str(";");
    }
    return items;

    /// Whether a space should separate the header from the parameter list,
    /// per the relevant declaration/expression configuration.
    fn get_use_space_before_parens(is_func_decl: bool, context: &mut Context) -> bool {
        if is_func_decl {
            context.config.function_declaration_space_before_parentheses
        } else {
            context.config.function_expression_space_before_parentheses
        }
    }
}
/// Parses a function parameter: leading decorators followed by the binding pattern.
fn parse_param<'a>(node: &'a Param, context: &mut Context<'a>) -> PrintItems {
    let mut param_items = parse_decorators(&node.decorators, true, context);
    param_items.extend(parse_node((&node.pat).into(), context));
    param_items
}
/// Parses an import declaration, e.g. `import def, { a as b } from "./mod";`.
fn parse_import_decl<'a>(node: &'a ImportDecl, context: &mut Context<'a>) -> PrintItems {
    // bucket the specifiers by kind
    let mut default_import: Option<&ImportDefaultSpecifier> = None;
    let mut namespace_import: Option<&ImportStarAsSpecifier> = None;
    let mut named_imports: Vec<&ImportNamedSpecifier> = Vec::new();
    for specifier in node.specifiers.iter() {
        match specifier {
            ImportSpecifier::Named(named) => named_imports.push(named),
            ImportSpecifier::Default(default) => default_import = Some(default),
            ImportSpecifier::Namespace(namespace) => namespace_import = Some(namespace),
        }
    }

    // a declaration that starts out on one line with at most one named import stays on one line
    let should_single_line = default_import.is_none()
        && namespace_import.is_none()
        && named_imports.len() <= 1
        && node.start_line(context) == node.end_line(context);

    // an empty named-import brace pair (`import {} from "..."`) produces no
    // specifiers, so detect it from the tokens preceding the source string
    let has_named_imports = !named_imports.is_empty() || {
        match context.token_finder.get_previous_token_if_from_keyword(&node.src) {
            Some(from_keyword) => context.token_finder.get_previous_token_if_close_brace(from_keyword).is_some(),
            None => false,
        }
    };
    let has_from = default_import.is_some() || namespace_import.is_some() || has_named_imports;

    let mut items = PrintItems::new();
    items.push_str("import ");
    if node.type_only {
        items.push_str("type ");
    }
    if let Some(default_import) = default_import {
        items.extend(parse_node(default_import.into(), context));
        // a default import followed by other specifiers needs a comma
        if namespace_import.is_some() || !named_imports.is_empty() {
            items.push_str(", ");
        }
    }
    if let Some(namespace_import) = namespace_import {
        items.extend(parse_node(namespace_import.into(), context));
    }
    if has_named_imports {
        items.extend(parse_named_import_or_export_specifiers(
            &node.into(),
            named_imports.into_iter().map(|x| x.into()).collect(),
            context,
        ));
    }
    if has_from {
        items.push_str(" from ");
    }
    items.extend(parse_node((&node.src).into(), context));
    if context.config.semi_colons.is_true() {
        items.push_str(";");
    }

    if should_single_line { with_no_new_lines(items) } else { items }
}
/// Parses a TypeScript import-equals declaration,
/// e.g. `export import a = require("b");`.
fn parse_import_equals_decl<'a>(node: &'a TsImportEqualsDecl, context: &mut Context<'a>) -> PrintItems {
    let mut parsed = PrintItems::new();
    if node.is_export {
        parsed.push_str("export ");
    }
    parsed.push_str("import ");
    parsed.extend(parse_node((&node.id).into(), context));
    // the assignment stays on one line
    parsed.push_str(" = ");
    parsed.extend(parse_node((&node.module_ref).into(), context));
    if context.config.semi_colons.is_true() {
        parsed.push_str(";");
    }
    parsed
}
/// Parses a TypeScript `interface` declaration, including its type
/// parameters, `extends` clause, and body.
fn parse_interface_decl<'a>(node: &'a TsInterfaceDecl, context: &mut Context<'a>) -> PrintItems {
    let start_header_info = Info::new("startHeader");
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    // the header info is stored so it can be looked up for this node later
    context.store_info_for_node(node, start_header_info);

    if node.declare {
        items.push_str("declare ");
    }
    items.push_str("interface ");
    items.extend(parse_node((&node.id).into(), context));
    if let Some(type_params) = node.type_params.as_ref() {
        items.extend(parse_node(type_params.into(), context));
    }
    items.extend(parse_extends_or_implements(ParseExtendsOrImplementsOptions {
        text: "extends",
        type_items: node.extends.iter().map(|extends_item| extends_item.into()).collect(),
        start_header_info,
        prefer_hanging: context.config.extends_clause_prefer_hanging,
    }, context));
    items.extend(parse_node((&node.body).into(), context));
    items
}
/// Parses a `module`/`namespace` declaration whose body may be absent
/// (e.g. `declare module "foo";`).
fn parse_module_decl<'a>(node: &'a TsModuleDecl, context: &mut Context<'a>) -> PrintItems {
    let decl = ModuleOrNamespaceDecl {
        span_data: node.span,
        declare: node.declare,
        global: node.global,
        id: (&node.id).into(),
        body: node.body.as_ref(),
    };
    parse_module_or_namespace_decl(decl, context)
}
/// Parses a nested namespace declaration, which always has a body.
fn parse_namespace_decl<'a>(node: &'a TsNamespaceDecl, context: &mut Context<'a>) -> PrintItems {
    let decl = ModuleOrNamespaceDecl {
        span_data: node.span,
        declare: node.declare,
        global: node.global,
        id: (&node.id).into(),
        body: Some(&node.body),
    };
    parse_module_or_namespace_decl(decl, context)
}
/// Shared input for parsing `module`/`namespace` declarations
/// (see `parse_module_or_namespace_decl`).
struct ModuleOrNamespaceDecl<'a> {
    // span of the whole declaration
    pub span_data: Span,
    // whether the declaration has a `declare` modifier
    pub declare: bool,
    // whether this is a `global` augmentation (printed without a `module`/`namespace` keyword)
    pub global: bool,
    // the module/namespace name node
    pub id: Node<'a>,
    // the body; `None` for a bodyless declaration (e.g. `declare module "foo";`)
    pub body: Option<&'a TsNamespaceBody>,
}
/// Parses a `module` or `namespace` declaration from the shared
/// `ModuleOrNamespaceDecl` view built by `parse_module_decl` /
/// `parse_namespace_decl`.
fn parse_module_or_namespace_decl<'a>(node: ModuleOrNamespaceDecl<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_header_info = Info::new("startHeader");
    items.push_info(start_header_info);
    if node.declare { items.push_str("declare "); }
    if !node.global {
        // Recover whether the source used `module` or `namespace` by inspecting
        // the token before the name: only `namespace` starts with 'n'.
        let module_or_namespace_keyword = context.token_finder.get_previous_token(&node.id).unwrap();
        let has_namespace_keyword = context.token_finder.get_char_at(&module_or_namespace_keyword.span.lo()) == 'n';
        items.push_str(if has_namespace_keyword { "namespace " } else { "module " });
    }
    items.extend(parse_node(node.id.into(), context));
    items.extend(parse_body(node.body, start_header_info, context));
    return items;

    /// Parses the declaration body: a braced block of members, a dotted
    /// nested namespace (handled recursively as `.name …`), or nothing — in
    /// which case a semicolon is emitted when configured.
    fn parse_body<'a>(body: Option<&'a TsNamespaceBody>, start_header_info: Info, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        if let Some(body) = &body {
            match body {
                TsNamespaceBody::TsModuleBlock(block) => {
                    items.extend(parse_membered_body(ParseMemberedBodyOptions {
                        span_data: block.span,
                        members: block.body.iter().map(|x| x.into()).collect(),
                        start_header_info: Some(start_header_info),
                        brace_position: context.config.module_declaration_brace_position,
                        should_use_blank_line: move |previous, next, context| {
                            node_helpers::has_separating_blank_line(previous, next, context)
                        },
                        separator: Separator::none(),
                    }, context));
                },
                // `namespace A.B { ... }` — emit `.B` then recurse into its body
                TsNamespaceBody::TsNamespaceDecl(decl) => {
                    items.push_str(".");
                    items.extend(parse_node((&decl.id).into(), context));
                    items.extend(parse_body(Some(&*decl.body), start_header_info, context));
                }
            }
        }
        // bodyless declaration
        else if context.config.semi_colons.is_true() {
            items.push_str(";");
        }
        return items;
    }
}
/// Parses a type alias declaration, e.g. `declare type Foo<T> = Bar<T>;`.
fn parse_type_alias<'a>(node: &'a TsTypeAliasDecl, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if node.declare {
        items.push_str("declare ");
    }
    items.push_str("type ");
    items.extend(parse_node((&node.id).into(), context));
    if let Some(type_params) = node.type_params.as_ref() {
        items.extend(parse_node(type_params.into(), context));
    }
    items.extend(parse_assignment((&node.type_ann).into(), "=", context));
    if context.config.semi_colons.is_true() {
        items.push_str(";");
    }
    items
}
/* exports */
/// Parses the braced specifier list shared by named imports and named
/// exports, pulling the formatting configuration from whichever declaration
/// kind the parent node is.
fn parse_named_import_or_export_specifiers<'a>(parent: &Node<'a>, specifiers: Vec<Node<'a>>, context: &mut Context<'a>) -> PrintItems {
    return parse_object_like_node(ParseObjectLikeNodeOptions {
        node_span_data: parent.span_data(),
        members: specifiers,
        separator: get_trailing_commas(parent, context).into(),
        prefer_hanging: get_prefer_hanging(parent, context),
        prefer_single_line: get_prefer_single_line(parent, context),
        surround_single_line_with_spaces: get_use_space(parent, context),
    }, context);

    // Each helper selects the import- or export-specific configuration value;
    // any other parent kind is a programming error.
    fn get_trailing_commas(parent_decl: &Node, context: &Context) -> TrailingCommas {
        match parent_decl {
            Node::ImportDecl(_) => context.config.import_declaration_trailing_commas,
            Node::NamedExport(_) => context.config.export_declaration_trailing_commas,
            _ => unreachable!(),
        }
    }

    fn get_use_space(parent_decl: &Node, context: &Context) -> bool {
        match parent_decl {
            Node::ImportDecl(_) => context.config.import_declaration_space_surrounding_named_imports,
            Node::NamedExport(_) => context.config.export_declaration_space_surrounding_named_exports,
            _ => unreachable!(),
        }
    }

    fn get_prefer_hanging(parent_decl: &Node, context: &Context) -> bool {
        match parent_decl {
            Node::ImportDecl(_) => context.config.import_declaration_prefer_hanging,
            Node::NamedExport(_) => context.config.export_declaration_prefer_hanging,
            _ => unreachable!(),
        }
    }

    fn get_prefer_single_line(parent_decl: &Node, context: &Context) -> bool {
        match parent_decl {
            Node::ImportDecl(_) => context.config.import_declaration_prefer_single_line,
            Node::NamedExport(_) => context.config.export_declaration_prefer_single_line,
            _ => unreachable!(),
        }
    }
}
/* expressions */
/// Parses an array literal expression; holes (elisions) appear as `None` elements.
fn parse_array_expr<'a>(node: &'a ArrayLit, context: &mut Context<'a>) -> PrintItems {
    let options = ParseArrayLikeNodesOptions {
        parent_span_data: node.span,
        nodes: node.elems.iter().map(|element| element.as_ref().map(|e| e.into())).collect(),
        prefer_hanging: context.config.array_expression_prefer_hanging,
        prefer_single_line: context.config.array_expression_prefer_single_line,
        trailing_commas: context.config.array_expression_trailing_commas,
    };
    parse_array_like_nodes(options, context)
}
/// Parses an arrow function expression. Decides whether the parameter list
/// needs parentheses (syntactic requirements plus configuration) and how the
/// body attaches after the `=>` (block body vs. expression body).
fn parse_arrow_func_expr<'a>(node: &'a ArrowExpr, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let header_start_info = Info::new("arrowFunctionExpressionHeaderStart");
    let should_use_parens = get_should_use_parens(&node, context);
    items.push_info(header_start_info);
    if node.is_async { items.push_str("async "); }
    if let Some(type_params) = &node.type_params { items.extend(parse_node(type_params.into(), context)); }
    if should_use_parens {
        // need to check if there are parens because parse_parameters_or_arguments depends on the parens existing
        if has_parens(node, context) {
            items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
                span_data: node.get_parameters_span_data(context),
                nodes: node.params.iter().map(|node| node.into()).collect(),
                custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
                    start_info: header_start_info,
                    type_node: node.return_type.as_ref().map(|x| x.into()),
                    type_node_separator: None,
                    param_count: node.params.len(),
                }, context)),
                is_parameters: true,
            }, context));
        } else {
            // todo: this should probably use more of the same logic as in parse_parameters_or_arguments
            // there will only be one param in this case
            items.push_str("(");
            items.extend(parse_node(node.params.first().unwrap().into(), context));
            items.push_str(")");
        }
    } else {
        // single parameter printed without parentheses (e.g. `x => ...`)
        items.extend(parse_node(node.params.first().unwrap().into(), context));
    }
    items.push_str(" =>");
    let parsed_body = parse_node((&node.body).into(), context);
    let parsed_body = if use_new_line_group_for_arrow_body(node) { new_line_group(parsed_body) } else { parsed_body }.into_rc_path();
    let open_brace_token = match &node.body {
        BlockStmtOrExpr::BlockStmt(stmt) => context.token_finder.get_first_open_brace_token_within(stmt),
        _ => None,
    };
    if open_brace_token.is_some() {
        // block body: place the opening brace per configuration
        items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
            brace_position: context.config.arrow_function_brace_position,
            open_brace_token: open_brace_token,
            start_header_info: Some(header_start_info),
        }, context));
        items.extend(parsed_body.into());
    } else {
        // expression body: may wrap to the next line depending on remaining
        // width and whether the body resolves to multiple lines
        let start_body_info = Info::new("startBody");
        let end_body_info = Info::new("endBody");
        items.push_info(start_body_info);
        if should_not_newline_after_arrow(&node.body) {
            items.push_str(" ");
        } else {
            items.push_condition(conditions::if_above_width_or(
                context.config.indent_width,
                if_true_or("newlineOrSpace", move |context| {
                    condition_resolvers::is_multiple_lines(context, &start_body_info, &end_body_info)
                }, Signal::NewLine.into(), Signal::SpaceOrNewLine.into()).into(),
                " ".into()
            ));
        }
        items.push_condition(conditions::indent_if_start_of_line(parsed_body.into()));
        items.push_info(end_body_info);
    }
    return items;

    /// Bodies that manage their own wrapping (blocks, parenthesized or array
    /// expressions) stay on the arrow's line.
    fn should_not_newline_after_arrow(body: &BlockStmtOrExpr) -> bool {
        match body {
            BlockStmtOrExpr::BlockStmt(_) => true,
            BlockStmtOrExpr::Expr(expr) => {
                match &**expr {
                    Expr::Paren(_) | Expr::Array(_) => true,
                    _ => false,
                }
            }
        }
    }

    /// Parens are syntactically required for zero/multiple params, a return
    /// type, or a non-identifier/type-annotated first param; otherwise the
    /// configuration decides (Maintain keeps what the source had).
    fn get_should_use_parens(node: &ArrowExpr, context: &mut Context) -> bool {
        let requires_parens = node.params.len() != 1 || node.return_type.is_some() || is_first_param_not_identifier_or_has_type_annotation(&node.params);
        return if requires_parens {
            true
        } else {
            match context.config.arrow_function_use_parentheses {
                UseParentheses::Force => true,
                UseParentheses::PreferNone => false,
                UseParentheses::Maintain => has_parens(&node, context),
            }
        };

        fn is_first_param_not_identifier_or_has_type_annotation(params: &Vec<Pat>) -> bool {
            let first_param = params.iter().next();
            match first_param {
                Some(Pat::Ident(node)) => node.type_ann.is_some(),
                _ => true
            }
        }
    }

    /// Whether the source text has parentheses around the parameters.
    fn has_parens(node: &ArrowExpr, context: &mut Context) -> bool {
        if node.params.len() != 1 {
            true
        } else {
            // checking for a close paren or comma is more reliable because of this scenario: `call(a => {})`
            let param_end = node.params.first().unwrap().hi();
            context.token_finder.get_next_token_if_comma(&param_end).is_some()
                || context.token_finder.get_next_token_if_close_paren(&param_end).is_some()
        }
    }
}
/// Parses an `as` type assertion, e.g. `value as SomeType`.
fn parse_as_expr<'a>(node: &'a TsAsExpr, context: &mut Context<'a>) -> PrintItems {
    let mut items = parse_node((&node.expr).into(), context);
    items.push_str(" as");
    // allow the type annotation to wrap rather than leaving a trailing space
    items.push_signal(Signal::SpaceIfNotTrailing);
    let type_items = parse_node((&node.type_ann).into(), context);
    items.push_condition(conditions::with_indent_if_start_of_line_indented(type_items));
    items
}
/// Parses a `const` assertion, e.g. `value as const`.
fn parse_const_assertion<'a>(node: &'a TsConstAssertion, context: &mut Context<'a>) -> PrintItems {
    let mut assertion_items = parse_node((&node.expr).into(), context);
    assertion_items.push_str(" as const");
    assertion_items
}
/// Parses an assignment expression (e.g. `a += b`), delegating the
/// operator and right-hand side to the shared assignment parser.
fn parse_assignment_expr<'a>(node: &'a AssignExpr, context: &mut Context<'a>) -> PrintItems {
    let mut assignment_items = parse_node((&node.left).into(), context);
    assignment_items.extend(parse_assignment((&node.right).into(), node.op.as_str(), context));
    assignment_items
}
/// Parses an `await` expression.
fn parse_await_expr<'a>(node: &'a AwaitExpr, context: &mut Context<'a>) -> PrintItems {
    let mut awaited = PrintItems::new();
    awaited.push_str("await ");
    awaited.extend(parse_node((&node.arg).into(), context));
    awaited
}
/// Parses a binary expression. The operator chain is flattened first
/// (`a + b + c` becomes one list of operands, each carrying its adjacent
/// operator) so the whole chain can be formatted as one set of separated
/// values with consistent hanging-indent behavior.
fn parse_binary_expr<'a>(node: &'a BinExpr, context: &mut Context<'a>) -> PrintItems {
    // todo: use a simplified version for nodes that don't need the complexity (for performance reasons)
    let mut items = PrintItems::new();
    let flattened_binary_expr = get_flattened_bin_expr(node, context);
    // println!("Bin expr: {:?}", flattened_binary_expr.iter().map(|x| x.expr.text(context)).collect::<Vec<_>>());
    let line_per_expression = context.config.binary_expression_line_per_expression;
    // force a multi-line layout when the source already spans lines (unless
    // configured to prefer single line); in line-per-expression mode only the
    // first two operands are compared, otherwise the first and the last
    let force_use_new_lines = !context.config.binary_expression_prefer_single_line
        && node_helpers::get_use_new_lines_for_nodes(&flattened_binary_expr[0].expr, if line_per_expression {
            &flattened_binary_expr[1].expr
        } else {
            &flattened_binary_expr.last().unwrap().expr
        }, context);
    let indent_width = context.config.indent_width;
    let binary_expr_start_info = Info::new("binExprStartInfo");
    let allow_no_indent = get_allow_no_indent(node, context);
    let use_space_surrounding_operator = get_use_space_surrounding_operator(&node.op, context);
    let is_parent_bin_expr = context.parent().kind() == NodeKind::BinExpr;
    let multi_line_options = {
        let mut options = if line_per_expression {
            parser_helpers::MultiLineOptions::same_line_no_indent()
        } else {
            parser_helpers::MultiLineOptions::maintain_line_breaks()
        };
        // a nested binary expression defers hanging-indent decisions to its parent
        options.with_hanging_indent = if is_parent_bin_expr {
            BoolOrCondition::Bool(false) // let the parent handle the indent
        } else {
            BoolOrCondition::Condition(Rc::new(Box::new(move |condition_context| {
                let binary_expr_start_info = condition_context.get_resolved_info(&binary_expr_start_info)?;
                if allow_no_indent && binary_expr_start_info.is_start_of_line() { return Some(false); }
                Some(condition_resolvers::is_start_of_line(condition_context))
            })))
        };
        options
    };
    items.push_info(binary_expr_start_info);
    items.extend(parser_helpers::parse_separated_values(|_| {
        let mut parsed_nodes = Vec::new();
        for bin_expr_item in flattened_binary_expr.into_iter() {
            let lines_span = Some(parser_helpers::LinesSpan{
                start_line: bin_expr_item.expr.span_data().start_line(context),
                end_line: bin_expr_item.expr.span_data().end_line(context)
            });
            let mut items = PrintItems::new();
            let pre_op = bin_expr_item.pre_op;
            let post_op = bin_expr_item.post_op;
            // block comments on the same line as a leading operator are kept beside it
            let (leading_pre_op_comments, trailing_pre_op_comments) = if let Some(op) = &pre_op {
                let op_line = op.token.start_line(context);
                (parse_op_comments(
                    op.token.leading_comments(context).filter(|x|
                        x.kind == CommentKind::Block && x.start_line(context) == op_line
                    ).collect(),
                    context
                ), parse_op_comments(
                    op.token.trailing_comments(context).filter(|x|
                        x.kind == CommentKind::Block && x.start_line(context) == op_line
                    ).collect(),
                    context
                ))
            } else { (PrintItems::new(), PrintItems::new()) };
            let is_inner_binary_expression = bin_expr_item.expr.kind() == NodeKind::BinExpr;
            items.extend(parse_node_with_inner_parse(bin_expr_item.expr, context, |node_items, context| {
                let mut items = PrintItems::new();
                // emit the leading operator (with its surrounding comments/spacing)
                if let Some(op) = pre_op {
                    if !leading_pre_op_comments.is_empty() {
                        items.extend(leading_pre_op_comments);
                        items.push_str(" ");
                    }
                    items.push_str(op.op.as_str());
                    if trailing_pre_op_comments.is_empty() {
                        if use_space_surrounding_operator {
                            items.push_str(" ");
                        }
                    } else {
                        items.push_str(" ");
                        items.extend(trailing_pre_op_comments);
                        items.push_str(" ");
                    }
                }
                items.extend(if is_inner_binary_expression {
                    let node_items = node_items.into_rc_path();
                    with_queued_indent(
                        // indent again if it hasn't done the current binary expression's hanging indent
                        if_true_or(
                            "indentIfNecessary",
                            move |context| {
                                let binary_expr_start_info = context.get_resolved_info(&binary_expr_start_info)?;
                                if allow_no_indent && binary_expr_start_info.is_start_of_line() { return Some(false); }
                                let is_hanging = binary_expr_start_info.indent_level < context.writer_info.indent_level;
                                Some(!is_hanging)
                            },
                            with_queued_indent(node_items.clone().into()),
                            node_items.into(),
                        ).into()
                    )
                } else {
                    node_items
                });
                // emit the trailing operator (with its surrounding comments/spacing)
                if let Some(op) = post_op {
                    let op_line = op.token.start_line(context);
                    let leading_post_op_comments = parse_op_comments(
                        op.token.leading_comments(context).filter(|x|
                            x.kind == CommentKind::Block && x.start_line(context) == op_line
                        ).collect(),
                        context
                    );
                    let trailing_post_op_comments = parse_op_comments(
                        op.token.trailing_comments(context).filter(|x|
                            x.start_line(context) == op_line
                        ).collect(),
                        context
                    );
                    if leading_post_op_comments.is_empty() {
                        if use_space_surrounding_operator {
                            items.push_str(" ");
                        }
                    } else {
                        items.push_str(" ");
                        items.extend(leading_post_op_comments);
                        items.push_str(" ");
                    }
                    items.push_str(op.op.as_str());
                    if !trailing_post_op_comments.is_empty() {
                        items.push_str(" ");
                        items.extend(trailing_post_op_comments);
                    }
                }
                items
            }));
            parsed_nodes.push(parser_helpers::ParsedValue {
                items: parser_helpers::new_line_group(items),
                lines_span,
                allow_inline_multi_line: true,
                allow_inline_single_line: true,
            });
        }
        parsed_nodes
    }, parser_helpers::ParseSeparatedValuesOptions {
        prefer_hanging: false,
        force_use_new_lines,
        allow_blank_lines: false,
        single_line_space_at_start: false,
        single_line_space_at_end: false,
        single_line_separator: if use_space_surrounding_operator { Signal::SpaceOrNewLine.into() } else { PrintItems::new() },
        indent_width,
        multi_line_options,
        force_possible_newline_at_start: false,
    }).items);
    return if node.op.is_equality() { parser_helpers::new_line_group(items) } else { items };

    /// Whether a chain that already starts at the beginning of a line may
    /// skip the hanging indent: only for arithmetic/logical/bitwise/mod
    /// operators, and not at statement level, inside another binary
    /// expression, or as a call/new argument.
    fn get_allow_no_indent(node: &BinExpr, context: &mut Context) -> bool {
        let parent_kind = context.parent().kind();
        if !node.op.is_add_sub()
            && !node.op.is_mul_div()
            && !node.op.is_logical()
            && !node.op.is_bit_logical()
            && !node.op.is_bit_shift()
            && node.op != BinaryOp::Mod
        {
            false
        } else if parent_kind == NodeKind::ExprStmt || parent_kind == NodeKind::BinExpr {
            false
        } else {
            // get if in an argument
            match context.parent() {
                Node::ExprOrSpread(_) => {
                    match context.parent_stack.get(1).expect("Expr or spread should always have a parent.").kind() {
                        NodeKind::CallExpr | NodeKind::NewExpr => false,
                        _ => true,
                    }
                },
                _ => true,
            }
        }
    }

    /// Parses operator-adjacent comments, separating consecutive parsed
    /// comments with a single space.
    fn parse_op_comments(comments: Vec<&Comment>, context: &mut Context) -> PrintItems {
        let mut items = PrintItems::new();
        let mut had_comment_last = false;
        for comment in comments {
            if had_comment_last { items.push_str(" "); }
            if let Some(comment) = parse_comment(&comment, context) {
                items.extend(comment);
                had_comment_last = true;
            } else {
                had_comment_last = false;
            }
        }
        items
    }

    /// Bitwise/arithmetic operators may omit surrounding spaces per
    /// configuration; all other operators always get spaces.
    fn get_use_space_surrounding_operator(op: &BinaryOp, context: &mut Context) -> bool {
        if op.is_bitwise_or_arithmetic() {
            context.config.binary_expression_space_surrounding_bitwise_and_arithmetic_operator
        } else {
            true
        }
    }
}
/// Parses a call expression. Calls that look like common test-library
/// invocations (`it("...", () => {})`, `describe(...)`, `Deno.test(...)`)
/// get special formatting that keeps the description argument on one line.
fn parse_call_expr<'a>(node: &'a CallExpr, context: &mut Context<'a>) -> PrintItems {
    return if is_test_library_call_expr(&node, context) {
        parse_test_library_call_expr(node, context)
    } else {
        inner_parse(node, context)
    };

    /// Standard call: callee, optional type args, optional `?.`, then arguments.
    fn inner_parse<'a>(node: &'a CallExpr, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        items.extend(parse_node((&node.callee).into(), context));
        if let Some(type_args) = &node.type_args {
            items.extend(parse_node(type_args.into(), context));
        }
        if is_optional(context) {
            items.push_str("?.");
        }
        items.push_condition(conditions::with_indent_if_start_of_line_indented(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
            span_data: node.get_parameters_span_data(context),
            nodes: node.args.iter().map(|node| node.into()).collect(),
            custom_close_paren: |_| None,
            is_parameters: false,
        }, context)));
        items
    }

    /// Test-library call: the description argument is forced onto a single
    /// line and the function argument follows directly.
    fn parse_test_library_call_expr<'a>(node: &'a CallExpr, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        items.extend(parse_test_library_callee(&node.callee, context));
        items.extend(parse_test_library_arguments(&node.args, context));
        return items;

        fn parse_test_library_callee<'a>(callee: &'a ExprOrSuper, context: &mut Context<'a>) -> PrintItems {
            match callee {
                ExprOrSuper::Expr(expr) => {
                    let expr = &**expr;
                    match expr {
                        // print `obj.prop` inline so the callee never breaks across lines
                        Expr::Member(member_expr) => {
                            let mut items = PrintItems::new();
                            items.extend(parse_node((&member_expr.obj).into(), context));
                            items.push_str(".");
                            items.extend(parse_node((&member_expr.prop).into(), context));
                            items
                        },
                        _ => parse_node(expr.into(), context),
                    }
                },
                _ => parse_node(callee.into(), context),
            }
        }

        // take a slice rather than `&Vec<_>` (clippy::ptr_arg); `&node.args` coerces
        fn parse_test_library_arguments<'a>(args: &'a [ExprOrSpread], context: &mut Context<'a>) -> PrintItems {
            let mut items = PrintItems::new();
            items.push_str("(");
            items.extend(parse_node_with_inner_parse((&args[0]).into(), context, |items, _| {
                // the description argument stays on one line and gets a trailing comma
                let mut new_items = parser_helpers::with_no_new_lines(items);
                new_items.push_str(",");
                new_items
            }));
            items.push_str(" ");
            items.extend(parse_node((&args[1]).into(), context));
            items.push_str(")");
            return items;
        }
    }

    // Tests if this is a call expression from common test libraries.
    // Be very strict here to allow the user to opt out if they'd like.
    fn is_test_library_call_expr(node: &CallExpr, context: &mut Context) -> bool {
        if node.args.len() != 2 || node.type_args.is_some() || !is_valid_callee(&node.callee) || is_optional(context) {
            return false;
        }
        // first argument must be a string or template literal description
        if (*node.args[0].expr).kind() != NodeKind::Str && !is_expr_template(&node.args[0].expr) {
            return false;
        }
        // second argument must be a function or arrow expression
        if node.args[1].expr.kind() != NodeKind::FnExpr && node.args[1].expr.kind() != NodeKind::ArrowExpr {
            return false;
        }
        // the call and its function argument must start on the same line
        return node.start_line(context) == node.args[1].start_line(context);

        fn is_valid_callee(callee: &ExprOrSuper) -> bool {
            return match get_first_identifier_text(&callee) {
                Some("it") | Some("describe") | Some("test") => true,
                _ => {
                    // support call expressions like `Deno.test("description", ...)`
                    match get_last_identifier_text(&callee) {
                        Some("test") => true,
                        _ => false,
                    }
                },
            };

            // walk down the member-expression chain to its left-most identifier
            fn get_first_identifier_text(callee: &ExprOrSuper) -> Option<&str> {
                return match callee {
                    ExprOrSuper::Super(_) => None,
                    ExprOrSuper::Expr(expr) => {
                        match &**expr {
                            Expr::Ident(ident) => Some(&ident.sym),
                            Expr::Member(member) if (*member.prop).kind() == NodeKind::Ident => get_first_identifier_text(&member.obj),
                            _ => None,
                        }
                    }
                };
            }

            // walk down the member-expression chain to its right-most identifier
            fn get_last_identifier_text(callee: &ExprOrSuper) -> Option<&str> {
                return match callee {
                    ExprOrSuper::Super(_) => None,
                    ExprOrSuper::Expr(expr) => get_last_identifier_text_from_expr(expr),
                };

                fn get_last_identifier_text_from_expr(expr: &Expr) -> Option<&str> {
                    match expr {
                        Expr::Ident(ident) => Some(&ident.sym),
                        Expr::Member(member) if (member.obj).kind() == NodeKind::Ident => get_last_identifier_text_from_expr(&member.prop),
                        _ => None,
                    }
                }
            }
        }
    }

    /// A call wrapped by an optional-chain parent prints `?.` before its arguments.
    fn is_optional(context: &Context) -> bool {
        return context.parent().kind() == NodeKind::OptChainExpr;
    }
}
/// Parses a class expression by delegating to the shared class
/// declaration/expression parser.
fn parse_class_expr<'a>(node: &'a ClassExpr, context: &mut Context<'a>) -> PrintItems {
    let class = &node.class;
    parse_class_decl_or_expr(ClassDeclOrExpr {
        span_data: class.span,
        decorators: &class.decorators,
        is_class_expr: true,
        // class expressions can be neither ambient nor abstract declarations
        is_declare: false,
        is_abstract: class.is_abstract,
        ident: node.ident.as_ref().map(|ident| ident.into()),
        type_params: class.type_params.as_ref().map(|params| params.into()),
        super_class: class.super_class.as_ref().map(|super_class| super_class.into()),
        super_type_params: class.super_type_params.as_ref().map(|params| params.into()),
        implements: class.implements.iter().map(|implements_item| implements_item.into()).collect(),
        members: class.body.iter().map(|member| member.into()).collect(),
        brace_position: context.config.class_expression_brace_position,
    }, context)
}
fn parse_conditional_expr<'a>(node: &'a CondExpr, context: &mut Context<'a>) -> PrintItems {
let operator_token = context.token_finder.get_first_operator_after(&*node.test, "?").unwrap();
let force_new_lines = !context.config.conditional_expression_prefer_single_line && (
node_helpers::get_use_new_lines_for_nodes(&*node.test, &*node.cons, context)
|| node_helpers::get_use_new_lines_for_nodes(&*node.cons, &*node.alt, context)
);
let operator_position = get_operator_position(node, &operator_token, context);
let top_most_data = get_top_most_data(node, context);
let before_alternate_info = Info::new("beforeAlternateInfo");
let end_info = Info::new("endConditionalExpression");
let mut items = PrintItems::new();
if top_most_data.is_top_most {
items.push_info(top_most_data.top_most_info);
}
items.extend(parser_helpers::new_line_group(with_queued_indent(parse_node_with_inner_parse((&node.test).into(), context, {
move |mut items, _| {
if operator_position == OperatorPosition::SameLine {
items.push_str(" ?");
}
items
}
}))));
// force re-evaluation of all the conditions below once the end info has been reached
items.push_condition(conditions::force_reevaluation_once_resolved(context.end_statement_or_member_infos.peek().map(|x| x.clone()).unwrap_or(end_info)));
if force_new_lines {
items.push_signal(Signal::NewLine);
} else {
items.push_condition(conditions::new_line_if_multiple_lines_space_or_new_line_otherwise(top_most_data.top_most_info, Some(before_alternate_info)));
}
let cons_and_alt_items = {
let mut items = PrintItems::new();
if operator_position == OperatorPosition::NextLine {
items.push_str("? ");
}
items.extend(parser_helpers::new_line_group(parse_node_with_inner_parse((&node.cons).into(), context, {
move |mut items, _| {
if operator_position == OperatorPosition::SameLine {
items.push_str(" :");
items
} else {
conditions::indent_if_start_of_line(items).into()
}
}
})));
if force_new_lines {
items.push_signal(Signal::NewLine);
} else {
items.push_condition(conditions::new_line_if_multiple_lines_space_or_new_line_otherwise(top_most_data.top_most_info, Some(before_alternate_info)));
}
if operator_position == OperatorPosition::NextLine {
items.push_str(": ");
}
items.push_info(before_alternate_info);
items.extend(parser_helpers::new_line_group(parse_node_with_inner_parse((&node.alt).into(), context, |items, _| {
if operator_position == OperatorPosition::NextLine {
conditions::indent_if_start_of_line(items).into()
} else {
items
}
})));
items.push_info(end_info);
items
};
if top_most_data.is_top_most {
items.push_condition(conditions::indent_if_start_of_line(cons_and_alt_items));
} else {
let cons_and_alt_items = cons_and_alt_items.into_rc_path();
let top_most_info = top_most_data.top_most_info;
items.push_condition(if_true_or(
"indentIfSameIndentationAsTopMostAndStartOfLine",
move |context| {
if context.writer_info.is_start_of_line() {
let top_most_info = context.get_resolved_info(&top_most_info)?;
Some(context.writer_info.indent_level == top_most_info.indent_level)
} else {
Some(false)
}
},
with_indent(cons_and_alt_items.clone().into()),
cons_and_alt_items.into(),
));
}
return items;
struct TopMostData {
top_most_info: Info,
is_top_most: bool,
}
// Finds the outermost conditional expression that `node` belongs to and
// returns its shared `Info`. The top-most node creates and stores the info;
// nested ones look it up (so the top-most must be parsed first).
fn get_top_most_data(node: &CondExpr, context: &mut Context) -> TopMostData {
    // The "top most" node in nested conditionals follows the ancestors up through
    // the alternate expressions.
    let mut top_most_node = node;
    for ancestor in context.parent_stack.iter() {
        if let Node::CondExpr(parent) = ancestor {
            // only keep climbing while this node sits in the parent's alternate position
            if parent.alt.lo() == top_most_node.lo() {
                top_most_node = parent;
            } else {
                break;
            }
        } else {
            break;
        }
    }
    let is_top_most = top_most_node == node;
    let top_most_info = get_or_set_top_most_info(top_most_node.lo(), is_top_most, context);
    return TopMostData {
        is_top_most,
        top_most_info,
    };

    // Creates and stores the info when called for the top-most node; otherwise
    // retrieves the previously stored one (parse order guarantees it exists).
    fn get_or_set_top_most_info(top_most_expr_start: BytePos, is_top_most: bool, context: &mut Context) -> Info {
        if is_top_most {
            let info = Info::new("conditionalExprStart");
            context.store_info_for_node(&top_most_expr_start, info);
            info
        } else {
            context.get_info_for_node(&top_most_expr_start).expect("Expected to have the top most expr info stored")
        }
    }
}
// Resolves the configured operator position for a conditional expression.
// `Maintain` keeps whatever the source already does: the operator stays on the
// same line when it starts on the line where the test expression ends.
fn get_operator_position(node: &CondExpr, operator_token: &TokenAndSpan, context: &mut Context) -> OperatorPosition {
    match context.config.conditional_expression_operator_position {
        OperatorPosition::NextLine => OperatorPosition::NextLine,
        OperatorPosition::SameLine => OperatorPosition::SameLine,
        OperatorPosition::Maintain => {
            let operator_on_test_line = node.test.end_line(context) == operator_token.start_line(context);
            if operator_on_test_line {
                OperatorPosition::SameLine
            } else {
                OperatorPosition::NextLine
            }
        }
    }
}
}
// Emits an optional `...` spread token followed by the wrapped expression.
fn parse_expr_or_spread<'a>(node: &'a ExprOrSpread, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    if node.spread.is_some() {
        result.push_str("...");
    }
    result.extend(parse_node((&node.expr).into(), context));
    result
}
// Emits an expression followed by its type arguments when present
// (e.g. `Base<T>` in an implements/extends clause).
fn parse_expr_with_type_args<'a>(node: &'a TsExprWithTypeArgs, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.expr).into(), context);
    if let Some(type_args) = &node.type_args {
        result.extend(parse_node(type_args.into(), context));
    }
    result
}
// Parses a function expression by delegating to the shared function
// declaration/expression parser with the expression-specific flags.
fn parse_fn_expr<'a>(node: &'a FnExpr, context: &mut Context<'a>) -> PrintItems {
    parse_function_decl_or_expr(FunctionDeclOrExprNode {
        is_func_decl: false,
        ident: node.ident.as_ref(),
        declare: false,
        func: &node.function,
    }, context)
}
// Parses an object literal getter (`get prop() { ... }`) as a method-like
// member with the `Getter` kind and no parameters.
fn parse_getter_prop<'a>(node: &'a GetterProp, context: &mut Context<'a>) -> PrintItems {
    return parse_class_or_object_method(ClassOrObjectMethod {
        parameters_span_data: node.get_parameters_span_data(context),
        decorators: None,
        accessibility: None,
        is_static: false,
        is_async: false,
        is_abstract: false,
        kind: ClassOrObjectMethodKind::Getter,
        is_generator: false,
        is_optional: false,
        key: (&node.key).into(),
        type_params: None,
        params: Vec::new(),
        return_type: node.type_ann.as_ref().map(|x| x.into()),
        body: node.body.as_ref().map(|x| x.into()),
    }, context);
}
// Emits `key: value` in an object literal, using the shared assignment
// parser for the value's wrapping behavior.
fn parse_key_value_prop<'a>(node: &'a KeyValueProp, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.key).into(), context);
    result.extend(parse_assignment((&node.value).into(), ":", context));
    result
}
// Parses a member expression (`obj.prop` / `obj[prop]`) via the shared
// member-like expression parser.
fn parse_member_expr<'a>(node: &'a MemberExpr, context: &mut Context<'a>) -> PrintItems {
    parse_for_member_like_expr(MemberLikeExpr {
        left_node: (&node.obj).into(),
        right_node: (&node.prop).into(),
        is_computed: node.computed,
    }, context)
}
// Parses a meta property (e.g. `new.target`, `import.meta`); never computed.
fn parse_meta_prop_expr<'a>(node: &'a MetaPropExpr, context: &mut Context<'a>) -> PrintItems {
    parse_for_member_like_expr(MemberLikeExpr {
        left_node: (&node.meta).into(),
        right_node: (&node.prop).into(),
        is_computed: false,
    }, context)
}
// Emits `new Callee<TypeArgs>(args)`. A `new` expression may omit the
// argument list entirely, which is treated the same as empty parentheses.
fn parse_new_expr<'a>(node: &'a NewExpr, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("new ");
    result.extend(parse_node((&node.callee).into(), context));
    if let Some(type_args) = &node.type_args {
        result.extend(parse_node(type_args.into(), context));
    }
    let args = node.args.as_ref()
        .map(|args| args.iter().map(|arg| arg.into()).collect())
        .unwrap_or_default();
    result.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: args,
        custom_close_paren: |_| None,
        is_parameters: false
    }, context));
    result
}
// Emits a non-null assertion: the inner expression followed by `!`.
fn parse_non_null_expr<'a>(node: &'a TsNonNullExpr, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.expr).into(), context);
    result.push_str("!");
    result
}
// Parses an object literal by delegating to the shared object-like node
// parser with the object-expression configuration.
fn parse_object_lit<'a>(node: &'a ObjectLit, context: &mut Context<'a>) -> PrintItems {
    parse_object_like_node(ParseObjectLikeNodeOptions {
        node_span_data: node.span,
        members: node.props.iter().map(|x| x.into()).collect(),
        separator: context.config.object_expression_trailing_commas.into(),
        prefer_hanging: context.config.object_expression_prefer_hanging,
        prefer_single_line: context.config.object_expression_prefer_single_line,
        surround_single_line_with_spaces: true,
    }, context)
}
// Parses a parenthesized expression. The inner expression is wrapped in
// parens with hanging indentation; the whole thing is grouped on a new line
// unless it is an arrow function body that opts out of grouping.
fn parse_paren_expr<'a>(node: &'a ParenExpr, context: &mut Context<'a>) -> PrintItems {
    let parsed_items = conditions::with_indent_if_start_of_line_indented(parse_node_in_parens(
        |context| parse_node((&node.expr).into(), context),
        ParseNodeInParensOptions {
            inner_span: node.expr.span_data(),
            prefer_hanging: true,
            allow_open_paren_trailing_comments: true,
        },
        context
    )).into();
    return if get_use_new_line_group(node, context) {
        new_line_group(parsed_items)
    } else {
        parsed_items
    };

    // Don't use a new-line group when this paren expr is an arrow function's
    // body and that arrow body style opts out of grouping.
    fn get_use_new_line_group(node: &ParenExpr, context: &mut Context) -> bool {
        if let Node::ArrowExpr(arrow_expr) = context.parent() {
            debug_assert!(arrow_expr.body.lo() == node.lo());
            use_new_line_group_for_arrow_body(arrow_expr)
        } else {
            true
        }
    }
}
// Parses a comma (sequence) expression, e.g. `a, b, c`, as comma-separated
// values with hanging indentation and no trailing comma.
fn parse_sequence_expr<'a>(node: &'a SeqExpr, context: &mut Context<'a>) -> PrintItems {
    parse_separated_values(ParseSeparatedValuesOptions {
        nodes: node.exprs.iter().map(|x| Some(x.into())).collect(),
        prefer_hanging: context.config.sequence_expression_prefer_hanging,
        force_use_new_lines: false,
        allow_blank_lines: false,
        separator: TrailingCommas::Never.into(),
        single_line_space_at_start: false,
        single_line_space_at_end: false,
        custom_single_line_separator: None,
        multi_line_options: parser_helpers::MultiLineOptions::same_line_start_hanging_indent(),
        force_possible_newline_at_start: false,
    }, context)
}
// Parses an object literal setter (`set prop(v) { ... }`) as a method-like
// member with the `Setter` kind and its single parameter.
fn parse_setter_prop<'a>(node: &'a SetterProp, context: &mut Context<'a>) -> PrintItems {
    parse_class_or_object_method(ClassOrObjectMethod {
        parameters_span_data: node.get_parameters_span_data(context),
        decorators: None,
        accessibility: None,
        is_static: false,
        is_async: false,
        is_abstract: false,
        kind: ClassOrObjectMethodKind::Setter,
        is_generator: false,
        is_optional: false,
        key: (&node.key).into(),
        type_params: None,
        params: vec![(&node.param).into()],
        return_type: None,
        body: node.body.as_ref().map(|x| x.into()),
    }, context)
}
// Emits `...expr`; when it appears directly inside a JSX opening element it
// must additionally be wrapped in a JSX expression container (`{...expr}`).
fn parse_spread_element<'a>(node: &'a SpreadElement, context: &mut Context<'a>) -> PrintItems {
    let mut spread_items = PrintItems::new();
    spread_items.push_str("...");
    spread_items.extend(parse_node((&node.expr).into(), context));
    let inside_jsx_opening_element = context.parent().kind() == NodeKind::JSXOpeningElement;
    if inside_jsx_opening_element {
        parse_as_jsx_expr_container(spread_items, context)
    } else {
        spread_items
    }
}
// Parses a tagged template (`tag`/`tag<T>` followed by a template literal).
// Whether a space appears before the literal is configurable, and the
// literal is allowed to break to a new line when the line is too wide.
fn parse_tagged_tpl<'a>(node: &'a TaggedTpl, context: &mut Context<'a>) -> PrintItems {
    let use_space = context.config.tagged_template_space_before_literal;
    let mut items = parse_node((&node.tag).into(), context);
    if let Some(type_params) = &node.type_params { items.extend(parse_node(type_params.into(), context)); }
    // past the indent width: allow a break before the literal; otherwise just
    // the configured space (or nothing)
    items.push_condition(conditions::if_above_width_or(
        context.config.indent_width,
        if use_space { Signal::SpaceOrNewLine } else { Signal::PossibleNewLine }.into(),
        if use_space { " ".into() } else { PrintItems::new() }
    ));
    items.push_condition(conditions::indent_if_start_of_line(parse_template_literal(&node.quasis, &node.exprs.iter().map(|x| &**x).collect(), context)));
    items
}
// Parses an untagged template literal by delegating to the shared
// template-literal parser with its quasis and expressions.
fn parse_tpl<'a>(node: &'a Tpl, context: &mut Context<'a>) -> PrintItems {
    parse_template_literal(&node.quasis, &node.exprs.iter().map(|x| &**x).collect(), context)
}
// Emits a template literal's raw text chunk verbatim (no re-formatting).
fn parse_tpl_element<'a>(node: &'a TplElement, context: &mut Context<'a>) -> PrintItems {
    parse_raw_string(node.text(context).into())
}
// Parses a template literal (`` `...${expr}...` ``), interleaving the quasis
// (raw text chunks) with the expressions in source order. Indentation is
// ignored inside the raw text so the literal's contents print verbatim;
// it is re-enabled while printing each interpolated expression.
fn parse_template_literal<'a>(quasis: &'a Vec<TplElement>, exprs: &Vec<&'a Expr>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    items.push_str("`");
    items.push_signal(Signal::StartIgnoringIndent);
    for node in get_nodes(quasis, exprs) {
        if node.kind() == NodeKind::TplElement {
            items.extend(parse_node(node, context));
        } else {
            items.push_str("${");
            items.push_signal(Signal::FinishIgnoringIndent);
            let keep_on_one_line = get_keep_on_one_line(&node);
            let possible_surround_newlines = get_possible_surround_newlines(&node);
            let parsed_expr = parse_node(node, context);
            items.extend(if keep_on_one_line {
                with_no_new_lines(parsed_expr)
            } else if possible_surround_newlines {
                parser_helpers::surround_with_newlines_indented_if_multi_line(new_line_group(parsed_expr), context.config.indent_width)
            } else {
                parsed_expr
            });
            items.push_str("}");
            items.push_signal(Signal::StartIgnoringIndent);
        }
    }
    items.push_str("`");
    items.push_signal(Signal::FinishIgnoringIndent);
    return items;

    // Merges the quasis and expressions into one list ordered by source position.
    fn get_nodes<'a>(quasis: &'a Vec<TplElement>, exprs: &Vec<&'a Expr>) -> Vec<Node<'a>> {
        let mut nodes = Vec::with_capacity(quasis.len() + exprs.len());
        let mut quasis_index = 0;
        let mut exprs_index = 0;
        while quasis_index < quasis.len() || exprs_index < exprs.len() {
            // take the quasis when it starts before the next expression,
            // or when no expressions remain
            let is_quasis = match (quasis.get(quasis_index), exprs.get(exprs_index)) {
                (Some(current_quasis), Some(current_expr)) => current_quasis.lo() < current_expr.lo(),
                (Some(_), None) => true,
                (None, _) => false,
            };
            if is_quasis {
                nodes.push((&quasis[quasis_index]).into());
                quasis_index += 1;
            } else {
                nodes.push(exprs[exprs_index].into());
                exprs_index += 1;
            }
        }
        nodes
    }

    // handle this on a case by case basis for now
    fn get_keep_on_one_line(node: &Node) -> bool {
        match node {
            Node::Ident(_) | Node::ThisExpr(_) | Node::Super(_) | Node::Str(_) | Node::PrivateName(_) => true,
            Node::MemberExpr(expr) => keep_member_expr_on_one_line(expr),
            Node::CallExpr(expr) => keep_call_expr_on_one_line(expr),
            _ => false,
        }
    }

    // Expressions that may get newline-surrounded indentation when they go multi-line.
    fn get_possible_surround_newlines(node: &Node) -> bool {
        match node {
            Node::CondExpr(_) => true,
            Node::MemberExpr(expr) => !keep_member_expr_on_one_line(expr),
            Node::CallExpr(expr) => !keep_call_expr_on_one_line(expr),
            _ => false,
        }
    }

    // Non-computed member accesses whose object and property are both "simple".
    fn keep_member_expr_on_one_line(expr: &MemberExpr) -> bool {
        get_keep_on_one_line(&(&expr.obj).into()) && get_keep_on_one_line(&(&expr.prop).into()) && !expr.computed
    }

    // Zero-argument calls on a "simple" callee.
    fn keep_call_expr_on_one_line(expr: &CallExpr) -> bool {
        expr.args.is_empty() && get_keep_on_one_line(&(&expr.callee).into())
    }
}
// Emits an angle-bracket type assertion: `<Type>expr`, with an optional
// configured space between the closing `>` and the expression.
fn parse_type_assertion<'a>(node: &'a TsTypeAssertion, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("<");
    result.extend(parse_node((&node.type_ann).into(), context));
    result.push_str(">");
    if context.config.type_assertion_space_before_expression {
        result.push_str(" ");
    }
    result.extend(parse_node((&node.expr).into(), context));
    result
}
// Emits a unary expression: the operator text (keyword operators include a
// trailing space) followed by the operand.
fn parse_unary_expr<'a>(node: &'a UnaryExpr, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    items.push_str(get_operator_text(node.op));
    items.extend(parse_node((&node.arg).into(), context));
    return items;

    // The text is a compile-time constant, so `&'static str` is the accurate
    // return type (the previous `<'a>` lifetime parameter was unconstrained).
    fn get_operator_text(op: UnaryOp) -> &'static str {
        match op {
            UnaryOp::Void => "void ",
            UnaryOp::TypeOf => "typeof ",
            UnaryOp::Delete => "delete ",
            UnaryOp::Bang => "!",
            UnaryOp::Plus => "+",
            UnaryOp::Minus => "-",
            UnaryOp::Tilde => "~",
        }
    }
}
// Emits an increment/decrement expression, placing the operator before or
// after the operand according to `node.prefix`.
fn parse_update_expr<'a>(node: &'a UpdateExpr, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let operator_text = get_operator_text(node.op);
    if node.prefix {
        items.push_str(operator_text);
    }
    items.extend(parse_node((&node.arg).into(), context));
    if !node.prefix {
        items.push_str(operator_text);
    }
    return items;

    // The text is a compile-time constant, so `&'static str` is the accurate
    // return type (the previous `<'a>` lifetime parameter was unconstrained).
    fn get_operator_text(operator: UpdateOp) -> &'static str {
        match operator {
            UpdateOp::MinusMinus => "--",
            UpdateOp::PlusPlus => "++",
        }
    }
}
// Emits `yield`, `yield*`, or either form followed by a space and the
// yielded expression when one exists.
fn parse_yield_expr<'a>(node: &'a YieldExpr, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("yield");
    if node.delegate {
        result.push_str("*");
    }
    if let Some(arg) = &node.arg {
        result.push_str(" ");
        result.extend(parse_node(arg.into(), context));
    }
    result
}
/* exports */
// Emits `orig` or `orig as exported`; the `as exported` part may break to a
// new line (indented) when the line is too long.
fn parse_export_named_specifier<'a>(node: &'a ExportNamedSpecifier, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.orig).into(), context);
    if let Some(exported) = &node.exported {
        result.push_signal(Signal::SpaceOrNewLine);
        let mut alias_items = PrintItems::new();
        alias_items.push_str("as ");
        alias_items.extend(parse_node(exported.into(), context));
        result.push_condition(conditions::indent_if_start_of_line(alias_items));
    }
    result
}
// Emits a namespace export specifier: `* as name`.
fn parse_namespace_export_specifier<'a>(node: &'a ExportNamespaceSpecifier, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("* as ");
    result.extend(parse_node((&node.name).into(), context));
    result
}
/* imports */
// Emits `local` or `imported as local`; the `as local` part may break to a
// new line (indented) when the line is too long.
fn parse_import_named_specifier<'a>(node: &'a ImportNamedSpecifier, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    match &node.imported {
        Some(imported) => {
            result.extend(parse_node(imported.into(), context));
            result.push_signal(Signal::SpaceOrNewLine);
            let mut alias_items = PrintItems::new();
            alias_items.push_str("as ");
            alias_items.extend(parse_node((&node.local).into(), context));
            result.push_condition(conditions::indent_if_start_of_line(alias_items));
        }
        None => {
            result.extend(parse_node((&node.local).into(), context));
        }
    }
    result
}
// Emits a namespace import specifier: `* as local`.
fn parse_import_namespace_specifier<'a>(node: &'a ImportStarAsSpecifier, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("* as ");
    result.extend(parse_node((&node.local).into(), context));
    result
}
// Emits `require(expr)` for a TS external module reference.
// Everything is kept on a single line by construction.
fn parse_external_module_ref<'a>(node: &'a TsExternalModuleRef, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("require(");
    result.extend(parse_node((&node.expr).into(), context));
    result.push_str(")");
    result
}
/* interface / type element */
// Parses an interface/type-literal call signature: `<T>(params): ReturnType`.
// `start_info` is captured by the close-paren callback so the return type
// can be positioned relative to where the signature started.
fn parse_call_signature_decl<'a>(node: &'a TsCallSignatureDecl, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_info = Info::new("startCallSignature");
    items.push_info(start_info);
    if let Some(type_params) = &node.type_params { items.extend(parse_node(type_params.into(), context)); }
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: node.params.iter().map(|node| node.into()).collect(),
        custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info,
            type_node: node.type_ann.as_ref().map(|x| x.into()),
            type_node_separator: None,
            param_count: node.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    return items;
}
// Parses a construct signature: `new <T>(params): ReturnType`. The space
// after the `new` keyword is configurable.
fn parse_construct_signature_decl<'a>(node: &'a TsConstructSignatureDecl, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_info = Info::new("startConstructSignature");
    items.push_info(start_info);
    items.push_str("new");
    if context.config.construct_signature_space_after_new_keyword { items.push_str(" "); }
    if let Some(type_params) = &node.type_params { items.extend(parse_node(type_params.into(), context)); }
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: node.params.iter().map(|node| node.into()).collect(),
        custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info,
            type_node: node.type_ann.as_ref().map(|x| x.into()),
            type_node_separator: None,
            param_count: node.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    return items;
}
// Emits an index signature: `[readonly] [param]: Type`. Index signatures
// always have exactly one parameter, printed inside square brackets.
fn parse_index_signature<'a>(node: &'a TsIndexSignature, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    if node.readonly {
        result.push_str("readonly ");
    }
    let param: Node<'a> = node.params.first().expect("Expected the index signature to have one parameter.").into();
    result.extend(parse_computed_prop_like(ParseComputedPropLikeOptions {
        inner_node_span_data: param.span_data(),
        inner_items: parse_node(param, context)
    }, context));
    result.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    result
}
// Parses an interface/type-literal method signature:
// `key?<T>(params): ReturnType`, with a computed key printed in brackets.
fn parse_method_signature<'a>(node: &'a TsMethodSignature, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_info = Info::new("startMethodSignature");
    items.push_info(start_info);
    let key_items = parse_node((&node.key).into(), context);
    items.extend(if node.computed {
        parse_computed_prop_like(ParseComputedPropLikeOptions {
            inner_node_span_data: node.key.span_data(),
            inner_items: key_items
        }, context)
    } else {
        key_items
    });
    if node.optional { items.push_str("?"); }
    if let Some(type_params) = &node.type_params { items.extend(parse_node(type_params.into(), context)); }
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: node.params.iter().map(|node| node.into()).collect(),
        custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info,
            type_node: node.type_ann.as_ref().map(|x| x.into()),
            type_node_separator: None,
            param_count: node.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    return items;
}
// Parses an interface/type-literal property signature:
// `[readonly] key?: Type [= init]`, with a computed key printed in brackets.
fn parse_property_signature<'a>(node: &'a TsPropertySignature, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if node.readonly { items.push_str("readonly "); }
    let key_items = parse_node((&node.key).into(), context);
    items.extend(if node.computed {
        parse_computed_prop_like(ParseComputedPropLikeOptions {
            inner_node_span_data: node.key.span_data(),
            inner_items: key_items
        }, context)
    } else {
        key_items
    });
    if node.optional { items.push_str("?"); }
    items.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    if let Some(init) = &node.init {
        items.extend(parse_assignment(init.into(), "=", context));
    }
    return items;
}
// Parses an interface body's members. The enclosing interface declaration's
// start info (when found on the ancestor stack) lets the brace-positioning
// logic know where the header began.
fn parse_interface_body<'a>(node: &'a TsInterfaceBody, context: &mut Context<'a>) -> PrintItems {
    let start_header_info = get_parent_info(context);
    return parse_membered_body(ParseMemberedBodyOptions {
        span_data: node.span,
        members: node.body.iter().map(|x| x.into()).collect(),
        start_header_info,
        brace_position: context.config.interface_declaration_brace_position,
        should_use_blank_line: move |previous, next, context| {
            node_helpers::has_separating_blank_line(previous, next, context)
        },
        separator: context.config.semi_colons.into(),
    }, context);

    // Walks up the ancestors to find the enclosing interface declaration's info.
    fn get_parent_info(context: &mut Context) -> Option<Info> {
        for ancestor in context.parent_stack.iter() {
            if let Node::TsInterfaceDecl(ancestor) = ancestor {
                return context.get_info_for_node(*ancestor).map(|x| x.to_owned());
            }
        }
        None
    }
}
// Parses a type literal (`{ prop: string; }`) as an object-like node. The
// member separator (semi-colon vs. comma) is configured independently for
// single-line and multi-line layouts.
fn parse_type_lit<'a>(node: &'a TsTypeLit, context: &mut Context<'a>) -> PrintItems {
    return parse_object_like_node(ParseObjectLikeNodeOptions {
        node_span_data: node.span,
        members: node.members.iter().map(|m| m.into()).collect(),
        separator: Separator {
            single_line: Some(to_separator_value(context.config.type_literal_separator_kind_single_line, context)),
            multi_line: Some(to_separator_value(context.config.type_literal_separator_kind_multi_line, context)),
        },
        prefer_hanging: context.config.type_literal_prefer_hanging,
        prefer_single_line: context.config.type_literal_prefer_single_line,
        surround_single_line_with_spaces: true,
    }, context);

    // Maps the configured separator kind to a concrete separator value.
    fn to_separator_value(kind: SemiColonOrComma, context: &mut Context) -> SeparatorValue {
        match kind {
            SemiColonOrComma::Comma => SeparatorValue::Comma(context.config.type_literal_trailing_commas),
            SemiColonOrComma::SemiColon => SeparatorValue::SemiColon(context.config.semi_colons),
        }
    }
}
/* jsx */
// Emits a JSX attribute: `name` alone, or `name=value`. A value that was
// written with surrounding braces in the source keeps its braces.
fn parse_jsx_attribute<'a>(node: &'a JSXAttr, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.name).into(), context);
    if let Some(value) = &node.value {
        result.push_str("=");
        let had_braces = context.token_finder.get_previous_token_if_open_brace(value).is_some();
        let parsed_value = parse_node(value.into(), context);
        if had_braces {
            result.extend(parse_as_jsx_expr_container(parsed_value, context));
        } else {
            result.extend(parsed_value);
        }
    }
    result
}
// Emits a JSX closing tag: `</name>`.
fn parse_jsx_closing_element<'a>(node: &'a JSXClosingElement, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("</");
    result.extend(parse_node((&node.name).into(), context));
    result.push_str(">");
    result
}
// Emits a JSX fragment's closing tag, which is always the literal `</>`.
fn parse_jsx_closing_fragment<'a>(_: &'a JSXClosingFragment, _: &mut Context<'a>) -> PrintItems {
    "</>".into()
}
// Parses a JSX element. A self-closing element has no closing tag and is
// just its opening element; otherwise opening/children/closing are parsed
// together so the children can be laid out between the tags.
fn parse_jsx_element<'a>(node: &'a JSXElement, context: &mut Context<'a>) -> PrintItems {
    match &node.closing {
        Some(closing) => parse_jsx_with_opening_and_closing(ParseJsxWithOpeningAndClosingOptions {
            opening_element: (&node.opening).into(),
            closing_element: closing.into(),
            children: node.children.iter().map(|x| x.into()).collect(),
        }, context),
        None => parse_node((&node.opening).into(), context),
    }
}
// Parses a JSX empty expression (`{}` or `{/* comment */}`); only its
// comments need to be emitted.
fn parse_jsx_empty_expr<'a>(node: &'a JSXEmptyExpr, context: &mut Context<'a>) -> PrintItems {
    parse_comment_collection(get_jsx_empty_expr_comments(node, context), None, None, context)
}
// Parses a JSX expression container (`{expr}`). Empty expressions are
// dispatched directly because parse_node would not give their comments the
// special handling they need.
fn parse_jsx_expr_container<'a>(node: &'a JSXExprContainer, context: &mut Context<'a>) -> PrintItems {
    let inner_items = match &node.expr {
        JSXExpr::JSXEmptyExpr(expr) => parse_jsx_empty_expr(expr, context),
        JSXExpr::Expr(expr) => parse_node(expr.into(), context),
    };
    parse_as_jsx_expr_container(inner_items, context)
}
// Wraps already-parsed items in a JSX expression container: `{ ... }` or
// `{...}` depending on the configured surrounding-space setting.
fn parse_as_jsx_expr_container(parsed_node: PrintItems, context: &mut Context) -> PrintItems {
    let pad_with_space = context.config.jsx_expression_container_space_surrounding_expression;
    let mut result = PrintItems::new();
    result.push_str("{");
    if pad_with_space {
        result.push_str(" ");
    }
    result.extend(parsed_node);
    if pad_with_space {
        result.push_str(" ");
    }
    result.push_str("}");
    result
}
// Parses a JSX fragment (`<>children</>`) with its opening/closing tags and
// children laid out by the shared JSX element parser.
fn parse_jsx_fragment<'a>(node: &'a JSXFragment, context: &mut Context<'a>) -> PrintItems {
    parse_jsx_with_opening_and_closing(ParseJsxWithOpeningAndClosingOptions {
        opening_element: (&node.opening).into(),
        closing_element: (&node.closing).into(),
        children: node.children.iter().map(|x| x.into()).collect(),
    }, context)
}
// Emits a JSX member expression: `obj.prop`.
fn parse_jsx_member_expr<'a>(node: &'a JSXMemberExpr, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.obj).into(), context);
    result.push_str(".");
    result.extend(parse_node((&node.prop).into(), context));
    result
}
// Emits a JSX namespaced name: `ns:name`.
fn parse_jsx_namespaced_name<'a>(node: &'a JSXNamespacedName, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.ns).into(), context);
    result.push_str(":");
    result.extend(parse_node((&node.name).into(), context));
    result
}
// Parses a JSX opening tag: `<Name<TypeArgs> attrs... >` or a self-closing
// `<Name attrs... />`. Attributes either hang or get surrounded by indented
// newlines; with no attributes a self-closing tag still gets a space before `/`.
fn parse_jsx_opening_element<'a>(node: &'a JSXOpeningElement, context: &mut Context<'a>) -> PrintItems {
    let force_use_new_lines = get_force_is_multi_line(node, context);
    let start_info = Info::new("openingElementStartInfo");
    let mut items = PrintItems::new();
    items.push_info(start_info);
    items.push_str("<");
    items.extend(parse_node((&node.name).into(), context));
    if let Some(type_args) = &node.type_args {
        items.extend(parse_node(type_args.into(), context));
    }
    if !node.attrs.is_empty() {
        items.extend(parse_separated_values(ParseSeparatedValuesOptions {
            nodes: node.attrs.iter().map(|p| Some(p.into())).collect(),
            prefer_hanging: context.config.jsx_attributes_prefer_hanging,
            force_use_new_lines,
            allow_blank_lines: false,
            separator: Separator::none(),
            single_line_space_at_start: true,
            // self-closing needs a space before the trailing `/`
            single_line_space_at_end: node.self_closing,
            custom_single_line_separator: None,
            multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
            force_possible_newline_at_start: false,
        }, context));
    } else {
        if node.self_closing {
            items.push_str(" ");
        }
    }
    if node.self_closing {
        items.push_str("/");
    } else {
        // when hanging, put the closing `>` on its own line if the header wrapped
        if context.config.jsx_attributes_prefer_hanging {
            items.push_condition(conditions::new_line_if_hanging(start_info, None));
        }
    }
    items.push_str(">");
    return items;

    // Multi-line is forced when the source already puts the first attribute
    // on a different line than the tag name (unless single-line is preferred).
    fn get_force_is_multi_line(node: &JSXOpeningElement, context: &mut Context) -> bool {
        if context.config.jsx_attributes_prefer_single_line {
            false
        } else if let Some(first_attrib) = node.attrs.first() {
            node_helpers::get_use_new_lines_for_nodes(&node.name, first_attrib, context)
        } else {
            false
        }
    }
}
// Emits a JSX fragment's opening tag, which is always the literal `<>`.
fn parse_jsx_opening_fragment<'a>(_: &'a JSXOpeningFragment, _: &mut Context<'a>) -> PrintItems {
    "<>".into()
}
// Emits a JSX spread child as `{...expr}`.
fn parse_jsx_spread_child<'a>(node: &'a JSXSpreadChild, context: &mut Context<'a>) -> PrintItems {
    let mut spread_items = PrintItems::new();
    spread_items.push_str("...");
    spread_items.extend(parse_node((&node.expr).into(), context));
    parse_as_jsx_expr_container(spread_items, context)
}
// Re-flows JSX text: runs of consecutive non-blank source lines become one
// logical line whose words are separated by `SpaceOrNewLine`, and blank lines
// in the source become paragraph breaks (two newlines).
fn parse_jsx_text<'a>(node: &'a JSXText, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    for (i, line) in get_lines(node.text(context)).into_iter().enumerate() {
        if i > 0 {
            // paragraph separator between logical lines
            items.push_signal(Signal::NewLine);
            items.push_signal(Signal::NewLine);
        }
        let mut was_last_space_or_newline = true;
        for word in line.split(' ') {
            if !was_last_space_or_newline {
                items.push_signal(Signal::SpaceOrNewLine);
                was_last_space_or_newline = true;
            }
            if !word.is_empty() {
                items.push_str(word);
                was_last_space_or_newline = false;
            }
        }
    }
    return parser_helpers::new_line_group(items);

    // Splits the text into logical lines: adjacent non-blank source lines are
    // joined with single spaces; one or more blank lines start a new entry.
    fn get_lines(node_text: &str) -> Vec<String> {
        let mut past_line: Option<&str> = None;
        let lines = node_text.trim().lines().map(|line| line.trim());
        let mut result = Vec::new();
        let mut current_line = String::new();
        for line in lines {
            // a blank line was just seen and new content starts -> flush the paragraph
            if !line.is_empty()
                && past_line.map_or(false, |past| past.is_empty())
                && !current_line.is_empty()
            {
                result.push(current_line);
                current_line = String::new();
            }
            if !line.is_empty() {
                if !current_line.is_empty() {
                    current_line.push(' ');
                }
                current_line.push_str(line);
            }
            past_line = Some(line);
        }
        if !current_line.is_empty() {
            result.push(current_line);
        }
        result
    }
}
/* literals */
// Emits a bigint literal's source text verbatim (e.g. `123n`).
fn parse_big_int_literal<'a>(node: &'a BigInt, context: &mut Context<'a>) -> PrintItems {
    node.text(context).into()
}
// Emits the `true`/`false` keyword for a boolean literal.
fn parse_bool_literal(node: &Bool) -> PrintItems {
    let keyword = if node.value { "true" } else { "false" };
    keyword.into()
}
// Emits a numeric literal's source text verbatim (preserves radix/format).
fn parse_num_literal<'a>(node: &'a Number, context: &mut Context<'a>) -> PrintItems {
    node.text(context).into()
}
// Emits a regular expression literal as `/exp/flags`.
fn parse_reg_exp_literal(node: &Regex, _: &mut Context) -> PrintItems {
    // the exp and flags should not be nodes so just ignore that (swc issue #511)
    let mut result = PrintItems::new();
    result.push_str("/");
    result.push_str(&node.exp as &str);
    result.push_str("/");
    result.push_str(&node.flags as &str);
    result
}
// Re-emits a string literal using the configured quote style, re-escaping
// the contents for whichever quote character ends up being used.
fn parse_string_literal<'a>(node: &'a Str, context: &mut Context<'a>) -> PrintItems {
    return parse_raw_string(&get_string_literal_text(get_string_value(&node, context), context));

    // Wraps the unquoted string value per `quote_style`. The "prefer" styles
    // pick whichever quote needs fewer escapes, falling back to the preferred
    // one on a tie.
    fn get_string_literal_text(string_value: String, context: &mut Context) -> String {
        return match context.config.quote_style {
            QuoteStyle::AlwaysDouble => format_with_double(string_value),
            QuoteStyle::AlwaysSingle => format_with_single(string_value),
            QuoteStyle::PreferDouble => if double_to_single(&string_value) <= 0 {
                format_with_double(string_value)
            } else {
                format_with_single(string_value)
            },
            QuoteStyle::PreferSingle => if double_to_single(&string_value) >= 0 {
                format_with_single(string_value)
            } else {
                format_with_double(string_value)
            },
        };
        fn format_with_double(string_value: String) -> String {
            format!("\"{}\"", string_value.replace("\"", "\\\""))
        }
        fn format_with_single(string_value: String) -> String {
            format!("'{}'", string_value.replace("'", "\\'"))
        }
        // Positive when the text contains more double quotes than single
        // quotes (so single-quoting would need fewer escapes), negative for
        // the reverse, zero on a tie.
        fn double_to_single(string_value: &str) -> i32 {
            let mut double_count = 0;
            let mut single_count = 0;
            for c in string_value.chars() {
                match c {
                    '"' => double_count += 1,
                    '\'' => single_count += 1,
                    _ => {},
                }
            }
            return double_count - single_count;
        }
    }

    // Strips the surrounding quotes from the raw source text and unescapes
    // whichever quote character the original literal used.
    fn get_string_value(node: &Str, context: &mut Context) -> String {
        let raw_string_text = node.text(context);
        let string_value = raw_string_text.chars().skip(1).take(raw_string_text.chars().count() - 2).collect::<String>();
        let is_double_quote = raw_string_text.chars().next().unwrap() == '"';
        match is_double_quote {
            true => string_value.replace("\\\"", "\""),
            false => string_value.replace("\\'", "'"),
        }
    }
}
/* module */
// Parses a module (file): an optional shebang line followed by the
// top-level statements. A blank line after the shebang is preserved when
// the source had one before the first statement.
fn parse_module<'a>(node: &'a Module, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if let Some(shebang) = &node.shebang {
        items.push_str("#!");
        items.push_str(&shebang as &str);
        items.push_signal(Signal::NewLine);
        if let Some(first_statement) = node.body.first() {
            if node_helpers::has_separating_blank_line(&node.span.lo(), first_statement, context) {
                items.push_signal(Signal::NewLine);
            }
        }
    }
    items.extend(parse_statements(node.span, node.body.iter().map(|x| x.into()), context));
    return items;
}
/* patterns */
// Emits an array destructuring pattern: `[a, , b]?: Type`. Holes in the
// pattern appear as `None` elements.
fn parse_array_pat<'a>(node: &'a ArrayPat, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_array_like_nodes(ParseArrayLikeNodesOptions {
        parent_span_data: node.span,
        nodes: node.elems.iter().map(|x| x.as_ref().map(|elem| elem.into())).collect(),
        prefer_hanging: context.config.array_pattern_prefer_hanging,
        prefer_single_line: context.config.array_pattern_prefer_single_line,
        trailing_commas: context.config.array_pattern_trailing_commas,
    }, context);
    if node.optional {
        result.push_str("?");
    }
    result.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    result
}
// Emits a default-value pattern: `left = right`.
fn parse_assign_pat<'a>(node: &'a AssignPat, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.left).into(), context);
    result.extend(parse_assignment((&node.right).into(), "=", context));
    result
}
// Emits an object pattern shorthand property with an optional default:
// `key` or `key = value`.
fn parse_assign_pat_prop<'a>(node: &'a AssignPatProp, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.key).into(), context);
    if let Some(value) = &node.value {
        result.extend(parse_assignment(value.into(), "=", context));
    }
    result
}
// Emits an object pattern key-value property: `key: value`.
fn parse_key_value_pat_prop<'a>(node: &'a KeyValuePatProp, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_node((&node.key).into(), context);
    result.extend(parse_assignment((&node.value).into(), ":", context));
    result
}
// Emits a rest pattern: `...arg: Type`.
fn parse_rest_pat<'a>(node: &'a RestPat, context: &mut Context<'a>) -> PrintItems {
    let mut result = PrintItems::new();
    result.push_str("...");
    result.extend(parse_node((&node.arg).into(), context));
    result.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    result
}
// Emits an object destructuring pattern: `{ a, ...rest }?: Type`. A trailing
// comma is never emitted after a rest element (that would be a syntax error).
fn parse_object_pat<'a>(node: &'a ObjectPat, context: &mut Context<'a>) -> PrintItems {
    let mut result = parse_object_like_node(ParseObjectLikeNodeOptions {
        node_span_data: node.span,
        members: node.props.iter().map(|x| x.into()).collect(),
        separator: get_trailing_commas(node, context).into(),
        prefer_hanging: context.config.object_pattern_prefer_hanging,
        prefer_single_line: context.config.object_pattern_prefer_single_line,
        surround_single_line_with_spaces: true,
    }, context);
    if node.optional {
        result.push_str("?");
    }
    result.extend(parse_type_ann_with_colon_if_exists(&node.type_ann, context));
    return result;

    // No trailing comma when the last property is a rest element.
    fn get_trailing_commas(node: &ObjectPat, context: &Context) -> TrailingCommas {
        let ends_with_rest = node.props.last().map_or(false, |last| last.kind() == NodeKind::RestPat);
        if ends_with_rest {
            TrailingCommas::Never
        } else {
            context.config.object_pattern_trailing_commas
        }
    }
}
/* properties */
// Parses an object literal method (`method() { ... }`) as a method-like
// member, forwarding the function's async/generator flags and signature.
fn parse_method_prop<'a>(node: &'a MethodProp, context: &mut Context<'a>) -> PrintItems {
    return parse_class_or_object_method(ClassOrObjectMethod {
        parameters_span_data: node.get_parameters_span_data(context),
        decorators: None,
        accessibility: None,
        is_static: false,
        is_async: node.function.is_async,
        is_abstract: false,
        kind: ClassOrObjectMethodKind::Method,
        is_generator: node.function.is_generator,
        is_optional: false,
        key: (&node.key).into(),
        type_params: node.function.type_params.as_ref().map(|x| x.into()),
        params: node.function.params.iter().map(|x| x.into()).collect(),
        return_type: node.function.return_type.as_ref().map(|x| x.into()),
        body: node.function.body.as_ref().map(|x| x.into()),
    }, context);
}
/// Normalized inputs for `parse_class_or_object_method`, letting class
/// methods, object literal methods, constructors, and get/set accessors all
/// share a single parsing routine.
struct ClassOrObjectMethod<'a> {
    /// Span covering the parameter list, when one could be resolved.
    parameters_span_data: Option<Span>,
    /// Decorators preceding the method (class methods only; `None` otherwise).
    decorators: Option<&'a Vec<Decorator>>,
    /// `public` / `protected` / `private` modifier, if any.
    accessibility: Option<Accessibility>,
    /// Whether to emit the `static` keyword.
    is_static: bool,
    /// Whether to emit the `async` keyword.
    is_async: bool,
    /// Whether to emit the `abstract` keyword.
    is_abstract: bool,
    /// Method flavor (plain method, constructor, or get/set accessor).
    kind: ClassOrObjectMethodKind,
    /// Whether to emit `*` before the key (generator method).
    is_generator: bool,
    /// Whether to emit `?` after the key (optional method).
    is_optional: bool,
    /// The method name/key node.
    key: Node<'a>,
    /// Type parameter list, if present.
    type_params: Option<Node<'a>>,
    /// Parameter nodes in declaration order.
    params: Vec<Node<'a>>,
    /// Return type annotation, if present.
    return_type: Option<Node<'a>>,
    /// Body block; `None` for overload signatures / ambient declarations.
    body: Option<Node<'a>>,
}
/// Distinguishes how a class/object member is printed — the accessor keyword,
/// space-before-parens, and brace-position configuration all vary per kind.
enum ClassOrObjectMethodKind {
    /// A `get` accessor.
    Getter,
    /// A `set` accessor.
    Setter,
    /// A plain method.
    Method,
    /// A class constructor.
    Constructor,
}
impl From<MethodKind> for ClassOrObjectMethodKind {
    /// Converts the SWC accessor/method kind into the internal kind.
    /// `MethodKind` has no constructor variant, so `Constructor` is never
    /// produced by this conversion.
    fn from(kind: MethodKind) -> ClassOrObjectMethodKind {
        match kind {
            MethodKind::Getter => ClassOrObjectMethodKind::Getter,
            MethodKind::Setter => ClassOrObjectMethodKind::Setter,
            MethodKind::Method => ClassOrObjectMethodKind::Method,
        }
    }
}
/// Shared parser for class methods, object literal methods, constructors,
/// and get/set accessors. The header is emitted in source order: decorators,
/// accessibility, `static`/`abstract`/`async`, accessor keyword, `*` for
/// generators, key, `?`, type params, params, return type — then the body,
/// or a lone `;` for bodiless signatures when semi-colons are enabled.
fn parse_class_or_object_method<'a>(node: ClassOrObjectMethod<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if let Some(decorators) = node.decorators.as_ref() {
        items.extend(parse_decorators(decorators, false, context));
    }
    let start_header_info = Info::new("methodStartHeaderInfo");
    items.push_info(start_header_info);
    if let Some(accessibility) = node.accessibility {
        items.push_str(&format!("{} ", accessibility_to_str(&accessibility)));
    }
    if node.is_static { items.push_str("static "); }
    if node.is_abstract { items.push_str("abstract "); }
    if node.is_async { items.push_str("async "); }
    match node.kind {
        ClassOrObjectMethodKind::Getter => items.push_str("get "),
        ClassOrObjectMethodKind::Setter => items.push_str("set "),
        ClassOrObjectMethodKind::Method | ClassOrObjectMethodKind::Constructor => {},
    }
    if node.is_generator { items.push_str("*"); }
    items.extend(parse_node(node.key, context));
    if node.is_optional { items.push_str("?"); }
    if let Some(type_params) = node.type_params { items.extend(parse_node(type_params, context)); }
    if get_use_space_before_parens(&node.kind, context) { items.push_str(" "); }
    // capture the count before `node.params` is consumed below — the close
    // paren parser needs it to decide the return type layout
    let param_count = node.params.len();
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.parameters_span_data,
        nodes: node.params.into_iter().map(|node| node.into()).collect(),
        custom_close_paren: {
            let return_type = node.return_type;
            move |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
                start_info: start_header_info,
                type_node: return_type,
                type_node_separator: None,
                param_count,
            }, context))
        },
        is_parameters: true,
    }, context));
    if let Some(body) = node.body {
        let brace_position = get_brace_position(&node.kind, context);
        items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
            brace_position,
            open_brace_token: context.token_finder.get_first_open_brace_token_within(&body),
            start_header_info: Some(start_header_info),
        }, context));
        items.extend(parse_node(body, context));
    } else if context.config.semi_colons.is_true() {
        // no body (e.g. an overload signature or ambient declaration) —
        // terminate the header with a semi-colon
        items.push_str(";");
    }
    return items;

    // Space before the parameter parens is configurable per method kind.
    fn get_use_space_before_parens(kind: &ClassOrObjectMethodKind, context: &mut Context) -> bool {
        match kind {
            ClassOrObjectMethodKind::Constructor => context.config.constructor_space_before_parentheses,
            ClassOrObjectMethodKind::Getter => context.config.get_accessor_space_before_parentheses,
            ClassOrObjectMethodKind::Setter => context.config.set_accessor_space_before_parentheses,
            ClassOrObjectMethodKind::Method => context.config.method_space_before_parentheses,
        }
    }

    // Brace position is likewise configurable per method kind.
    fn get_brace_position(kind: &ClassOrObjectMethodKind, context: &mut Context) -> BracePosition {
        match kind {
            ClassOrObjectMethodKind::Constructor => context.config.constructor_brace_position,
            ClassOrObjectMethodKind::Getter => context.config.get_accessor_brace_position,
            ClassOrObjectMethodKind::Setter => context.config.set_accessor_brace_position,
            ClassOrObjectMethodKind::Method => context.config.method_brace_position,
        }
    }
}
/// Returns the TypeScript source keyword for an accessibility modifier.
fn accessibility_to_str(accessibility: &Accessibility) -> &str {
    match *accessibility {
        Accessibility::Public => "public",
        Accessibility::Protected => "protected",
        Accessibility::Private => "private",
    }
}
/* statements */
fn parse_block_stmt<'a>(node: &'a BlockStmt, context: &mut Context<'a>) -> PrintItems {
    // A standalone `{ ... }` block: delegate to the generic block parser and
    // let `parse_statements` lay out the inner statements.
    let options = ParseBlockOptions {
        span_data: Some(node.span),
        children: node.stmts.iter().map(|x| x.into()).collect(),
    };
    parse_block(
        |stmts, context| parse_statements(node.get_inner_span_data(context), stmts.into_iter(), context),
        options,
        context,
    )
}
fn parse_break_stmt<'a>(node: &'a BreakStmt, context: &mut Context<'a>) -> PrintItems {
    // `break` with an optional target label and a configurable semi-colon.
    let mut items: PrintItems = "break".into();
    if let Some(label) = node.label.as_ref() {
        items.push_str(" ");
        items.extend(parse_node(label.into(), context));
    }
    if context.config.semi_colons.is_true() { items.push_str(";"); }
    items
}
fn parse_continue_stmt<'a>(node: &'a ContinueStmt, context: &mut Context<'a>) -> PrintItems {
    // `continue` with an optional target label and a configurable semi-colon.
    let mut items: PrintItems = "continue".into();
    if let Some(label) = node.label.as_ref() {
        items.push_str(" ");
        items.extend(parse_node(label.into(), context));
    }
    if context.config.semi_colons.is_true() { items.push_str(";"); }
    items
}
fn parse_debugger_stmt<'a>(_: &'a DebuggerStmt, context: &mut Context<'a>) -> PrintItems {
let mut items = PrintItems::new();
items.push_str("debugger");
if context.config.semi_colons.is_true() {
items.push_str(";");
}
items
}
/// Parses a `do body while (test);` statement.
fn parse_do_while_stmt<'a>(node: &'a DoWhileStmt, context: &mut Context<'a>) -> PrintItems {
    // the braces are technically optional on do while statements
    let mut items = PrintItems::new();
    items.push_str("do");
    items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
        brace_position: context.config.do_while_statement_brace_position,
        // only look for an open brace token when the body is actually a
        // block statement; otherwise there is no brace to find
        open_brace_token: if let Stmt::Block(_) = &*node.body { context.token_finder.get_first_open_brace_token_within(node) } else { None },
        start_header_info: None,
    }, context));
    items.extend(parse_node((&node.body).into(), context));
    items.push_str(" while");
    if context.config.do_while_statement_space_after_while_keyword {
        items.push_str(" ");
    }
    // the test expression is always wrapped in parentheses: `while (test)`
    items.extend(parse_node_in_parens(
        |context| parse_node((&node.test).into(), context),
        ParseNodeInParensOptions {
            inner_span: node.test.span_data(),
            prefer_hanging: context.config.do_while_statement_prefer_hanging,
            allow_open_paren_trailing_comments: false,
        },
        context
    ));
    if context.config.semi_colons.is_true() {
        items.push_str(";");
    }
    return items;
}
fn parse_export_all<'a>(node: &'a ExportAll, context: &mut Context<'a>) -> PrintItems {
    // `export * from "module"` with a configurable trailing semi-colon.
    let mut items: PrintItems = "export * from ".into();
    items.extend(parse_node((&node.src).into(), context));
    if context.config.semi_colons.is_true() { items.push_str(";"); }
    items
}
fn parse_empty_stmt(_: &EmptyStmt, _: &mut Context) -> PrintItems {
";".into()
}
fn parse_export_assignment<'a>(node: &'a TsExportAssignment, context: &mut Context<'a>) -> PrintItems {
    // TypeScript `export = expr;` syntax.
    let mut items: PrintItems = "export".into();
    items.extend(parse_assignment((&node.expr).into(), "=", context));
    if context.config.semi_colons.is_true() { items.push_str(";"); }
    items
}
fn parse_namespace_export<'a>(node: &'a TsNamespaceExportDecl, context: &mut Context<'a>) -> PrintItems {
    // TypeScript `export as namespace Name;` UMD global export.
    let mut items: PrintItems = "export as namespace ".into();
    items.extend(parse_node((&node.id).into(), context));
    if context.config.semi_colons.is_true() { items.push_str(";"); }
    items
}
/// Parses an expression statement. When semi-colons are disabled, a *prefix*
/// semi-colon may still be inserted so statements beginning with characters
/// like `(` or `[` don't merge with the previous line under ASI rules.
fn parse_expr_stmt<'a>(stmt: &'a ExprStmt, context: &mut Context<'a>) -> PrintItems {
    if context.config.semi_colons.is_true() {
        return parse_inner(&stmt, context);
    } else {
        return parse_for_prefix_semi_colon_insertion(&stmt, context);
    }

    // parses the expression followed by a trailing `;` when configured
    fn parse_inner<'a>(stmt: &'a ExprStmt, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        items.extend(parse_node((&stmt.expr).into(), context));
        if context.config.semi_colons.is_true() {
            items.push_str(";");
        }
        return items;
    }

    // inspects the first printable character of the parsed expression and
    // prefixes a `;` when that character could fuse with the previous
    // statement (semi-colon-free formatting style)
    fn parse_for_prefix_semi_colon_insertion<'a>(stmt: &'a ExprStmt, context: &mut Context<'a>) -> PrintItems {
        let parsed_node = parse_inner(&stmt, context);
        let parsed_node = parsed_node.into_rc_path();
        return if should_add_semi_colon(&parsed_node).unwrap_or(false) {
            let mut items = PrintItems::new();
            items.push_str(";");
            items.extend(parsed_node.into());
            items
        } else {
            parsed_node.into()
        };

        // walks the print items looking for the first string item and decides
        // based on its first character; `None` means "undecidable so far"
        fn should_add_semi_colon(path: &Option<PrintItemPath>) -> Option<bool> {
            // todo: this needs to be improved
            if let Some(path) = path {
                for item in PrintItemsIterator::new(path.clone()) {
                    match item {
                        PrintItem::String(value) => {
                            if let Some(c) = value.text.chars().next() {
                                return utils::is_prefix_semi_colon_insertion_char(c).into();
                            }
                        },
                        PrintItem::Condition(condition) => {
                            // It's an assumption here that the true and false paths of the
                            // condition will both contain the same text to look for. This is probably not robust
                            // and perhaps instead there should be a way to do something like "get the next character" in
                            // the printer.
                            if let Some(result) = should_add_semi_colon(&condition.get_true_path()) {
                                return Some(result);
                            }
                            if let Some(result) = should_add_semi_colon(&condition.get_false_path()) {
                                return Some(result);
                            }
                        },
                        PrintItem::RcPath(items) => {
                            // recurse into nested print item paths
                            if let Some(result) = should_add_semi_colon(&Some(items)) {
                                return Some(result);
                            }
                        },
                        _ => { /* do nothing */ },
                    }
                }
            }
            None
        }
    }
}
/// Parses a C-style `for (init; test; update) body` statement.
fn parse_for_stmt<'a>(node: &'a ForStmt, context: &mut Context<'a>) -> PrintItems {
    let start_header_info = Info::new("startHeader");
    let end_header_info = Info::new("endHeader");
    // span of the first thing inside the parens — the init clause when
    // present, otherwise the first semi-colon token
    let first_inner_node = {
        if let Some(init) = &node.init {
            init.span_data()
        } else {
            context.token_finder.get_first_semi_colon_within(node).expect("Expected to find a semi-colon in for stmt.").span
        }
    };
    // span of the last thing inside the parens — the update clause when
    // present, otherwise the second semi-colon token, whose location is
    // searched differently depending on which of init/test exist
    let last_inner_node = {
        if let Some(update) = &node.update {
            update.span_data()
        } else if let Some(test) = &node.test {
            context.token_finder.get_first_semi_colon_after(&test.span()).expect("Expected to find second semi-colon in for stmt.").span
        } else if let Some(init) = &node.init {
            let first_semi_colon = context.token_finder.get_first_semi_colon_after(init).expect("Expected to find a semi-colon in for stmt.");
            context.token_finder.get_first_semi_colon_after(&first_semi_colon.span).expect("Expected to find second semi-colon in for stmt.").span
        } else {
            context.token_finder.get_first_semi_colon_after(&first_inner_node).expect("Expected to find second semi-colon in for stmt.").span
        }
    };
    let force_use_new_lines = get_use_new_lines(&first_inner_node, context);
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    items.push_str("for");
    if context.config.for_statement_space_after_for_keyword {
        items.push_str(" ");
    }
    let separator_after_semi_colons = if context.config.for_statement_space_after_semi_colons { Signal::SpaceOrNewLine } else { Signal::PossibleNewLine };
    // each clause is parsed up front as its own new-line group; each `;`
    // separator is attached to the end of the preceding clause
    let parsed_init = parser_helpers::new_line_group({
        let mut items = PrintItems::new();
        if let Some(init) = &node.init {
            items.extend(parse_node(init.into(), context));
        }
        items.push_str(";");
        // when there is no test clause, its semi-colon also belongs here
        if node.test.is_none() { items.push_str(";"); }
        items
    });
    let parsed_test = if let Some(test) = &node.test {
        Some(parser_helpers::new_line_group({
            let mut items = PrintItems::new();
            items.extend(parse_node(test.into(), context));
            items.push_str(";");
            items
        }))
    } else {
        None
    };
    let parsed_update = if let Some(update) = &node.update {
        Some(parser_helpers::new_line_group(parse_node(update.into(), context)).into())
    } else {
        None
    };
    items.extend(parse_node_in_parens(
        |context| {
            // lay the clauses out as separated values so they hang or break
            // onto multiple lines consistently
            parser_helpers::parse_separated_values(move |_| {
                let mut parsed_nodes = Vec::new();
                parsed_nodes.push(parser_helpers::ParsedValue::from_items(parsed_init));
                if let Some(parsed_test) = parsed_test { parsed_nodes.push(parser_helpers::ParsedValue::from_items(parsed_test)); }
                if let Some(parsed_update) = parsed_update { parsed_nodes.push(parser_helpers::ParsedValue::from_items(parsed_update)); }
                parsed_nodes
            }, parser_helpers::ParseSeparatedValuesOptions {
                prefer_hanging: context.config.for_statement_prefer_hanging,
                force_use_new_lines,
                allow_blank_lines: false,
                single_line_space_at_start: false,
                single_line_space_at_end: false,
                single_line_separator: separator_after_semi_colons.into(),
                indent_width: context.config.indent_width,
                multi_line_options: parser_helpers::MultiLineOptions::same_line_no_indent(),
                force_possible_newline_at_start: false,
            }).items
        },
        ParseNodeInParensOptions {
            inner_span: create_span_data(first_inner_node.lo(), last_inner_node.hi()),
            prefer_hanging: context.config.for_statement_prefer_hanging,
            allow_open_paren_trailing_comments: false,
        },
        context
    ));
    items.push_info(end_header_info);
    items.extend(parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: node.span,
        body_node: (&node.body).into(),
        use_braces: context.config.for_statement_use_braces,
        brace_position: context.config.for_statement_brace_position,
        single_body_position: Some(context.config.for_statement_single_body_position),
        requires_braces_condition_ref: None,
        header_start_token: None,
        start_header_info: Some(start_header_info),
        end_header_info: Some(end_header_info),
    }, context).parsed_node);
    return items;

    // force multi-line when the source had the open paren and the first
    // clause on separate lines (unless single-line output is preferred)
    fn get_use_new_lines<'a>(node: &dyn Ranged, context: &mut Context<'a>) -> bool {
        if context.config.for_statement_prefer_single_line {
            return false;
        }
        let open_paren_token = context.token_finder.get_previous_token_if_open_paren(node);
        if let Some(open_paren_token) = open_paren_token {
            node_helpers::get_use_new_lines_for_nodes(open_paren_token, node, context)
        } else {
            false
        }
    }
}
fn parse_for_in_stmt<'a>(node: &'a ForInStmt, context: &mut Context<'a>) -> PrintItems {
    // Header `for (left in right)` followed by a conditional-brace body.
    let start_header_info = Info::new("startHeader");
    let end_header_info = Info::new("endHeader");
    let inner_header_span = create_span_data(node.left.lo(), node.right.hi());
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    items.push_str("for");
    if context.config.for_in_statement_space_after_for_keyword { items.push_str(" "); }
    items.extend(parse_node_in_parens(
        |context| {
            let mut header_items = PrintItems::new();
            header_items.extend(parse_node((&node.left).into(), context));
            header_items.push_signal(Signal::SpaceOrNewLine);
            // indent the `in ...` portion when it lands on a new line
            let right_items = {
                let mut right_items = PrintItems::new();
                right_items.push_str("in ");
                right_items.extend(parse_node((&node.right).into(), context));
                right_items
            };
            header_items.push_condition(conditions::indent_if_start_of_line(right_items));
            header_items
        },
        ParseNodeInParensOptions {
            inner_span: inner_header_span,
            prefer_hanging: context.config.for_in_statement_prefer_hanging,
            allow_open_paren_trailing_comments: false,
        },
        context
    ));
    items.push_info(end_header_info);
    let body = parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: node.span,
        body_node: (&node.body).into(),
        use_braces: context.config.for_in_statement_use_braces,
        brace_position: context.config.for_in_statement_brace_position,
        single_body_position: Some(context.config.for_in_statement_single_body_position),
        requires_braces_condition_ref: None,
        header_start_token: None,
        start_header_info: Some(start_header_info),
        end_header_info: Some(end_header_info),
    }, context);
    items.extend(body.parsed_node);
    items
}
fn parse_for_of_stmt<'a>(node: &'a ForOfStmt, context: &mut Context<'a>) -> PrintItems {
    // Header `for [await] (left of right)` followed by a conditional-brace body.
    let start_header_info = Info::new("startHeader");
    let end_header_info = Info::new("endHeader");
    let inner_header_span = create_span_data(node.left.lo(), node.right.hi());
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    items.push_str("for");
    if context.config.for_of_statement_space_after_for_keyword { items.push_str(" "); }
    if let Some(await_token) = &node.await_token {
        items.extend(parse_node(await_token.into(), context));
        items.push_str(" ");
    }
    items.extend(parse_node_in_parens(
        |context| {
            let mut header_items = PrintItems::new();
            header_items.extend(parse_node((&node.left).into(), context));
            header_items.push_signal(Signal::SpaceOrNewLine);
            // indent the `of ...` portion when it lands on a new line
            let right_items = {
                let mut right_items = PrintItems::new();
                right_items.push_str("of ");
                right_items.extend(parse_node((&node.right).into(), context));
                right_items
            };
            header_items.push_condition(conditions::indent_if_start_of_line(right_items));
            header_items
        },
        ParseNodeInParensOptions {
            inner_span: inner_header_span,
            prefer_hanging: context.config.for_of_statement_prefer_hanging,
            allow_open_paren_trailing_comments: false,
        },
        context
    ));
    items.push_info(end_header_info);
    let body = parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: node.span,
        body_node: (&node.body).into(),
        use_braces: context.config.for_of_statement_use_braces,
        brace_position: context.config.for_of_statement_brace_position,
        single_body_position: Some(context.config.for_of_statement_single_body_position),
        requires_braces_condition_ref: None,
        header_start_token: None,
        start_header_info: Some(start_header_info),
        end_header_info: Some(end_header_info),
    }, context);
    items.extend(body.parsed_node);
    items
}
/// Parses an `if` statement, including any `else`/`else if` chain hanging off
/// of it (`else if` is handled by recursing into `parse_node` for the alt).
fn parse_if_stmt<'a>(node: &'a IfStmt, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let cons = &*node.cons;
    let cons_span_data = cons.span_data();
    let result = parse_header_with_conditional_brace_body(ParseHeaderWithConditionalBraceBodyOptions {
        parent: node.span,
        body_node: cons.into(),
        parsed_header: {
            let mut items = PrintItems::new();
            items.push_str("if");
            if context.config.if_statement_space_after_if_keyword { items.push_str(" "); }
            let test = &*node.test;
            items.extend(parse_node_in_parens(
                |context| parse_node(test.into(), context),
                ParseNodeInParensOptions {
                    inner_span: test.span_data(),
                    prefer_hanging: context.config.if_statement_prefer_hanging,
                    allow_open_paren_trailing_comments: false,
                },
                context
            ));
            items
        },
        use_braces: context.config.if_statement_use_braces,
        brace_position: context.config.if_statement_brace_position,
        single_body_position: Some(context.config.if_statement_single_body_position),
        // a brace condition stored by an ancestor `if` forces this statement
        // to use braces so a whole `if`/`else if` chain braces consistently
        requires_braces_condition_ref: context.take_if_stmt_last_brace_condition_ref(),
    }, context);
    let if_stmt_start_info = Info::new("ifStmtStart");
    items.push_info(if_stmt_start_info);
    items.extend(result.parsed_node);
    if let Some(alt) = &node.alt {
        let alt = &**alt;
        if let Stmt::If(alt_alt) = alt {
            if alt_alt.alt.is_none() {
                // the chain ends at the next `else if` — remember this
                // statement's open brace condition so the final `if` in the
                // chain can match its bracing
                context.store_if_stmt_last_brace_condition_ref(result.open_brace_condition_ref);
            }
        }
        items.extend(parse_control_flow_separator(
            context.config.if_statement_next_control_flow_position,
            &cons_span_data,
            "else",
            if_stmt_start_info,
            Some(result.close_brace_condition_ref),
            context
        ));
        // parse the leading comments before the else keyword
        let else_keyword = context.token_finder.get_first_else_keyword_within(&create_span_data(cons_span_data.hi, alt.lo())).expect("Expected to find an else keyword.");
        items.extend(parse_leading_comments(else_keyword, context));
        items.extend(parse_leading_comments(alt, context));
        let start_else_header_info = Info::new("startElseHeader");
        items.push_info(start_else_header_info);
        items.push_str("else");
        if let Stmt::If(alt) = alt {
            // `else if` — recurse so the nested if parses its own header
            items.push_str(" ");
            items.extend(parse_node(alt.into(), context));
        } else {
            // plain `else` — body with conditional braces like the consequent
            items.extend(parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
                parent: node.span,
                body_node: alt.into(),
                use_braces: context.config.if_statement_use_braces,
                brace_position: context.config.if_statement_brace_position,
                single_body_position: Some(context.config.if_statement_single_body_position),
                requires_braces_condition_ref: Some(result.open_brace_condition_ref),
                header_start_token: Some(else_keyword),
                start_header_info: Some(start_else_header_info),
                end_header_info: None,
            }, context).parsed_node);
        }
    }
    return items;
}
fn parse_labeled_stmt<'a>(node: &'a LabeledStmt, context: &mut Context<'a>) -> PrintItems {
    // `label:` followed by the labelled statement; block bodies stay on the
    // same line, any other body goes to the next line.
    let mut items = PrintItems::new();
    items.extend(parse_node((&node.label).into(), context));
    items.push_str(":");
    // not bothering to make this configurable, because who uses labeled statements?
    match node.body.kind() {
        NodeKind::BlockStmt => items.push_str(" "),
        _ => items.push_signal(Signal::NewLine),
    }
    items.extend(parse_node((&node.body).into(), context));
    items
}
fn parse_return_stmt<'a>(node: &'a ReturnStmt, context: &mut Context<'a>) -> PrintItems {
    // `return` with an optional argument and a configurable semi-colon.
    let mut items: PrintItems = "return".into();
    if let Some(arg) = node.arg.as_ref() {
        items.push_str(" ");
        items.extend(parse_node(arg.into(), context));
    }
    if context.config.semi_colons.is_true() {
        items.push_str(";");
    }
    items
}
fn parse_switch_stmt<'a>(node: &'a SwitchStmt, context: &mut Context<'a>) -> PrintItems {
    // `switch (discriminant) { cases... }` — the case list goes through the
    // shared membered-body parser.
    let start_header_info = Info::new("startHeader");
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    items.push_str("switch ");
    let parens_options = ParseNodeInParensOptions {
        inner_span: node.discriminant.span_data(),
        prefer_hanging: context.config.switch_statement_prefer_hanging,
        allow_open_paren_trailing_comments: false,
    };
    items.extend(parse_node_in_parens(
        |context| parse_node((&node.discriminant).into(), context),
        parens_options,
        context
    ));
    items.extend(parse_membered_body(ParseMemberedBodyOptions {
        span_data: node.span,
        members: node.cases.iter().map(|x| x.into()).collect(),
        start_header_info: Some(start_header_info),
        brace_position: context.config.switch_statement_brace_position,
        should_use_blank_line: |previous, next, context| {
            // skip the blank line when the previous case has an empty body so
            // fall-through case labels stay packed together
            match previous {
                Node::SwitchCase(previous) if previous.cons.is_empty() => false,
                _ => node_helpers::has_separating_blank_line(previous, next, context),
            }
        },
        separator: Separator::none(),
    }, context));
    items
}
/// Parses a single `case expr:` or `default:` clause, including its body
/// statements and any trailing comments belonging to the clause.
fn parse_switch_case<'a>(node: &'a SwitchCase, context: &mut Context<'a>) -> PrintItems {
    // when the case body is exactly one block statement, the braces stay
    // with the case header instead of indenting the statements
    let block_stmt_body = get_block_stmt_body(&node);
    let start_header_info = Info::new("switchCaseStartHeader");
    let mut items = PrintItems::new();
    // locate the `:` terminating the header (after the test expression, or
    // after the node start for a `default` clause)
    let colon_token = context.token_finder.get_first_colon_token_after(&if let Some(test) = &node.test {
        test.span().hi()
    } else {
        node.span.lo()
    }).expect("Expected to find a colon token.");
    items.push_info(start_header_info);
    if let Some(test) = &node.test {
        items.push_str("case ");
        items.extend(parse_node(test.into(), context));
        items.push_str(":");
    } else {
        items.push_str("default:");
    }
    items.extend(parse_first_line_trailing_comments(&node.span, node.cons.get(0).map(|x| x.span_data()), context));
    // parsed up front (before the body consumes comments) but appended last
    let parsed_trailing_comments = parse_trailing_comments_for_case(node.span_data(), &block_stmt_body, context);
    if !node.cons.is_empty() {
        if let Some(block_stmt_body) = block_stmt_body {
            // sole block statement body — keep the braces with the header
            items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
                brace_position: context.config.switch_case_brace_position,
                open_brace_token: context.token_finder.get_first_open_brace_token_within(&block_stmt_body),
                start_header_info: None,
            }, context));
            items.extend(parse_node(node.cons.iter().next().unwrap().into(), context));
        } else {
            // regular statements — newline then an indented statement list
            items.push_signal(Signal::NewLine);
            items.extend(parser_helpers::with_indent(parse_statements_or_members(ParseStatementsOrMembersOptions {
                inner_span_data: create_span_data(colon_token.hi(), node.span.hi()),
                items: node.cons.iter().map(|node| (node.into(), None)).collect(),
                should_use_space: None,
                should_use_new_line: None,
                should_use_blank_line: |previous, next, context| node_helpers::has_separating_blank_line(previous, next, context),
                separator: Separator::none(),
            }, context)));
        }
    }
    items.extend(parsed_trailing_comments);
    return items;

    // the case has a "block statement body" only when its body is exactly
    // one block statement, e.g. `case 1: { ... }`
    fn get_block_stmt_body(node: &SwitchCase) -> Option<Span> {
        let first_cons = node.cons.get(0);
        if let Some(Stmt::Block(block_stmt)) = first_cons {
            if node.cons.len() == 1 {
                return Some(block_stmt.span);
            }
        }
        return None;
    }

    fn parse_trailing_comments_for_case<'a>(node_span_data: Span, block_stmt_body: &Option<Span>, context: &mut Context<'a>) -> PrintItems {
        let mut items = PrintItems::new();
        // parse the trailing comments as statements
        let trailing_comments = get_trailing_comments_as_statements(&node_span_data, context);
        if !trailing_comments.is_empty() {
            if let Node::SwitchStmt(stmt) = context.parent() {
                let last_case = stmt.cases.iter().last();
                let is_last_case = match last_case { Some(last_case) => last_case.lo() == node_span_data.lo, _=> false };
                let mut is_equal_indent = block_stmt_body.is_some();
                let mut last_node = node_span_data;
                for comment in trailing_comments {
                    // a comment starting at or before the previous node's
                    // column is treated as belonging to the case level
                    is_equal_indent = is_equal_indent || comment.start_column(context) <= last_node.start_column(context);
                    let parsed_comment = parse_comment_based_on_last_node(&comment, &Some(&last_node), ParseCommentBasedOnLastNodeOptions {
                        separate_with_newlines: true
                    }, context);
                    // comments on the last case (or not at case indentation)
                    // get indented into the case body instead
                    items.extend(if !is_last_case && is_equal_indent {
                        parsed_comment
                    } else {
                        parser_helpers::with_indent(parsed_comment)
                    });
                    last_node = comment.span_data();
                }
            }
        }
        return items;
    }
}
fn parse_throw_stmt<'a>(node: &'a ThrowStmt, context: &mut Context<'a>) -> PrintItems {
    // `throw <arg>` with a configurable trailing semi-colon.
    let mut items: PrintItems = "throw ".into();
    items.extend(parse_node((&node.arg).into(), context));
    if context.config.semi_colons.is_true() {
        items.push_str(";");
    }
    items
}
/// Parses a `try { } catch { } finally { }` statement. The catch clause node
/// parses itself; this function emits the keywords, the brace-positioned
/// blocks, and the control-flow separators between them.
fn parse_try_stmt<'a>(node: &'a TryStmt, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let brace_position = context.config.try_statement_brace_position;
    let next_control_flow_position = context.config.try_statement_next_control_flow_position;
    // tracks the previous block's span/start info so the separator before
    // `catch`/`finally` can decide between same-line and next-line placement
    let mut last_block_span_data = node.block.span;
    let mut last_block_start_info = Info::new("tryStart");
    items.push_info(last_block_start_info);
    items.push_str("try");
    items.extend(parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: node.span,
        body_node: (&node.block).into(),
        use_braces: UseBraces::Always, // braces required
        brace_position,
        single_body_position: Some(SingleBodyPosition::NextLine),
        requires_braces_condition_ref: None,
        header_start_token: None,
        start_header_info: None,
        end_header_info: None,
    }, context).parsed_node);
    if let Some(handler) = &node.handler {
        let handler_start_info = Info::new("handlerStart");
        items.push_info(handler_start_info);
        items.extend(parse_control_flow_separator(
            next_control_flow_position,
            &last_block_span_data,
            "catch",
            last_block_start_info,
            None,
            context
        ));
        last_block_span_data = handler.span;
        items.extend(parse_node(handler.into(), context));
        // set the next block to check the handler start info
        last_block_start_info = handler_start_info;
    }
    if let Some(finalizer) = &node.finalizer {
        items.extend(parse_control_flow_separator(
            next_control_flow_position,
            &last_block_span_data,
            "finally",
            last_block_start_info,
            None,
            context
        ));
        items.push_str("finally");
        items.extend(parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
            parent: node.span,
            body_node: finalizer.into(),
            use_braces: UseBraces::Always, // braces required
            brace_position,
            single_body_position: Some(SingleBodyPosition::NextLine),
            requires_braces_condition_ref: None,
            header_start_token: None,
            start_header_info: None,
            end_header_info: None,
        }, context).parsed_node);
    }
    return items;
}
/// Parses a `var`/`let`/`const` declaration statement, including `declare`
/// ambient declarations and multi-declarator statements.
fn parse_var_decl<'a>(node: &'a VarDecl, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let force_use_new_lines = get_use_new_lines(&node.decls, context);
    if node.declare { items.push_str("declare "); }
    items.push_str(match node.kind {
        VarDeclKind::Const => "const ",
        VarDeclKind::Let => "let ",
        VarDeclKind::Var => "var ",
    });
    let decls_len = node.decls.len();
    if decls_len == 1 {
        // be lightweight by default
        items.extend(parse_node((&node.decls[0]).into(), context));
    } else if decls_len > 1 {
        items.extend(parse_separated_values(ParseSeparatedValuesOptions {
            nodes: node.decls.iter().map(|p| Some(p.into())).collect(),
            prefer_hanging: context.config.variable_statement_prefer_hanging,
            force_use_new_lines,
            allow_blank_lines: false,
            separator: TrailingCommas::Never.into(),
            single_line_space_at_start: false,
            single_line_space_at_end: false,
            custom_single_line_separator: None,
            multi_line_options: parser_helpers::MultiLineOptions::same_line_start_hanging_indent(),
            force_possible_newline_at_start: false,
        }, context));
    }
    if requires_semi_colon(&node.span, context) { items.push_str(";"); }
    return items;

    // A var decl inside a `for`/`for-in`/`for-of` *header* must not get a
    // semi-colon; one nested inside the loop *body* still does.
    fn requires_semi_colon(var_decl_span_data: &Span, context: &mut Context) -> bool {
        let use_semi_colons = context.config.semi_colons.is_true();
        use_semi_colons && match context.parent() {
            Node::ForInStmt(node) => var_decl_span_data.lo >= node.body.span().lo(),
            Node::ForOfStmt(node) => var_decl_span_data.lo >= node.body.span().lo(),
            Node::ForStmt(node) => var_decl_span_data.lo >= node.body.span().lo(),
            // already guarded by `use_semi_colons &&` above
            _ => true,
        }
    }

    // force multi-line output when the source already spread the
    // declarators over multiple lines (unless single-line is preferred)
    fn get_use_new_lines(decls: &Vec<VarDeclarator>, context: &mut Context) -> bool {
        get_use_new_lines_for_nodes(decls, context.config.variable_statement_prefer_single_line, context)
    }
}
/// Parses a single `name = init` declarator within a variable statement.
fn parse_var_declarator<'a>(node: &'a VarDeclarator, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    items.extend(parse_node((&node.name).into(), context));
    if let Some(init) = &node.init {
        items.extend(parse_assignment(init.into(), "=", context));
    }
    // Indent the first variable declarator when there are multiple.
    // Not ideal, but doing this here because of the abstraction used in
    // `parse_var_decl`. In the future this should probably be moved away.
    if let Node::VarDecl(var_dec) = context.parent() {
        if var_dec.decls.len() > 1 && &var_dec.decls[0] == node {
            let items = items.into_rc_path();
            // only indent when the declarator did not start the line (i.e.
            // it follows `const `/`let `/`var ` on the same line)
            if_true_or(
                "indentIfNotStartOfLine",
                |context| Some(!condition_resolvers::is_start_of_line(context)),
                with_indent(items.clone().into()),
                items.into(),
            ).into()
        } else {
            items
        }
    } else {
        items
    }
}
fn parse_while_stmt<'a>(node: &'a WhileStmt, context: &mut Context<'a>) -> PrintItems {
    // Header `while (test)` followed by a conditional-brace body.
    let start_header_info = Info::new("startHeader");
    let end_header_info = Info::new("endHeader");
    let mut items = PrintItems::new();
    items.push_info(start_header_info);
    items.push_str("while");
    if context.config.while_statement_space_after_while_keyword { items.push_str(" "); }
    let parens_options = ParseNodeInParensOptions {
        inner_span: node.test.span_data(),
        prefer_hanging: context.config.while_statement_prefer_hanging,
        allow_open_paren_trailing_comments: false,
    };
    items.extend(parse_node_in_parens(
        |context| parse_node((&node.test).into(), context),
        parens_options,
        context
    ));
    items.push_info(end_header_info);
    let body = parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: node.span,
        body_node: (&node.body).into(),
        use_braces: context.config.while_statement_use_braces,
        brace_position: context.config.while_statement_brace_position,
        single_body_position: Some(context.config.while_statement_single_body_position),
        requires_braces_condition_ref: None,
        header_start_token: None,
        start_header_info: Some(start_header_info),
        end_header_info: Some(end_header_info),
    }, context);
    items.extend(body.parsed_node);
    items
}
/* types */
fn parse_array_type<'a>(node: &'a TsArrayType, context: &mut Context<'a>) -> PrintItems {
    // `Elem[]` — the element type followed by the square bracket pair.
    let mut items = parse_node((&node.elem_type).into(), context);
    items.push_str("[]");
    items
}
/// Parses a conditional type: `Check extends Extends ? True : False`.
/// Nested conditionals chained through the false branch share a "top most"
/// info so multi-line formatting is decided for the whole chain at once.
fn parse_conditional_type<'a>(node: &'a TsConditionalType, context: &mut Context<'a>) -> PrintItems {
    let use_new_lines = !context.config.conditional_type_prefer_single_line
        && node_helpers::get_use_new_lines_for_nodes(&*node.true_type, &*node.false_type, context);
    let top_most_data = get_top_most_data(node, context);
    let is_parent_conditional_type = context.parent().kind() == NodeKind::TsConditionalType;
    let mut items = PrintItems::new();
    let before_false_info = Info::new("beforeFalse");
    // main area
    items.extend(parser_helpers::new_line_group(parse_node((&node.check_type).into(), context)));
    items.push_str(" extends"); // do not newline before because it's a parsing error
    items.push_signal(Signal::SpaceOrNewLine);
    if top_most_data.is_top_most {
        items.push_info(top_most_data.top_most_info);
    }
    items.push_condition(conditions::indent_if_start_of_line(parser_helpers::new_line_group(parse_node((&node.extends_type).into(), context))));
    items.push_signal(Signal::SpaceOrNewLine);
    items.push_condition(conditions::indent_if_start_of_line({
        let mut items = PrintItems::new();
        items.push_str("? ");
        items.extend(parser_helpers::new_line_group(parse_node((&node.true_type).into(), context)));
        items
    }));
    // false type
    if use_new_lines {
        items.push_signal(Signal::NewLine);
    } else {
        // break before `:` only when the chain has already gone multi-line
        items.push_condition(conditions::new_line_if_multiple_lines_space_or_new_line_otherwise(top_most_data.top_most_info, Some(before_false_info)));
    }
    let false_type_parsed = {
        let mut items = PrintItems::new();
        items.push_info(before_false_info);
        items.push_str(": ");
        items.extend(parser_helpers::new_line_group(parse_node((&node.false_type).into(), context)));
        items
    };
    if is_parent_conditional_type {
        // nested false branches align with the parent, so no extra indent
        items.extend(false_type_parsed);
    } else {
        items.push_condition(conditions::indent_if_start_of_line(false_type_parsed));
    }
    return items;

    struct TopMostData {
        top_most_info: Info,
        is_top_most: bool,
    }

    fn get_top_most_data(node: &TsConditionalType, context: &mut Context) -> TopMostData {
        // todo: consolidate with conditional expression
        // The "top most" node in nested conditionals follows the ancestors up through
        // the false expressions.
        let mut top_most_node = node;
        for ancestor in context.parent_stack.iter() {
            if let Node::TsConditionalType(parent) = ancestor {
                if parent.false_type.lo() == top_most_node.lo() {
                    top_most_node = parent;
                } else {
                    break;
                }
            } else {
                break;
            }
        }
        let is_top_most = top_most_node == node;
        let top_most_info = get_or_set_top_most_info(top_most_node.lo(), is_top_most, context);
        return TopMostData {
            is_top_most,
            top_most_info,
        };

        // the top-most node stores the info; descendants look it up via the
        // top-most node's start position
        fn get_or_set_top_most_info(top_most_expr_start: BytePos, is_top_most: bool, context: &mut Context) -> Info {
            if is_top_most {
                let info = Info::new("conditionalTypeStart");
                context.store_info_for_node(&top_most_expr_start, info);
                info
            } else {
                context.get_info_for_node(&top_most_expr_start).expect("Expected to have the top most expr info stored")
            }
        }
    }
}
/// Formats a constructor type (ex. `new () => MyClass`).
fn parse_constructor_type<'a>(node: &'a TsConstructorType, context: &mut Context<'a>) -> PrintItems {
    let start_info = Info::new("startConstructorType");
    let mut output = PrintItems::new();
    output.push_info(start_info);
    output.push_str("new");
    if context.config.constructor_type_space_after_new_keyword {
        output.push_str(" ");
    }
    if let Some(type_params) = &node.type_params {
        output.extend(parse_node(type_params.into(), context));
    }
    // the ` =>` separator emitted between the close paren and the return type
    let arrow_separator = {
        let mut separator_items = PrintItems::new();
        separator_items.push_str(" =>");
        separator_items.push_signal(Signal::SpaceIfNotTrailing);
        separator_items.push_signal(Signal::PossibleNewLine);
        separator_items
    };
    output.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: node.params.iter().map(|param| param.into()).collect(),
        custom_close_paren: move |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info,
            type_node: Some((&node.type_ann).into()),
            type_node_separator: Some(arrow_separator),
            param_count: node.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    output
}
/// Formats a function type (ex. `(a: string) => void`).
///
/// If the return type after `=>` wraps to a new line, an indent is started
/// there (via `indent_after_arrow_condition`) and finished at the end of the
/// whole type; the finish is conditioned on the start having resolved true.
fn parse_function_type<'a>(node: &'a TsFnType, context: &mut Context<'a>) -> PrintItems {
    let start_info = Info::new("startFunctionType");
    let mut items = PrintItems::new();
    // starts an indent only when the text after the arrow begins a new line
    let mut indent_after_arrow_condition = if_true(
        "indentIfIsStartOfLineAfterArrow",
        |context| Some(condition_resolvers::is_start_of_line(&context)),
        Signal::StartIndent.into()
    );
    // reference taken before the condition is moved into the separator items
    let indent_after_arrow_condition_ref = indent_after_arrow_condition.get_reference();
    items.push_info(start_info);
    if let Some(type_params) = &node.type_params {
        items.extend(parse_node(type_params.into(), context));
    }
    items.extend(parse_parameters_or_arguments(ParseParametersOrArgumentsOptions {
        span_data: node.get_parameters_span_data(context),
        nodes: node.params.iter().map(|node| node.into()).collect(),
        custom_close_paren: |context| Some(parse_close_paren_with_type(ParseCloseParenWithTypeOptions {
            start_info,
            type_node: Some((&node.type_ann).into()),
            type_node_separator: {
                let mut items = PrintItems::new();
                items.push_str(" =>");
                items.push_signal(Signal::SpaceIfNotTrailing);
                items.push_signal(Signal::PossibleNewLine);
                items.push_condition(indent_after_arrow_condition);
                Some(items)
            },
            param_count: node.params.len(),
        }, context)),
        is_parameters: true,
    }, context));
    // close the indent iff the start-indent condition above resolved true
    items.push_condition(if_true(
        "shouldFinishIndent",
        move |context| context.get_resolved_condition(&indent_after_arrow_condition_ref),
        Signal::FinishIndent.into()
    ));
    return items;
}
/// Formats an import type (ex. `import("./mod").MyType<string>`).
fn parse_import_type<'a>(node: &'a TsImportType, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.push_str("import(");
    output.extend(parse_node((&node.arg).into(), context));
    output.push_str(")");
    // optional member access into the imported module (ex. `.MyType`)
    if let Some(qualifier) = &node.qualifier {
        output.push_str(".");
        output.extend(parse_node(qualifier.into(), context));
    }
    // optional type arguments (ex. `<string>`)
    if let Some(type_args) = &node.type_args {
        output.extend(parse_node(type_args.into(), context));
    }
    output
}
/// Formats an indexed access type (ex. `MyType["someProperty"]`).
fn parse_indexed_access_type<'a>(node: &'a TsIndexedAccessType, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.extend(parse_node((&node.obj_type).into(), context));
    // the index is formatted like a computed property (`[...]`)
    let index_items = parse_node((&node.index_type).into(), context);
    output.extend(parse_computed_prop_like(ParseComputedPropLikeOptions {
        inner_node_span_data: node.index_type.span_data(),
        inner_items: index_items,
    }, context));
    output
}
/// Formats an infer type (ex. `infer T` inside a conditional type).
fn parse_infer_type<'a>(node: &'a TsInferType, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.push_str("infer ");
    output.extend(parse_node((&node.type_param).into(), context));
    output
}
/// Formats an intersection type (ex. `A & B`) via the shared
/// union/intersection routine.
fn parse_intersection_type<'a>(node: &'a TsIntersectionType, context: &mut Context<'a>) -> PrintItems {
    let intersection = UnionOrIntersectionType {
        span_data: node.span,
        types: &node.types,
        is_union: false,
    };
    parse_union_or_intersection_type(intersection, context)
}
/// Formats a literal type (ex. `5`, `"text"`, `true`).
fn parse_lit_type<'a>(node: &'a TsLitType, context: &mut Context<'a>) -> PrintItems {
    if let TsLit::Number(_) = &node.lit {
        // use the raw source text here in order to support negative numbers
        node.text(context).into()
    } else {
        parse_node((&node.lit).into(), context)
    }
}
/// Formats a mapped type (ex. `{ [K in keyof T]: T[K] }`).
///
/// Whether the braces go multi-line is decided by one shared condition that
/// is emitted after the `{` and again before the `}` (hence the clone), and
/// whose resolver is also used to decide the trailing semi-colon.
fn parse_mapped_type<'a>(node: &'a TsMappedType, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let start_info = Info::new("startMappedType");
    let end_info = Info::new("endMappedType");
    let open_brace_token = context.token_finder.get_first_open_brace_token_within(node).expect("Expected to find an open brace token in the mapped type.");
    // keep multi-line when the source had the `{` and the type param on
    // separate lines (unless configured to prefer a single line)
    let force_use_new_lines = !context.config.mapped_type_prefer_single_line && node_helpers::get_use_new_lines_for_nodes(open_brace_token, &node.type_param, context);
    let mut is_multiple_lines_condition = if_true_or(
        "mappedTypeNewLine",
        move |context| {
            if force_use_new_lines {
                Some(true)
            } else {
                condition_resolvers::is_multiple_lines(context, &start_info, &end_info)
            }
        },
        Signal::NewLine.into(),
        Signal::SpaceOrNewLine.into(),
    );
    // resolver reused below for the optional trailing semi-colon
    let is_multiple_lines = is_multiple_lines_condition.get_reference().create_resolver();
    items.push_info(start_info);
    items.push_str("{");
    items.push_condition(is_multiple_lines_condition.clone());
    items.push_condition(conditions::indent_if_start_of_line(parser_helpers::new_line_group({
        let mut items = PrintItems::new();
        // readonly modifier may carry an explicit `+`/`-` prefix
        if let Some(readonly) = node.readonly {
            items.push_str(match readonly {
                TruePlusMinus::True => "readonly ",
                TruePlusMinus::Plus => "+readonly ",
                TruePlusMinus::Minus => "-readonly ",
            });
        }
        items.extend(parse_computed_prop_like(ParseComputedPropLikeOptions {
            inner_node_span_data: node.type_param.span_data(),
            inner_items: parse_node((&node.type_param).into(), context),
        }, context));
        // optional modifier may also carry an explicit `+`/`-` prefix
        if let Some(optional) = node.optional {
            items.push_str(match optional {
                TruePlusMinus::True => "?",
                TruePlusMinus::Plus => "+?",
                TruePlusMinus::Minus => "-?",
            });
        }
        items.extend(parse_type_ann_with_colon_if_exists_for_type(&node.type_ann, context));
        items.extend(get_parsed_semi_colon(context.config.semi_colons, true, &is_multiple_lines));
        items
    })));
    items.push_condition(is_multiple_lines_condition);
    items.push_str("}");
    items.push_info(end_info);
    return items;
}
/// Formats an optional tuple element type (ex. `string?`).
fn parse_optional_type<'a>(node: &'a TsOptionalType, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.extend(parse_node((&node.type_ann).into(), context));
    output.push_str("?");
    output
}
/// Formats a qualified name (ex. `Namespace.MyType`).
fn parse_qualified_name<'a>(node: &'a TsQualifiedName, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.extend(parse_node((&node.left).into(), context));
    output.push_str(".");
    output.extend(parse_node((&node.right).into(), context));
    output
}
/// Formats a parenthesized type (ex. `(string | number)`).
fn parse_parenthesized_type<'a>(node: &'a TsParenthesizedType, context: &mut Context<'a>) -> PrintItems {
    let inner_parsed = parse_node_in_parens(
        |context| parse_node((&node.type_ann).into(), context),
        ParseNodeInParensOptions {
            inner_span: node.type_ann.span_data(),
            prefer_hanging: true,
            allow_open_paren_trailing_comments: true,
        },
        context
    );
    let parsed_type: PrintItems = conditions::with_indent_if_start_of_line_indented(inner_parsed).into();
    // type alias declarations handle grouping themselves, so skip the
    // new line group in that case
    let is_type_alias_parent = match context.parent() {
        Node::TsTypeAliasDecl(_) => true,
        _ => false,
    };
    if is_type_alias_parent {
        parsed_type
    } else {
        new_line_group(parsed_type)
    }
}
/// Formats a rest type (ex. `...string[]` in a tuple).
fn parse_rest_type<'a>(node: &'a TsRestType, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.push_str("...");
    output.extend(parse_node((&node.type_ann).into(), context));
    output
}
/// Formats a tuple type (ex. `[string, number]`) using the shared
/// array-like formatting routine.
fn parse_tuple_type<'a>(node: &'a TsTupleType, context: &mut Context<'a>) -> PrintItems {
    let element_nodes = node.elem_types.iter().map(|x| Some(x.into())).collect();
    parse_array_like_nodes(ParseArrayLikeNodesOptions {
        parent_span_data: node.span,
        nodes: element_nodes,
        prefer_hanging: context.config.tuple_type_prefer_hanging,
        prefer_single_line: context.config.tuple_type_prefer_single_line,
        trailing_commas: context.config.tuple_type_trailing_commas,
    }, context)
}
/// Formats a tuple element, which may be labeled (ex. `name: string`)
/// or a bare type.
fn parse_tuple_element<'a>(node: &'a TsTupleElement, context: &mut Context<'a>) -> PrintItems {
    match &node.label {
        Some(label) => {
            let mut output = PrintItems::new();
            output.extend(parse_node(label.into(), context));
            output.extend(parse_type_ann_with_colon_for_type(&node.ty, context));
            output
        }
        None => parse_node((&node.ty).into(), context),
    }
}
/// A type annotation node is just a wrapper; format the inner type directly.
fn parse_type_ann<'a>(node: &'a TsTypeAnn, context: &mut Context<'a>) -> PrintItems {
    let inner_type = (&node.type_ann).into();
    parse_node(inner_type, context)
}
/// Formats a type parameter (ex. `T extends string = "default"`, or
/// `K in keyof T` inside a mapped type).
fn parse_type_param<'a>(node: &'a TsTypeParam, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.extend(parse_node((&node.name).into(), context));
    if let Some(constraint) = &node.constraint {
        // mapped types use `in` (ex. `[K in keyof T]`); elsewhere it's `extends`
        let constraint_keyword = match context.parent().kind() {
            NodeKind::TsMappedType => "in",
            _ => "extends",
        };
        output.push_signal(Signal::SpaceOrNewLine);
        let mut constraint_items = PrintItems::new();
        constraint_items.push_str(constraint_keyword);
        constraint_items.push_signal(Signal::SpaceIfNotTrailing);
        constraint_items.extend(parse_node(constraint.into(), context));
        output.push_condition(conditions::indent_if_start_of_line(constraint_items));
    }
    if let Some(default) = &node.default {
        output.extend(parse_assignment(default.into(), "=", context));
    }
    output
}
/// Formats a type parameter or type argument list (ex. `<T, U extends T>`).
fn parse_type_parameters<'a>(node: TypeParamNode<'a>, context: &mut Context<'a>) -> PrintItems {
    let params = node.params();
    let node_span_data = node.span();
    let force_use_new_lines = get_use_new_lines(&node_span_data, &params, context);
    let mut items = PrintItems::new();
    items.push_str("<");
    items.extend(parse_separated_values(ParseSeparatedValuesOptions {
        nodes: params.into_iter().map(|p| Some(p)).collect(),
        prefer_hanging: context.config.type_parameters_prefer_hanging,
        force_use_new_lines,
        allow_blank_lines: false,
        separator: get_trailing_commas(&node_span_data, context).into(),
        single_line_space_at_start: false,
        single_line_space_at_end: false,
        custom_single_line_separator: None,
        multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
        force_possible_newline_at_start: false,
    }, context));
    items.push_str(">");
    return items;
    // Trailing commas are only valid on a declaration's type parameters,
    // not on type arguments at a use site.
    fn get_trailing_commas(node_span_data: &Span, context: &mut Context) -> TrailingCommas {
        let trailing_commas = context.config.type_parameters_trailing_commas;
        if trailing_commas == TrailingCommas::Never { return trailing_commas; }
        // trailing commas should be allowed in type parameters only—not arguments
        if let Some(type_params) = context.parent().get_type_parameters() {
            // matching start position means this node is the parent's
            // declared type parameter list
            if type_params.lo() == node_span_data.lo() {
                return trailing_commas;
            }
        }
        return TrailingCommas::Never;
    }
    // Go multi-line when the `<` and the first parameter were on different
    // lines in the source (unless configured to prefer a single line).
    fn get_use_new_lines(parent_span_data: &Span, params: &Vec<Node>, context: &mut Context) -> bool {
        if context.config.type_parameters_prefer_single_line || params.is_empty() {
            false
        } else {
            let first_param = &params[0];
            let angle_bracket_pos = parent_span_data.lo;
            node_helpers::get_use_new_lines_for_nodes(&angle_bracket_pos, first_param, context)
        }
    }
}
/// Formats a type operator (ex. `keyof T`, `unique symbol`, `readonly T[]`).
fn parse_type_operator<'a>(node: &'a TsTypeOperator, context: &mut Context<'a>) -> PrintItems {
    let operator_text = match node.op {
        TsTypeOperatorOp::KeyOf => "keyof",
        TsTypeOperatorOp::Unique => "unique",
        TsTypeOperatorOp::ReadOnly => "readonly",
    };
    let mut output = PrintItems::new();
    output.push_str(operator_text);
    output.push_signal(Signal::SpaceIfNotTrailing);
    output.extend(parse_node((&node.type_ann).into(), context));
    output
}
/// Formats a type predicate (ex. `value is string`, `asserts value`,
/// `asserts value is string`).
fn parse_type_predicate<'a>(node: &'a TsTypePredicate, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    if node.asserts {
        output.push_str("asserts ");
    }
    output.extend(parse_node((&node.param_name).into(), context));
    // a bare `asserts value` has no type annotation
    if let Some(type_ann) = &node.type_ann {
        output.push_str(" is");
        output.push_signal(Signal::SpaceIfNotTrailing);
        output.extend(parse_node(type_ann.into(), context));
    }
    output
}
/// Formats a type query (ex. `typeof someVariable`).
fn parse_type_query<'a>(node: &'a TsTypeQuery, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.push_str("typeof");
    output.push_signal(Signal::SpaceIfNotTrailing);
    output.extend(parse_node((&node.expr_name).into(), context));
    output
}
/// Formats a type reference (ex. `MyType<string>`): the type name followed
/// by any type arguments.
fn parse_type_reference<'a>(node: &'a TsTypeRef, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    output.extend(parse_node((&node.type_name).into(), context));
    if let Some(type_params) = &node.type_params {
        output.extend(parse_node(type_params.into(), context));
    }
    output
}
/// Formats a union type (ex. `A | B`) via the shared
/// union/intersection routine.
fn parse_union_type<'a>(node: &'a TsUnionType, context: &mut Context<'a>) -> PrintItems {
    let union = UnionOrIntersectionType {
        span_data: node.span,
        types: &node.types,
        is_union: true,
    };
    parse_union_or_intersection_type(union, context)
}
/// A view over either a union (`A | B`) or an intersection (`A & B`) type
/// so both can be formatted by the same routine.
struct UnionOrIntersectionType<'a> {
    /// The span of the whole union/intersection type.
    pub span_data: Span,
    /// The member types in source order.
    pub types: &'a Vec<Box<TsType>>,
    /// True for a union (`|` separator); false for an intersection (`&`).
    pub is_union: bool,
}
/// Formats a union or intersection type, placing the `|`/`&` separator
/// before each member and deciding single-line vs. multi-line layout.
///
/// When the layout goes multi-line, a leading separator is also emitted
/// before the first member (unless this type is nested inside another
/// union/intersection, which already handled that).
fn parse_union_or_intersection_type<'a>(node: UnionOrIntersectionType<'a>, context: &mut Context<'a>) -> PrintItems {
    // todo: configuration for operator position
    let mut items = PrintItems::new();
    let force_use_new_lines = get_use_new_lines_for_nodes(&node.types, context.config.union_and_intersection_type_prefer_single_line, context);
    let separator = if node.is_union { "|" } else { "&" };
    let leading_comments = node.span_data.leading_comments(context);
    let has_leading_comments = !leading_comments.is_empty();
    let indent_width = context.config.indent_width;
    let prefer_hanging = context.config.union_and_intersection_type_prefer_hanging;
    let is_parent_union_or_intersection = match context.parent().kind() {
        NodeKind::TsUnionType | NodeKind::TsIntersectionType => true,
        _ => false,
    };
    // pick the multi-line layout based on nesting and surrounding context
    let multi_line_options = if !is_parent_union_or_intersection {
        if use_surround_newlines(context) {
            parser_helpers::MultiLineOptions::surround_newlines_indented()
        } else if has_leading_comments {
            parser_helpers::MultiLineOptions::same_line_no_indent()
        } else {
            parser_helpers::MultiLineOptions::new_line_start()
        }
    } else if has_leading_comments {
        parser_helpers::MultiLineOptions::same_line_no_indent()
    } else {
        parser_helpers::MultiLineOptions::same_line_start_hanging_indent()
    };
    let parse_result = parser_helpers::parse_separated_values(|is_multi_line_or_hanging_ref| {
        let is_multi_line_or_hanging = is_multi_line_or_hanging_ref.create_resolver();
        let types_count = node.types.len();
        let mut parsed_nodes = Vec::new();
        for (i, type_node) in node.types.into_iter().enumerate() {
            let (allow_inline_multi_line, allow_inline_single_line) = {
                let is_last_value = i + 1 == types_count; // allow the last type to be single line
                (allows_inline_multi_line(&(&**type_node).into(), types_count > 1), is_last_value)
            };
            // the `|`/`&` token before this member, if present in the source
            // (used to pick up any comments attached to it)
            let separator_token = context.token_finder.get_previous_token_if_operator(&type_node.span(), separator);
            let start_info = Info::new("startInfo");
            let after_separator_info = Info::new("afterSeparatorInfo");
            let mut items = PrintItems::new();
            items.push_info(start_info);
            if let Some(separator_token) = separator_token {
                items.extend(parse_leading_comments(separator_token, context));
            }
            // the first member only gets a leading separator when the type
            // goes multi-line/hanging and isn't nested
            if i == 0 && !is_parent_union_or_intersection {
                items.push_condition(if_true(
                    "separatorIfMultiLine",
                    is_multi_line_or_hanging.clone(),
                    separator.into(),
                ));
            } else if i > 0 {
                items.push_str(separator);
            }
            if let Some(separator_token) = separator_token {
                items.extend(parse_trailing_comments(separator_token, context));
            }
            items.push_info(after_separator_info);
            // space after the separator, but only when a separator was
            // actually emitted on this line
            items.push_condition(if_true(
                "afterSeparatorSpace",
                move |condition_context| {
                    let is_on_same_line = condition_resolvers::is_on_same_line(condition_context, &after_separator_info)?;
                    let is_at_same_position = condition_resolvers::is_at_same_position(condition_context, &start_info)?;
                    return Some(is_on_same_line && !is_at_same_position);
                },
                Signal::SpaceIfNotTrailing.into(),
            ));
            items.extend(parse_node(type_node.into(), context));
            parsed_nodes.push(parser_helpers::ParsedValue {
                items,
                lines_span: None,
                allow_inline_multi_line,
                allow_inline_single_line,
            });
        }
        parsed_nodes
    }, parser_helpers::ParseSeparatedValuesOptions {
        prefer_hanging,
        force_use_new_lines,
        allow_blank_lines: false,
        single_line_space_at_start: false,
        single_line_space_at_end: false,
        single_line_separator: Signal::SpaceOrNewLine.into(),
        indent_width,
        multi_line_options,
        force_possible_newline_at_start: false,
    });
    items.extend(parse_result.items);
    return items;
    // These parents provide their own surrounding delimiters, so the
    // members get surrounded by newlines and indented when multi-line.
    fn use_surround_newlines(context: &mut Context) -> bool {
        match context.parent() {
            Node::TsTypeAssertion(_) | Node::TsParenthesizedType(_) => true,
            _ => false,
        }
    }
}
/* comments */
/// Parses the comments appearing before `node` so they're output ahead of it.
fn parse_leading_comments<'a>(node: &dyn SpanDataContainer, context: &mut Context<'a>) -> PrintItems {
    let comments = node.leading_comments(context);
    parse_comments_as_leading(node, comments, context)
}
/// Outputs `comments` as leading trivia for `node`, then emits the
/// blank line / space separating the last comment from the node itself.
///
/// The "previously handled" check must happen before the collection is
/// parsed, because parsing marks the comments handled.
fn parse_comments_as_leading<'a>(node: &dyn SpanDataContainer, comments: CommentsIterator<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if let Some(last_comment) = comments.get_last_comment() {
        let last_comment_previously_handled = context.has_handled_comment(&last_comment);
        items.extend(parse_comment_collection(comments, None, Some(node), context));
        // todo: this doesn't seem exactly right...
        if !last_comment_previously_handled {
            let node_start_line = node.start_line(context);
            let last_comment_end_line = last_comment.end_line(context);
            if node_start_line > last_comment_end_line {
                items.push_signal(Signal::NewLine);
                // preserve a blank line between the comment and the node
                if node_start_line - 1 > last_comment_end_line {
                    items.push_signal(Signal::NewLine);
                }
            }
            // a block comment on the node's own line gets a separating space
            else if last_comment.kind == CommentKind::Block && node_start_line == last_comment_end_line {
                items.push_signal(Signal::SpaceIfNotTrailing);
            }
        }
    }
    items
}
/// Outputs the comments that follow `node` on later lines as if they were
/// standalone statements.
fn parse_trailing_comments_as_statements<'a>(node: &dyn SpanDataContainer, context: &mut Context<'a>) -> PrintItems {
    let remaining_comments = get_trailing_comments_as_statements(node, context);
    parse_comments_as_statements(remaining_comments.into_iter(), Some(node), context)
}
/// Returns the not-yet-output trailing comments of `node` that end on a
/// line after the node ends (i.e. comments that should become statements).
fn get_trailing_comments_as_statements<'a>(node: &dyn SpanDataContainer, context: &mut Context<'a>) -> Vec<&'a Comment> {
    let node_end_line = node.end_line(context);
    let mut result = Vec::new();
    for comment in node.trailing_comments(context) {
        // skip comments already output or on/before the node's last line
        if context.has_handled_comment(&comment) || node_end_line >= comment.end_line(context) {
            continue;
        }
        result.push(comment);
    }
    result
}
/// Outputs each unhandled comment as its own statement, separating
/// consecutive comments (and the optional preceding node) with newlines.
fn parse_comments_as_statements<'a>(comments: impl Iterator<Item=&'a Comment>, last_node: Option<&dyn SpanDataContainer>, context: &mut Context<'a>) -> PrintItems {
    let mut previous: Option<&dyn SpanDataContainer> = last_node;
    let mut output = PrintItems::new();
    for comment in comments {
        if context.has_handled_comment(comment) {
            continue;
        }
        output.extend(parse_comment_based_on_last_node(comment, &previous, ParseCommentBasedOnLastNodeOptions {
            separate_with_newlines: true
        }, context));
        previous = Some(comment);
    }
    output
}
/// Outputs a run of unhandled comments, keeping a comment on the same
/// line as the upcoming node (when one is provided) instead of forcing
/// a newline before it.
fn parse_comment_collection<'a>(comments: impl Iterator<Item=&'a Comment>, last_node: Option<&dyn SpanDataContainer>, next_node: Option<&dyn SpanDataContainer>, context: &mut Context<'a>) -> PrintItems {
    let mut previous: Option<&dyn SpanDataContainer> = last_node;
    let mut output = PrintItems::new();
    let next_node_start_line = next_node.map(|n| n.start_line(context));
    for comment in comments {
        if context.has_handled_comment(comment) {
            continue;
        }
        // only stay on the same line when the comment shares the next node's line
        let separate_with_newlines = match next_node_start_line {
            Some(start_line) => comment.start_line(context) != start_line,
            None => false,
        };
        output.extend(parse_comment_based_on_last_node(comment, &previous, ParseCommentBasedOnLastNodeOptions {
            separate_with_newlines,
        }, context));
        previous = Some(comment);
    }
    output
}
/// Options for `parse_comment_based_on_last_node`.
struct ParseCommentBasedOnLastNodeOptions {
    /// Force the comment onto a new line after the previous node even when
    /// it started on the same line in the source.
    separate_with_newlines: bool,
}
/// Outputs a single comment with the appropriate separation from the node
/// that precedes it: newline(s) when it's on a later line, otherwise a
/// space (with line comments pinned to the current line so the printer
/// can't wrap between the node and the comment).
fn parse_comment_based_on_last_node(comment: &Comment, last_node: &Option<&dyn SpanDataContainer>, opts: ParseCommentBasedOnLastNodeOptions, context: &mut Context) -> PrintItems {
    let mut items = PrintItems::new();
    // tracks whether a StartForceNoNewLines signal needs a matching finish
    let mut pushed_ignore_new_lines = false;
    if let Some(last_node) = last_node {
        let comment_start_line = comment.start_line(context);
        let last_node_end_line = last_node.end_line(context);
        if opts.separate_with_newlines || comment_start_line > last_node_end_line {
            items.push_signal(Signal::NewLine);
            // preserve a single blank line from the source
            if comment_start_line > last_node_end_line + 1 {
                items.push_signal(Signal::NewLine);
            }
        } else if comment.kind == CommentKind::Line {
            // same-line `//` comment: keep it glued to the previous node
            items.push_signal(Signal::StartForceNoNewLines);
            items.push_str(" ");
            pushed_ignore_new_lines = true;
        } else if last_node.text(context).starts_with("/*") {
            // separate two adjacent block comments with a space
            items.push_str(" ");
        }
    }
    if let Some(parsed_comment) = parse_comment(&comment, context) {
        items.extend(parsed_comment);
    }
    if pushed_ignore_new_lines {
        items.push_signal(Signal::FinishForceNoNewLines);
    }
    return items;
}
/// Outputs a single comment, returning `None` when it was already handled.
/// Marks the comment handled so it is never output twice.
fn parse_comment(comment: &Comment, context: &mut Context) -> Option<PrintItems> {
    // only parse if handled
    if context.has_handled_comment(comment) {
        return None;
    }
    // mark handled and parse
    context.mark_comment_handled(comment);
    let parsed = match comment.kind {
        CommentKind::Block => {
            // re-emit the block comment's text verbatim between `/*` and `*/`
            let mut block_items = PrintItems::new();
            block_items.push_str("/*");
            block_items.extend(parse_raw_string(&comment.text));
            block_items.push_str("*/");
            block_items
        }
        CommentKind::Line => parser_helpers::parse_js_like_comment_line(&comment.text, context.config.comment_line_force_space_after_slashes),
    };
    Some(parsed)
}
/// Outputs the comments sitting on the first line of `node` (ex. after an
/// opening brace), gathered from the first member's leading comments and
/// the node's own trailing comments.
fn parse_first_line_trailing_comments<'a>(node: &dyn SpanDataContainer, first_member: Option<Span>, context: &mut Context<'a>) -> PrintItems {
    let mut output = PrintItems::new();
    let node_start_line = node.start_line(context);
    // gather candidates: the first member's leading comments, then the
    // node's trailing comments
    let mut candidates: Vec<&Comment> = Vec::new();
    if let Some(first_member) = &first_member {
        candidates.extend(first_member.leading_comments(context));
    }
    candidates.extend(node.trailing_comments(context));
    for comment in candidates {
        // only comments that start on the node's first line belong here
        if comment.start_line(context) != node_start_line {
            continue;
        }
        if let Some(parsed_comment) = parse_comment(comment, context) {
            if comment.kind == CommentKind::Line {
                output.push_str(" ");
            }
            output.extend(parsed_comment);
        }
    }
    output
}
/// Parses the comments appearing after `node` so they're output behind it.
fn parse_trailing_comments<'a>(node: &dyn SpanDataContainer, context: &mut Context<'a>) -> PrintItems {
    let comments = node.trailing_comments(context);
    parse_comments_as_trailing(node, comments, context)
}
/// Outputs the comments that trail `node` on its final source line.
///
/// Only comments starting on or before the node's end line are considered
/// trailing here; a leading space is added when the first unhandled one is
/// a block comment so it doesn't touch the node's text.
fn parse_comments_as_trailing<'a>(node: &dyn SpanDataContainer, trailing_comments: CommentsIterator<'a>, context: &mut Context<'a>) -> PrintItems {
    // use the roslyn definition of trailing comments
    let node_end_line = node.end_line(context);
    let trailing_comments_on_same_line = trailing_comments.into_iter()
        .filter(|c| c.start_line(context) <= node_end_line) // less than or equal instead of just equal in order to include "forgotten" comments
        .collect::<Vec<_>>();
    // idiom: `find` replaces the former `filter(..).next()`
    let first_unhandled_comment = trailing_comments_on_same_line.iter().find(|c| !context.has_handled_comment(&c));
    let mut items = PrintItems::new();
    if let Some(first_unhandled_comment) = first_unhandled_comment {
        // separate a block comment from the node's text with a space
        if first_unhandled_comment.kind == CommentKind::Block {
            items.push_str(" ");
        }
    }
    items.extend(parse_comment_collection(trailing_comments_on_same_line.into_iter(), Some(node), None, context));
    items
}
/// An empty JSX expression (`{/* comment */}`) stores its comments as
/// leading comments of the closing brace position, so read them from there.
fn get_jsx_empty_expr_comments<'a>(node: &JSXEmptyExpr, context: &mut Context<'a>) -> CommentsIterator<'a> {
    let end_pos = node.span.hi();
    end_pos.leading_comments(context)
}
/* helpers */
/// Options for `parse_array_like_nodes` (array literals, tuple types, etc.).
struct ParseArrayLikeNodesOptions<'a> {
    /// The span of the whole bracketed construct, including the brackets.
    parent_span_data: Span,
    /// The elements; `None` represents an elision/hole (ex. `[,1]`).
    nodes: Vec<Option<Node<'a>>>,
    prefer_hanging: bool,
    prefer_single_line: bool,
    trailing_commas: TrailingCommas,
}
/// Formats a bracketed, comma separated list (array literal, tuple type,
/// array pattern), handling elisions, trailing commas, and whether the
/// list is forced onto multiple lines.
fn parse_array_like_nodes<'a>(opts: ParseArrayLikeNodesOptions<'a>, context: &mut Context<'a>) -> PrintItems {
    let parent_span_data = opts.parent_span_data;
    let nodes = opts.nodes;
    // a trailing comma after a rest element would be a syntax error
    let trailing_commas = if allow_trailing_commas(&nodes) { opts.trailing_commas } else { TrailingCommas::Never };
    let prefer_hanging = opts.prefer_hanging;
    let force_use_new_lines = get_force_use_new_lines(&parent_span_data, &nodes, opts.prefer_single_line, context);
    let mut items = PrintItems::new();
    // idiom: `first().and_then(..)` replaces `get(0).map(..).flatten()`
    let mut first_member = nodes.first().and_then(|x| x.as_ref().map(|y| y.span_data()));
    if first_member.is_none() {
        // the list may start with an elision (ex. `[, 1]`); use the first
        // comma's span as the "first member" position in that case
        if let Some(comma_token) = context.token_finder.get_first_comma_within(&parent_span_data) {
            first_member = Some(comma_token.span_data());
        }
    }
    items.extend(parse_surrounded_by_tokens(|context| {
        parse_separated_values(ParseSeparatedValuesOptions {
            nodes,
            prefer_hanging,
            force_use_new_lines,
            allow_blank_lines: true,
            separator: trailing_commas.into(),
            single_line_space_at_start: false,
            single_line_space_at_end: false,
            custom_single_line_separator: None,
            multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
            force_possible_newline_at_start: false,
        }, context)
    }, |_| None, ParseSurroundedByTokensOptions {
        open_token: "[",
        close_token: "]",
        span_data: Some(parent_span_data),
        first_member,
        prefer_single_line_when_empty: true,
        allow_open_token_trailing_comments: true,
    }, context));
    return items;
    // Force multi-line when the `[` and the first element (or first comma,
    // for a leading elision) were on different lines in the source.
    fn get_force_use_new_lines(node: &dyn Ranged, nodes: &Vec<Option<Node>>, prefer_single_line: bool, context: &mut Context) -> bool {
        if prefer_single_line || nodes.is_empty() {
            false
        } else {
            let open_bracket_token = context.token_finder.get_first_open_bracket_token_within(node).expect("Expected to find an open bracket token.");
            if let Some(first_node) = &nodes[0] {
                node_helpers::get_use_new_lines_for_nodes(open_bracket_token, first_node, context)
            } else {
                // todo: tests for this (ex. [\n,] -> [\n ,\n])
                let first_comma = context.token_finder.get_first_comma_within(node);
                if let Some(first_comma) = first_comma {
                    node_helpers::get_use_new_lines_for_nodes(open_bracket_token, first_comma, context)
                } else {
                    false
                }
            }
        }
    }
    // A trailing comma after a rest element (`...x`) is a syntax error.
    fn allow_trailing_commas(nodes: &Vec<Option<Node>>) -> bool {
        if let Some(Some(last)) = nodes.last() {
            // this would be a syntax error
            if last.kind() == NodeKind::RestPat {
                return false;
            }
        }
        true
    }
}
/// Options for `parse_membered_body` (class/interface/enum-like bodies).
struct ParseMemberedBodyOptions<'a, FShouldUseBlankLine> where FShouldUseBlankLine : Fn(&Node, &Node, &mut Context) -> bool {
    /// The span of the whole construct whose body is being parsed.
    span_data: Span,
    /// The members inside the braces, in source order.
    members: Vec<Node<'a>>,
    /// Info marking the start of the header, used for brace positioning.
    start_header_info: Option<Info>,
    brace_position: BracePosition,
    /// Decides whether a blank line is kept between two adjacent members.
    should_use_blank_line: FShouldUseBlankLine,
    /// Separator emitted after each member (ex. semi-colons, commas, none).
    separator: Separator,
}
/// Formats a braced body of members (ex. a class or interface body):
/// positions the opening brace relative to the header, then parses the
/// members as a block between the braces.
fn parse_membered_body<'a, FShouldUseBlankLine>(
    opts: ParseMemberedBodyOptions<'a, FShouldUseBlankLine>,
    context: &mut Context<'a>
) -> PrintItems
    where FShouldUseBlankLine : Fn(&Node, &Node, &mut Context) -> bool
{
    let mut items = PrintItems::new();
    // todo: no expect here
    // the open brace sits before the first member, or before the end of the
    // construct when there are no members
    let open_brace_token = context.token_finder.get_first_open_brace_token_before(&if opts.members.is_empty() { opts.span_data.hi } else { opts.members[0].lo() })
        .expect("Expected to find an open brace token.");
    let close_brace_token_pos = BytePos(opts.span_data.hi.0 - 1); // todo: assert this is correct
    items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
        brace_position: opts.brace_position,
        open_brace_token: Some(open_brace_token),
        start_header_info: opts.start_header_info,
    }, context));
    let should_use_blank_line = opts.should_use_blank_line;
    let separator = opts.separator;
    items.extend(parse_block(|members, context| {
        parse_statements_or_members(ParseStatementsOrMembersOptions {
            // the inner span covers everything strictly between the braces
            inner_span_data: create_span_data(open_brace_token.hi(), close_brace_token_pos.lo()),
            items: members.into_iter().map(|node| (node, None)).collect(),
            should_use_space: None,
            should_use_new_line: None,
            should_use_blank_line,
            separator,
        }, context)
    }, ParseBlockOptions {
        // the block span includes both braces
        span_data: Some(create_span_data(open_brace_token.lo(), BytePos(close_brace_token_pos.hi().0 + 1))),
        children: opts.members,
    }, context));
    items
}
/// Formats a list of statements, preserving a single blank line between
/// statements wherever the source had one.
fn parse_statements<'a>(inner_span_data: Span, stmts: impl Iterator<Item=Node<'a>>, context: &mut Context<'a>) -> PrintItems {
    let statement_items = stmts.map(|stmt| (stmt, None)).collect();
    parse_statements_or_members(ParseStatementsOrMembersOptions {
        inner_span_data,
        items: statement_items,
        should_use_space: None,
        should_use_new_line: None,
        should_use_blank_line: |previous, next, context| node_helpers::has_separating_blank_line(previous, next, context),
        separator: Separator::none(),
    }, context)
}
/// Options for `parse_statements_or_members`.
struct ParseStatementsOrMembersOptions<'a, FShouldUseBlankLine> where FShouldUseBlankLine : Fn(&Node, &Node, &mut Context) -> bool {
    /// The span strictly between the surrounding braces.
    inner_span_data: Span,
    /// Each item with optionally pre-parsed print items to use instead of
    /// parsing the node.
    items: Vec<(Node<'a>, Option<PrintItems>)>,
    should_use_space: Option<Box<dyn Fn(&Node, &Node, &mut Context) -> bool>>, // todo: Remove putting functions on heap by using type parameters?
    should_use_new_line: Option<Box<dyn Fn(&Node, &Node, &mut Context) -> bool>>,
    /// Decides whether a blank line is kept between two adjacent items.
    should_use_blank_line: FShouldUseBlankLine,
    /// Separator emitted after each item (ex. semi-colons, commas, none).
    separator: Separator,
}
/// Formats a sequence of statements or members, inserting newlines/blank
/// lines/spaces between them, handling empty statements (bare `;`) by
/// emitting only their comments, and finally outputting any comments that
/// trail the last item.
fn parse_statements_or_members<'a, FShouldUseBlankLine>(
    opts: ParseStatementsOrMembersOptions<'a, FShouldUseBlankLine>,
    context: &mut Context<'a>
) -> PrintItems where FShouldUseBlankLine : Fn(&Node, &Node, &mut Context) -> bool
{
    let mut last_node: Option<Node> = None;
    let mut items = PrintItems::new();
    let children_len = opts.items.len();
    for (i, (node, optional_print_items)) in opts.items.into_iter().enumerate() {
        let is_empty_stmt = match node { Node::EmptyStmt(_) => true, _ => false };
        if !is_empty_stmt {
            // separation from the previous item: newline (+ optional blank
            // line) by default, or a space when configured
            if let Some(last_node) = last_node {
                if should_use_new_line(&opts.should_use_new_line, &last_node, &node, context) {
                    items.push_signal(Signal::NewLine);
                    if (opts.should_use_blank_line)(&last_node, &node, context) {
                        items.push_signal(Signal::NewLine);
                    }
                }
                else if let Some(should_use_space) = &opts.should_use_space {
                    if should_use_space(&last_node, &node, context) {
                        items.push_signal(Signal::SpaceOrNewLine);
                    }
                }
            }
            // expose this item's end info to nested parsers while it parses
            let end_info = Info::new("endStatementOrMemberInfo");
            context.end_statement_or_member_infos.push(end_info);
            items.extend(if let Some(print_items) = optional_print_items {
                print_items
            } else {
                if opts.separator.is_none() {
                    parse_node(node.clone(), context)
                } else {
                    let parsed_separator = get_parsed_separator(&opts.separator, i == children_len - 1, &|_| Some(true));
                    parse_node_with_separator(Some(node.clone()), parsed_separator, context)
                }
            });
            items.push_info(end_info);
            context.end_statement_or_member_infos.pop();
            last_node = Some(node);
        } else {
            // empty statements are dropped, but their comments are kept
            items.extend(parse_comments_as_statements(node.leading_comments(context), None, context));
            items.extend(parse_comments_as_statements(node.trailing_comments(context), None, context));
            // ensure if this is last that it parses the trailing comment statements
            if i == children_len - 1 {
                last_node = Some(node);
            }
        }
    }
    if let Some(last_node) = &last_node {
        items.extend(parse_trailing_comments_as_statements(last_node, context));
    }
    // an empty body may still contain comments before the closing brace
    if children_len == 0 {
        items.extend(parse_comments_as_statements(opts.inner_span_data.hi.leading_comments(context), None, context));
    }
    return items;
    // Defaults to a newline between items when no custom callback is given.
    fn should_use_new_line(
        should_use_new_line: &Option<Box<dyn Fn(&Node, &Node, &mut Context) -> bool>>,
        last_node: &Node,
        next_node: &Node,
        context: &mut Context
    ) -> bool {
        if let Some(should_use) = &should_use_new_line {
            return (should_use)(last_node, next_node, context);
        }
        return true;
    }
}
/// Options for parsing a parenthesized parameter or argument list.
struct ParseParametersOrArgumentsOptions<'a, F> where F : FnOnce(&mut Context<'a>) -> Option<PrintItems> {
    /// Span of the surrounding parentheses, when they could be located.
    span_data: Option<Span>,
    /// The parameter/argument nodes inside the parentheses.
    nodes: Vec<Node<'a>>,
    /// Callback that may supply custom print items for the close paren
    /// (e.g. to attach a return type); returning `None` uses the default.
    custom_close_paren: F,
    /// True when parsing declaration-site parameters; false for call arguments.
    is_parameters: bool,
}
/// Parses a parenthesized parameter or argument list, choosing between
/// single-line, hanging, and multi-line layouts based on configuration and on
/// how the nodes were laid out in the original source.
fn parse_parameters_or_arguments<'a, F>(opts: ParseParametersOrArgumentsOptions<'a, F>, context: &mut Context<'a>) -> PrintItems where F : FnOnce(&mut Context<'a>) -> Option<PrintItems> {
    let is_parameters = opts.is_parameters;
    // parameters and arguments have independent "prefer single line" settings
    let prefer_single_line = is_parameters && context.config.parameters_prefer_single_line || !is_parameters && context.config.arguments_prefer_single_line;
    let force_use_new_lines = get_use_new_lines_for_nodes_with_preceeding_token("(", &opts.nodes, prefer_single_line, context);
    let span_data = opts.span_data;
    let custom_close_paren = opts.custom_close_paren;
    let first_member_span_data = opts.nodes.iter().map(|n| n.span_data()).next();
    let nodes = opts.nodes;
    let prefer_hanging = if is_parameters { context.config.parameters_prefer_hanging } else { context.config.arguments_prefer_hanging };
    let trailing_commas = get_trailing_commas(&nodes, is_parameters, context);
    return parse_surrounded_by_tokens(|context| {
        let mut items = PrintItems::new();
        // Special case: a lone arrow function with an expression body gets
        // bespoke handling so it can hug the parentheses when possible.
        if !force_use_new_lines && nodes.len() == 1 && is_arrow_function_with_expr_body(&nodes[0]) {
            let start_info = Info::new("startArrow");
            let parsed_node = parse_node(nodes.into_iter().next().unwrap(), context);
            items.push_info(start_info);
            items.push_signal(Signal::PossibleNewLine);
            items.push_condition(conditions::indent_if_start_of_line(parsed_node));
            // Force a newline when the arrow ended up on a different line with a
            // different start-line indentation than where it began.
            items.push_condition(if_true(
                "isDifferentLineAndStartLineIndentation",
                move |context| {
                    let start_info = context.get_resolved_info(&start_info)?;
                    let is_different_line = start_info.line_number != context.writer_info.line_number;
                    let is_different_start_line_indentation = start_info.line_start_indent_level != context.writer_info.line_start_indent_level;
                    Some(is_different_line && is_different_start_line_indentation)
                },
                Signal::NewLine.into()
            ));
        } else {
            items.extend(parse_separated_values(ParseSeparatedValuesOptions {
                nodes: nodes.into_iter().map(|x| Some(x)).collect(),
                prefer_hanging,
                force_use_new_lines,
                allow_blank_lines: false,
                separator: trailing_commas.into(),
                single_line_space_at_start: false,
                single_line_space_at_end: false,
                custom_single_line_separator: None,
                multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
                force_possible_newline_at_start: is_parameters,
            }, context));
        }
        items
    }, custom_close_paren, ParseSurroundedByTokensOptions {
        open_token: "(",
        close_token: ")",
        span_data,
        first_member: first_member_span_data,
        prefer_single_line_when_empty: true,
        allow_open_token_trailing_comments: true,
    }, context);

    /// Determines the trailing comma behaviour, disallowing trailing commas
    /// where the grammar forbids them (rest parameters, dynamic `import()`).
    // NOTE(review): `&Vec<Node>` could be `&[Node]` (clippy `ptr_arg`).
    fn get_trailing_commas(nodes: &Vec<Node>, is_parameters: bool, context: &mut Context) -> TrailingCommas {
        if let Some(last) = nodes.last() {
            // this would be a syntax error
            if is_param_rest_pat(last) {
                return TrailingCommas::Never;
            }
        }
        return if is_dynamic_import(&context.current_node) {
            TrailingCommas::Never // not allowed
        } else if is_parameters {
            context.config.parameters_trailing_commas
        } else {
            context.config.arguments_trailing_commas
        };

        /// True when the node is a `import(...)` dynamic import call.
        fn is_dynamic_import(node: &Node) -> bool {
            if let Node::CallExpr(call_expr) = &node {
                if let ExprOrSuper::Expr(expr) = &call_expr.callee {
                    if let Expr::Ident(ident) = &**expr {
                        if (&ident.sym as &str) == "import" {
                            return true;
                        }
                    }
                }
            }
            false
        }

        /// True when the parameter node is a rest pattern (`...args`).
        fn is_param_rest_pat(param: &Node) -> bool {
            if let Node::Param(param) = param {
                param.pat.kind() == NodeKind::RestPat
            } else {
                // arrow functions will not be a Param
                param.kind() == NodeKind::RestPat
            }
        }
    }
}
/// Options for parsing a close paren that may be followed by a type
/// annotation (e.g. a function's return type).
struct ParseCloseParenWithTypeOptions<'a> {
    /// Info marking the start of the header the paren belongs to.
    start_info: Info,
    /// The type annotation node to print after the paren, if any.
    type_node: Option<Node<'a>>,
    /// Custom separator items between the paren and type; defaults to `: `.
    type_node_separator: Option<PrintItems>,
    /// Number of parameters in the preceding list (affects newline grouping).
    param_count: usize,
}
/// Parses a close paren optionally followed by a type annotation, inserting a
/// newline before the paren when the header is hanging and the type spans
/// multiple lines.
fn parse_close_paren_with_type<'a>(opts: ParseCloseParenWithTypeOptions<'a>, context: &mut Context<'a>) -> PrintItems {
    // todo: clean this up a bit
    let type_node_start_info = Info::new("typeNodeStart");
    let has_type_node = opts.type_node.is_some();
    let type_node_end_info = Info::new("typeNodeEnd");
    let start_info = opts.start_info;
    let parsed_type_node = parse_type_node(opts.type_node, opts.type_node_separator, type_node_start_info, type_node_end_info, opts.param_count, context);
    let mut items = PrintItems::new();
    // break before `)` when the header hangs and the type is multi-line, so
    // the type does not continue on the hanging line
    items.push_condition(if_true(
        "newLineIfHeaderHangingAndTypeNodeMultipleLines",
        move |context| {
            if !has_type_node { return Some(false); }
            if let Some(is_hanging) = condition_resolvers::is_hanging(context, &start_info, &None) {
                if let Some(is_multiple_lines) = condition_resolvers::is_multiple_lines(context, &type_node_start_info, &type_node_end_info) {
                    return Some(is_hanging && is_multiple_lines);
                }
            }
            return None;
        },
        Signal::NewLine.into(),
    ));
    items.push_str(")");
    items.extend(parsed_type_node);
    return items;

    /// Parses the optional type annotation, wrapping it in a newline group
    /// when doing so will not cause awkward hanging of the parameter list.
    fn parse_type_node<'a>(
        type_node: Option<Node<'a>>,
        type_node_separator: Option<PrintItems>,
        type_node_start_info: Info,
        type_node_end_info: Info,
        param_count: usize,
        context: &mut Context<'a>
    ) -> PrintItems {
        let mut items = PrintItems::new();
        return if let Some(type_node) = type_node {
            let use_new_line_group = get_use_new_line_group(param_count, &type_node, context);
            items.push_info(type_node_start_info);
            if let Some(type_node_separator) = type_node_separator {
                items.extend(type_node_separator);
            } else {
                if context.config.type_annotation_space_before_colon { items.push_str(" "); }
                items.push_str(":");
                items.push_signal(Signal::SpaceIfNotTrailing);
            }
            let parsed_type_node = parse_node(type_node.into(), context);
            items.extend(parsed_type_node);
            items.push_info(type_node_end_info);
            if use_new_line_group { new_line_group(items) } else { items }
        } else {
            items
        };

        /// Decides whether the type annotation should be a newline group.
        fn get_use_new_line_group(param_count: usize, type_node: &Node, context: &mut Context) -> bool {
            if param_count == 0 {
                false
            } else {
                if context.config.parameters_prefer_hanging && param_count > 1 {
                    // This was done to prevent the second argument becoming hanging, which doesn't
                    // look good especially when the return type then becomes multi-line.
                    match type_node {
                        Node::TsUnionType(_) | Node::TsIntersectionType(_) => false,
                        Node::TsTypeAnn(type_ann) => match &*type_ann.type_ann {
                            TsType::TsUnionOrIntersectionType(_) => false,
                            _ => true,
                        },
                        _ => true,
                    }
                } else {
                    true
                }
            }
        }
    }
}
/// The kind of separator token to print between values.
#[derive(PartialEq)]
enum SeparatorValue {
    /// Separate with semi-colons per the given semi-colon configuration.
    SemiColon(SemiColons),
    /// Separate with commas per the given trailing-comma configuration.
    Comma(TrailingCommas),
}

/// Separator behaviour for a list of values, configurable independently for
/// single-line and multi-line layouts; `None` means no separator is printed.
struct Separator {
    single_line: Option<SeparatorValue>,
    multi_line: Option<SeparatorValue>,
}

impl Separator {
    /// A separator that prints nothing in either layout.
    pub fn none() -> Self {
        Separator {
            single_line: None,
            multi_line: None,
        }
    }

    /// True when no separator is printed in either layout.
    pub fn is_none(&self) -> bool {
        self.single_line.is_none() && self.multi_line.is_none()
    }
}

/// Use the same semi-colon behaviour for both layouts.
impl From<SemiColons> for Separator {
    fn from(value: SemiColons) -> Separator {
        Separator {
            single_line: Some(SeparatorValue::SemiColon(value)),
            multi_line: Some(SeparatorValue::SemiColon(value)),
        }
    }
}

/// Use the same trailing-comma behaviour for both layouts.
impl From<TrailingCommas> for Separator {
    fn from(value: TrailingCommas) -> Separator {
        Separator {
            single_line: Some(SeparatorValue::Comma(value)),
            multi_line: Some(SeparatorValue::Comma(value)),
        }
    }
}
/// Options for parsing a separated list of values (arguments, members, etc.).
struct ParseSeparatedValuesOptions<'a> {
    /// The values to parse; `None` entries print only the separator.
    nodes: Vec<Option<Node<'a>>>,
    /// Prefer a hanging indent over breaking each value onto its own line.
    prefer_hanging: bool,
    /// Force every value onto its own line.
    force_use_new_lines: bool,
    /// Allow blank lines between values (e.g. object members).
    allow_blank_lines: bool,
    /// Separator token behaviour between values.
    separator: Separator,
    /// Print a space after the opening token in single-line layout.
    single_line_space_at_start: bool,
    /// Print a space before the closing token in single-line layout.
    single_line_space_at_end: bool,
    /// Separator to use in single-line layout instead of space-or-newline.
    custom_single_line_separator: Option<PrintItems>,
    /// How values are surrounded/indented in multi-line layout.
    multi_line_options: parser_helpers::MultiLineOptions,
    /// Emit a possible-newline signal before the first value.
    force_possible_newline_at_start: bool,
}
/// Parses a separated list of values, keeping only the print items and
/// discarding the rest of the result (e.g. the multi-line condition ref).
#[inline]
fn parse_separated_values<'a>(
    opts: ParseSeparatedValuesOptions<'a>,
    context: &mut Context<'a>
) -> PrintItems {
    let result = parse_separated_values_with_result(opts, context);
    result.items
}
/// Parses a separated list of values and returns the full result, including
/// the condition reference that resolves whether the list went multi-line.
fn parse_separated_values_with_result<'a>(
    opts: ParseSeparatedValuesOptions<'a>,
    context: &mut Context<'a>
) -> ParseSeparatedValuesResult {
    let nodes = opts.nodes;
    let separator = opts.separator;
    let indent_width = context.config.indent_width;
    let compute_lines_span = opts.allow_blank_lines; // save time otherwise
    parser_helpers::parse_separated_values(|is_multi_line_or_hanging_ref| {
        let is_multi_line_or_hanging = is_multi_line_or_hanging_ref.create_resolver();
        let mut parsed_nodes = Vec::new();
        let nodes_count = nodes.len();
        for (i, value) in nodes.into_iter().enumerate() {
            let (allow_inline_multi_line, allow_inline_single_line) = if let Some(value) = &value {
                let is_last_value = i + 1 == nodes_count; // allow the last node to be single line
                (allows_inline_multi_line(value, nodes_count > 1), is_last_value)
            } else { (false, false) };
            // line spans (including comments) are only needed when blank lines
            // may be preserved between values
            let lines_span = if compute_lines_span {
                value.as_ref().map(|x| parser_helpers::LinesSpan{
                    start_line: x.start_line_with_comments(context),
                    end_line: x.end_line_with_comments(context)
                })
            } else { None };
            let items = parser_helpers::new_line_group(if separator.is_none() {
                if let Some(value) = value {
                    parse_node(value, context)
                } else {
                    PrintItems::new()
                }
            } else {
                // the separator variant may depend on whether the list resolves
                // to multi-line/hanging, hence the resolver is passed through
                let parsed_separator = get_parsed_separator(&separator, i == nodes_count - 1, &is_multi_line_or_hanging);
                parse_node_with_separator(value, parsed_separator, context)
            });
            parsed_nodes.push(parser_helpers::ParsedValue {
                items,
                lines_span,
                allow_inline_multi_line,
                allow_inline_single_line,
            });
        }
        parsed_nodes
    }, parser_helpers::ParseSeparatedValuesOptions {
        prefer_hanging: opts.prefer_hanging,
        force_use_new_lines: opts.force_use_new_lines,
        allow_blank_lines: opts.allow_blank_lines,
        single_line_space_at_start: opts.single_line_space_at_start,
        single_line_space_at_end: opts.single_line_space_at_end,
        single_line_separator: opts.custom_single_line_separator.unwrap_or(Signal::SpaceOrNewLine.into()),
        indent_width,
        multi_line_options: opts.multi_line_options,
        force_possible_newline_at_start: opts.force_possible_newline_at_start,
    })
}
/// Parses a node followed by its separator, moving any comments that trail the
/// original comma in the source to after the printed separator.
fn parse_node_with_separator<'a>(value: Option<Node<'a>>, parsed_separator: PrintItems, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let comma_token = get_comma_token(&value, context);

    // get the trailing comments after the comma token (if the separator in the file is currently a comma)
    let parsed_trailing_comments = if let Some(comma_token) = comma_token {
        parse_trailing_comments(comma_token, context)
    } else {
        PrintItems::new()
    };

    if let Some(element) = value {
        let parsed_separator = parsed_separator.into_rc_path();
        items.extend(parse_node_with_inner_parse(element, context, move |mut items, _| {
            // this Rc clone is necessary because we can't move the captured parsed_comma out of this closure
            items.push_optional_path(parsed_separator.clone());
            items
        }));
    } else {
        // no node — print only the separator (e.g. an array hole)
        items.extend(parsed_separator);
    }
    items.extend(parsed_trailing_comments);
    return items;

    /// Finds the comma token for the element: the one after it, or failing
    /// that the last one inside it (may occur for type literals).
    fn get_comma_token<'a>(element: &Option<Node<'a>>, context: &mut Context<'a>) -> Option<&'a TokenAndSpan> {
        if let Some(element) = element {
            match context.token_finder.get_next_token_if_comma(element) {
                Some(comma) => Some(comma),
                None => context.token_finder.get_last_comma_token_within(element), // may occur for type literals
            }
        } else {
            None // not a comma separated node
        }
    }
}
/// Some nodes don't have a TsTypeAnn, but instead a Box<TsType>.
/// Parses the colon-prefixed type when present; otherwise prints nothing.
fn parse_type_ann_with_colon_if_exists_for_type<'a>(type_ann: &'a Option<Box<TsType>>, context: &mut Context<'a>) -> PrintItems {
    match type_ann {
        Some(type_ann) => parse_type_ann_with_colon_for_type(type_ann, context),
        None => PrintItems::new(),
    }
}
/// Parses a bare `TsType` with its preceding colon, honouring the
/// space-before-colon configuration.
fn parse_type_ann_with_colon_for_type<'a>(type_ann: &'a TsType, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if context.config.type_annotation_space_before_colon {
        items.push_str(" ");
    }
    // the colon precedes the type node itself here (no TsTypeAnn wrapper)
    let colon_token = context.token_finder.get_previous_token_if_colon(type_ann);
    #[cfg(debug_assertions)]
    assert_has_op(":", colon_token, context);
    items.extend(parse_type_ann_with_colon(type_ann.into(), colon_token, context));
    items
}
/// Parses an optional `TsTypeAnn` with its colon; prints nothing when absent.
fn parse_type_ann_with_colon_if_exists<'a>(type_ann: &'a Option<TsTypeAnn>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if let Some(type_ann) = type_ann {
        if context.config.type_annotation_space_before_colon {
            items.push_str(" ");
        }
        // a TsTypeAnn contains its colon, so search within the annotation
        let colon_token = context.token_finder.get_first_colon_token_within(type_ann);
        #[cfg(debug_assertions)]
        assert_has_op(":", colon_token, context);
        items.extend(parse_type_ann_with_colon(type_ann.into(), colon_token, context));
    }
    items
}
/// Parses a type annotation as an assignment-like construct with `:` as the
/// operator token.
fn parse_type_ann_with_colon<'a>(type_ann: Node<'a>, colon_token: Option<&TokenAndSpan>, context: &mut Context<'a>) -> PrintItems {
    parse_assignment_like_with_token(type_ann, ":", colon_token, context)
}
/// Options for printing the separator before an opening brace.
struct ParseBraceSeparatorOptions<'a> {
    /// Configured brace placement (same line, next line, maintain, ...).
    brace_position: BracePosition,
    /// The `{` token in the source, when found (used by `Maintain`).
    open_brace_token: Option<&'a TokenAndSpan>,
    /// Info at the start of the header, used to detect hanging headers.
    start_header_info: Option<Info>,
}
/// Prints what goes between a header and its opening brace (a space, a
/// newline, or a condition) according to the configured brace position.
fn parse_brace_separator<'a>(opts: ParseBraceSeparatorOptions<'a>, context: &mut Context) -> PrintItems {
    return match opts.brace_position {
        BracePosition::SameLineUnlessHanging => {
            if let Some(start_header_info) = opts.start_header_info {
                conditions::new_line_if_hanging_space_otherwise(conditions::NewLineIfHangingSpaceOtherwiseOptions {
                    start_info: start_header_info,
                    end_info: None,
                    space_char: Some(space_if_not_start_line()),
                }).into()
            } else {
                // no header info to detect hanging, so fall back to same-line
                space_if_not_start_line()
            }
        },
        BracePosition::SameLine => {
            space_if_not_start_line()
        },
        BracePosition::NextLine => {
            Signal::NewLine.into()
        },
        BracePosition::Maintain => {
            // keep the brace where the source author put it
            if let Some(open_brace_token) = opts.open_brace_token {
                if node_helpers::is_first_node_on_line(open_brace_token, context) {
                    Signal::NewLine.into()
                } else {
                    space_if_not_start_line()
                }
            } else {
                space_if_not_start_line()
            }
        },
    };

    /// A space, but only when the writer is not already at the start of a line
    /// (avoids leading whitespace after a forced newline).
    fn space_if_not_start_line() -> PrintItems {
        if_true(
            "spaceIfNotStartLine",
            |context| Some(!context.writer_info.is_start_of_line()),
            " ".into()
        ).into()
    }
}
/// Options for parsing a node wrapped in parentheses.
struct ParseNodeInParensOptions {
    /// Span of the inner node (excluding the parentheses).
    inner_span: Span,
    /// Prefer a hanging indent over surrounding newlines when multi-line.
    prefer_hanging: bool,
    /// Allow comments trailing the open paren to stay on its line.
    allow_open_paren_trailing_comments: bool,
}
/// Parses a node surrounded by parentheses, going multi-line when the source
/// already was (and single lines aren't preferred) or comments require it.
fn parse_node_in_parens<'a>(
    parse_node: impl FnOnce(&mut Context<'a>) -> PrintItems,
    opts: ParseNodeInParensOptions,
    context: &mut Context<'a>
) -> PrintItems {
    let inner_span = opts.inner_span;
    let paren_span = get_paren_span(&inner_span, context);
    let force_use_new_lines = get_force_use_new_lines(inner_span, &paren_span, context);
    return parse_surrounded_by_tokens(|context| {
        let parsed_node = parse_node(context);
        if force_use_new_lines {
            surround_with_new_lines(with_indent(parsed_node))
        } else if opts.prefer_hanging {
            parsed_node
        } else {
            // only surround with newlines when the content resolves multi-line
            parser_helpers::surround_with_newlines_indented_if_multi_line(parsed_node, context.config.indent_width)
        }
    }, |_| None, ParseSurroundedByTokensOptions {
        open_token: "(",
        close_token: ")",
        span_data: paren_span,
        first_member: Some(inner_span),
        prefer_single_line_when_empty: true,
        allow_open_token_trailing_comments: opts.allow_open_paren_trailing_comments,
    }, context);

    /// Multi-line is forced when the open paren and the inner node were on
    /// different lines (unless single-line is preferred), or when a comment
    /// sits on a different line than the inner node.
    fn get_force_use_new_lines(inner_span: Span, paren_span: &Option<Span>, context: &mut Context) -> bool {
        if !context.config.parentheses_prefer_single_line {
            if let Some(paren_span) = &paren_span {
                if node_helpers::get_use_new_lines_for_nodes(&paren_span.lo(), &inner_span, context) {
                    return true;
                }
            }
        }
        has_any_node_comment_on_different_line(&vec![inner_span], context)
    }
}
/// Returns the span covering the parentheses immediately surrounding
/// `inner_span`, or `None` when either paren token cannot be found.
fn get_paren_span<'a>(inner_span: &Span, context: &mut Context<'a>) -> Option<Span> {
    let open_paren = context.token_finder.get_previous_token_if_open_paren(inner_span);
    let close_paren = context.token_finder.get_next_token_if_close_paren(inner_span);
    match (open_paren, close_paren) {
        (Some(open_paren), Some(close_paren)) => Some(create_span_data(open_paren.lo(), close_paren.hi())),
        _ => None,
    }
}
/// Options for parsing an `extends` or `implements` clause.
struct ParseExtendsOrImplementsOptions<'a> {
    /// The keyword text to print ("extends" or "implements").
    text: &'a str,
    /// The type nodes listed after the keyword.
    type_items: Vec<Node<'a>>,
    /// Info at the start of the declaration header.
    start_header_info: Info,
    /// Prefer a hanging indent for the type list.
    prefer_hanging: bool,
}
/// Parses an `extends`/`implements` clause: the keyword followed by a
/// comma-separated type list, indented when it wraps to a new line.
/// Returns empty print items when there are no type items.
fn parse_extends_or_implements<'a>(opts: ParseExtendsOrImplementsOptions<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if opts.type_items.is_empty() {
        return items;
    }

    // newline when the header is hanging; otherwise a space (which may still
    // become a newline when over the configured width)
    items.push_condition(conditions::new_line_if_hanging_space_otherwise(conditions::NewLineIfHangingSpaceOtherwiseOptions {
        start_info: opts.start_header_info,
        end_info: None,
        space_char: Some(conditions::if_above_width_or(context.config.indent_width, Signal::SpaceOrNewLine.into(), " ".into()).into()),
    }));
    // the newline group will force it to put the extends or implements on a new line
    items.push_condition(conditions::indent_if_start_of_line(parser_helpers::new_line_group({
        let mut items = PrintItems::new();
        items.push_str(opts.text);
        items.extend(parse_separated_values(ParseSeparatedValuesOptions {
            // `.map(Some)` instead of a redundant closure (clippy: redundant_closure)
            nodes: opts.type_items.into_iter().map(Some).collect(),
            prefer_hanging: opts.prefer_hanging,
            force_use_new_lines: false,
            allow_blank_lines: false,
            separator: TrailingCommas::Never.into(),
            single_line_space_at_start: true,
            single_line_space_at_end: false,
            custom_single_line_separator: None,
            multi_line_options: parser_helpers::MultiLineOptions::new_line_start(),
            force_possible_newline_at_start: false,
        }, context));
        items
    })));
    return items;
}
/// Options for parsing a brace-delimited, object-like node
/// (object literal, interface body, etc.).
struct ParseObjectLikeNodeOptions<'a> {
    /// Span of the whole node including the braces.
    node_span_data: Span,
    /// The member nodes inside the braces.
    members: Vec<Node<'a>>,
    /// Separator to print between members.
    separator: Separator,
    /// Prefer a hanging indent over one member per line.
    prefer_hanging: bool,
    /// Prefer collapsing to a single line when possible.
    prefer_single_line: bool,
    /// Print spaces inside the braces in single-line layout (`{ a }` vs `{a}`).
    surround_single_line_with_spaces: bool,
}
/// Parses a brace-delimited object-like node, forcing multi-line layout when
/// the source already had members on separate lines (unless single-line is
/// preferred) and allowing blank lines between members.
fn parse_object_like_node<'a>(opts: ParseObjectLikeNodeOptions<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();

    let open_brace_token = context.token_finder.get_first_open_brace_token_within(&opts.node_span_data);
    let close_brace_token = context.token_finder.get_last_close_brace_token_within(&opts.node_span_data);
    let force_multi_line = get_use_new_lines_for_nodes_with_preceeding_token("{", &opts.members, opts.prefer_single_line, context);

    // `.first()` instead of `.get(0)` (clippy: get_first)
    let first_member_span_data = opts.members.first().map(|x| x.span_data());
    let obj_span_data = if let (Some(open_brace_token), Some(close_brace_token)) = (open_brace_token, close_brace_token) {
        Some(create_span_data(open_brace_token.lo(), close_brace_token.hi()))
    } else {
        None
    };

    items.extend(parse_surrounded_by_tokens(|context| {
        if opts.members.is_empty() {
            PrintItems::new()
        } else {
            parse_separated_values(ParseSeparatedValuesOptions {
                // `.map(Some)` instead of a redundant closure (clippy: redundant_closure)
                nodes: opts.members.into_iter().map(Some).collect(),
                prefer_hanging: opts.prefer_hanging,
                force_use_new_lines: force_multi_line,
                allow_blank_lines: true,
                separator: opts.separator,
                single_line_space_at_start: opts.surround_single_line_with_spaces,
                single_line_space_at_end: opts.surround_single_line_with_spaces,
                custom_single_line_separator: None,
                multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
                force_possible_newline_at_start: false,
            }, context)
        }
    }, |_| None, ParseSurroundedByTokensOptions {
        open_token: "{",
        close_token: "}",
        span_data: obj_span_data,
        first_member: first_member_span_data,
        prefer_single_line_when_empty: true,
        allow_open_token_trailing_comments: true,
    }, context));
    items
}
/// A member-access-like expression split into its two sides.
struct MemberLikeExpr<'a> {
    /// The object being accessed (left of the `.` or `[`).
    left_node: Node<'a>,
    /// The property/element being accessed.
    right_node: Node<'a>,
    /// True for computed access (`a[b]`) rather than dot access (`a.b`).
    is_computed: bool,
}
/// Parses a member-access-like expression (`a.b`, `a?.b`, `a[b]`), deciding
/// where newlines may occur based on the "top most" member expression in the
/// chain so the whole chain breaks consistently.
fn parse_for_member_like_expr<'a>(node: MemberLikeExpr<'a>, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    let force_use_new_line = !context.config.member_expression_prefer_single_line
        && node_helpers::get_use_new_lines_for_nodes(&node.left_node, &node.right_node, context);
    let is_optional = context.parent().kind() == NodeKind::OptChainExpr;
    let top_most_data = get_top_most_data(context);

    // only the top-most chain node owns the start/end infos
    if top_most_data.is_top_most {
        items.push_info(top_most_data.top_most_start_info);
    }

    items.extend(parse_node(node.left_node, context));
    if is_optional || !node.is_computed {
        if force_use_new_line {
            items.push_signal(Signal::NewLine);
        } else if !context.config.member_expression_line_per_expression {
            items.push_condition(conditions::if_above_width(
                context.config.indent_width,
                Signal::PossibleNewLine.into()
            ));
        } else {
            // line-per-expression mode: break here whenever the whole chain is multi-line
            let top_most_start_info = top_most_data.top_most_start_info;
            let top_most_end_info = top_most_data.top_most_end_info;
            items.push_condition(if_true_or(
                "isMultipleLines",
                move |context| condition_resolvers::is_multiple_lines(context, &top_most_start_info, &top_most_end_info),
                Signal::NewLine.into(),
                Signal::PossibleNewLine.into(),
            ));
        }
    }

    // store this right before the last right expression
    if top_most_data.is_top_most {
        items.push_info(top_most_data.top_most_end_info);
    }

    items.push_condition(conditions::indent_if_start_of_line({
        let mut items = PrintItems::new();
        let is_computed = node.is_computed;
        let right_node_span_data = node.right_node.span_data();
        items.extend(parse_node_with_inner_parse(node.right_node, context, |node_items, context| {
            let mut items = PrintItems::new();
            if is_optional {
                items.push_str("?");
                if is_computed { items.push_str("."); }
            }
            if is_computed {
                items.extend(parse_computed_prop_like(ParseComputedPropLikeOptions {
                    inner_node_span_data: right_node_span_data,
                    inner_items: node_items,
                }, context));
            } else {
                items.push_str(".");
                items.extend(node_items);
            }
            items
        }));
        items
    }));
    return items;

    /// Infos marking the span of the outermost member expression in the chain,
    /// plus whether the current node IS that outermost expression.
    struct TopMostData {
        top_most_start_info: Info,
        top_most_end_info: Info,
        is_top_most: bool,
    }

    /// Walks up the ancestors to find the outermost member expression the
    /// current node belongs to, creating or retrieving its start/end infos.
    fn get_top_most_data(context: &mut Context) -> TopMostData {
        // The "top most" node follows the ancestors up through the left expressions...
        //
        // member.expression.test
        //     left: member.expression
        //         left: member
        //         right: expression
        //     right: test
        let current_node = &context.current_node;
        let mut top_most_node = &context.current_node;

        for ancestor in context.parent_stack.iter() {
            if let Node::MemberExpr(_) = ancestor {
                top_most_node = ancestor;
            } else if let Node::MetaPropExpr(_) = ancestor {
                top_most_node = ancestor;
            } else {
                break;
            }
        }

        let top_most_range = top_most_node.span_data();
        let is_top_most = top_most_range.lo() == current_node.lo() && top_most_range.hi() == current_node.hi();
        let (top_most_start_info, top_most_end_info) = get_or_set_top_most_infos(&top_most_range, is_top_most, context);

        return TopMostData {
            is_top_most,
            top_most_start_info,
            top_most_end_info,
        };

        /// The top-most node stores the infos; descendants look them up.
        fn get_or_set_top_most_infos(range: &impl Ranged, is_top_most: bool, context: &mut Context) -> (Info, Info) {
            if is_top_most {
                let infos = (Info::new("topMemberStart"), Info::new("topMemberEnd"));
                context.store_info_range_for_node(range, infos);
                infos
            } else {
                context.get_info_range_for_node(range).expect("Expected to have the top most expr info stored")
            }
        }
    }
}
/// Options for parsing a bracketed, computed-property-like construct (`[...]`).
struct ParseComputedPropLikeOptions {
    /// Span of the node inside the brackets.
    inner_node_span_data: Span,
    /// Already-parsed print items for the inner node.
    inner_items: PrintItems,
}
/// Parses a bracketed construct (e.g. `obj[key]`), forcing multi-line layout
/// when the open bracket and inner node were on different lines in the source
/// (unless single-line is preferred).
fn parse_computed_prop_like<'a>(opts: ParseComputedPropLikeOptions, context: &mut Context<'a>) -> PrintItems {
    let inner_node_span_data = opts.inner_node_span_data;
    let inner_items = opts.inner_items;
    let span_data = get_bracket_span(&inner_node_span_data, context);
    let force_use_new_lines = !context.config.computed_prefer_single_line
        && if let Some(span_data) = &span_data {
            node_helpers::get_use_new_lines_for_nodes(&span_data.lo(), &inner_node_span_data.lo(), context)
        } else {
            false
        };

    return new_line_group(parse_surrounded_by_tokens(|context| {
        if force_use_new_lines {
            surround_with_new_lines(with_indent(inner_items))
        } else {
            // only surround with newlines when the content resolves multi-line
            parser_helpers::surround_with_newlines_indented_if_multi_line(inner_items, context.config.indent_width)
        }
    }, |_| None, ParseSurroundedByTokensOptions {
        open_token: "[",
        close_token: "]",
        span_data,
        first_member: Some(inner_node_span_data),
        prefer_single_line_when_empty: false,
        allow_open_token_trailing_comments: true,
    }, context));

    /// Finds the span covering the brackets surrounding the node, when both
    /// bracket tokens exist.
    fn get_bracket_span(node: &dyn Ranged, context: &mut Context) -> Option<Span> {
        let open_bracket = context.token_finder.get_previous_token_if_open_bracket(node);
        let close_bracket = context.token_finder.get_next_token_if_close_bracket(node);
        if let Some(open_bracket) = open_bracket {
            if let Some(close_bracket) = close_bracket {
                return Some(create_span_data(open_bracket.lo(), close_bracket.hi()));
            }
        }
        None
    }
}
/// Parses a list of decorators, either inline (before the decorated node on
/// the same line) or with each decorator flushed by a newline, and then parses
/// any comments between the last decorator and the following token.
///
/// Takes `&[Decorator]` rather than `&Vec<Decorator>` (clippy: ptr_arg);
/// existing `&vec` call sites still work via deref coercion.
fn parse_decorators<'a>(decorators: &'a [Decorator], is_inline: bool, context: &mut Context<'a>) -> PrintItems {
    let mut items = PrintItems::new();
    if decorators.is_empty() {
        return items;
    }

    // keep the decorators multi-line when the first two were already on
    // separate lines in the source (and single-line isn't preferred)
    let force_use_new_lines = !context.config.decorators_prefer_single_line
        && decorators.len() >= 2
        && node_helpers::get_use_new_lines_for_nodes(&decorators[0], &decorators[1], context);

    let separated_values_result = parse_separated_values_with_result(ParseSeparatedValuesOptions {
        nodes: decorators.iter().map(|p| Some(p.into())).collect(),
        prefer_hanging: false, // would need to think about the design because prefer_hanging causes a hanging indent
        force_use_new_lines,
        allow_blank_lines: false,
        separator: Separator::none(),
        single_line_space_at_start: false,
        single_line_space_at_end: is_inline,
        custom_single_line_separator: None,
        multi_line_options: parser_helpers::MultiLineOptions::same_line_no_indent(),
        force_possible_newline_at_start: false,
    }, context);

    items.extend(separated_values_result.items);

    if is_inline {
        // inline decorators only need a trailing newline when they went multi-line
        let is_multi_line = separated_values_result.is_multi_line_condition_ref.create_resolver();
        items.push_condition(if_true("inlineMultiLineSpace", is_multi_line, Signal::NewLine.into()));
    } else {
        items.push_signal(Signal::NewLine);
    }

    // parse the comments between the last decorator and the next token
    if let Some(last_dec) = decorators.last() {
        let next_token_pos = context.token_finder.get_next_token_pos_after(last_dec);
        items.extend(parse_leading_comments(&next_token_pos, context));
    }

    return items;
}
/// Prints the separator between a control-flow block and its continuation
/// keyword (`else`, `catch`, `finally`, `while` of do-while), per the
/// configured next-control-flow position.
fn parse_control_flow_separator(
    next_control_flow_position: NextControlFlowPosition,
    previous_node_block: &Span,
    token_text: &str,
    previous_start_info: Info,
    previous_close_brace_condition_ref: Option<ConditionReference>,
    context: &mut Context
) -> PrintItems {
    let mut items = PrintItems::new();
    match next_control_flow_position {
        NextControlFlowPosition::SameLine => {
            items.push_condition(if_true_or(
                "newLineOrSpace",
                move |condition_context| {
                    // newline if on the same line as the previous
                    if condition_resolvers::is_on_same_line(condition_context, &previous_start_info)? {
                        return Some(true);
                    }
                    // newline if the previous did not have a close brace
                    if let Some(previous_close_brace_condition_ref) = previous_close_brace_condition_ref {
                        if !condition_context.get_resolved_condition(&previous_close_brace_condition_ref)? {
                            return Some(true);
                        }
                    }
                    Some(false)
                },
                Signal::NewLine.into(),
                " ".into(),
            ));
        },
        NextControlFlowPosition::NextLine => items.push_signal(Signal::NewLine),
        NextControlFlowPosition::Maintain => {
            let token = context.token_finder.get_first_keyword_after(previous_node_block, token_text);
            // keep the keyword where the source author put it: a newline only
            // when the keyword already started its own line.
            // (pattern match instead of `is_some() && ... unwrap()`)
            match token {
                Some(token) if node_helpers::is_first_node_on_line(token, context) => items.push_signal(Signal::NewLine),
                _ => items.push_str(" "),
            }
        }
    }
    return items;
}
/// Options for parsing a header (e.g. `if (...)`) followed by a body whose
/// braces are conditionally printed.
struct ParseHeaderWithConditionalBraceBodyOptions<'a> {
    /// Span of the parent statement.
    parent: Span,
    /// The body node following the header.
    body_node: Node<'a>,
    /// Already-parsed print items for the header.
    parsed_header: PrintItems,
    /// Configured brace usage (always, maintain, prefer none, ...).
    use_braces: UseBraces,
    /// Configured brace placement.
    brace_position: BracePosition,
    /// Configured placement for a braceless single-statement body.
    single_body_position: Option<SingleBodyPosition>,
    /// Condition from a related clause that can force braces here.
    requires_braces_condition_ref: Option<ConditionReference>,
}
/// Result of parsing a header with a conditional-brace body.
struct ParseHeaderWithConditionalBraceBodyResult {
    /// The combined header + body print items.
    parsed_node: PrintItems,
    /// Reference to the condition that decided whether `{` was printed.
    open_brace_condition_ref: ConditionReference,
    /// Reference to the condition that decided whether `}` was printed.
    close_brace_condition_ref: ConditionReference,
}
/// Parses a header followed by a conditional-brace body, bracketing the
/// header with start/end infos so the body parser can tell whether the
/// header is hanging or spans multiple lines.
fn parse_header_with_conditional_brace_body<'a>(opts: ParseHeaderWithConditionalBraceBodyOptions<'a>, context: &mut Context<'a>) -> ParseHeaderWithConditionalBraceBodyResult {
    let start_header_info = Info::new("startHeader");
    let end_header_info = Info::new("endHeader");

    let mut parsed_node = PrintItems::new();
    parsed_node.push_info(start_header_info);
    parsed_node.extend(opts.parsed_header);
    parsed_node.push_info(end_header_info);

    let body_result = parse_conditional_brace_body(ParseConditionalBraceBodyOptions {
        parent: opts.parent,
        body_node: opts.body_node,
        use_braces: opts.use_braces,
        brace_position: opts.brace_position,
        single_body_position: opts.single_body_position,
        requires_braces_condition_ref: opts.requires_braces_condition_ref,
        header_start_token: None,
        start_header_info: Some(start_header_info),
        end_header_info: Some(end_header_info),
    }, context);
    parsed_node.extend(body_result.parsed_node);

    ParseHeaderWithConditionalBraceBodyResult {
        parsed_node,
        open_brace_condition_ref: body_result.open_brace_condition_ref,
        close_brace_condition_ref: body_result.close_brace_condition_ref,
    }
}
/// Options for parsing a body whose braces are conditionally printed.
struct ParseConditionalBraceBodyOptions<'a> {
    /// Span of the parent statement.
    parent: Span,
    /// The body node.
    body_node: Node<'a>,
    /// Configured brace usage.
    use_braces: UseBraces,
    /// Configured brace placement.
    brace_position: BracePosition,
    /// Configured placement for a braceless single-statement body.
    single_body_position: Option<SingleBodyPosition>,
    /// Condition from a related clause that can force braces here.
    requires_braces_condition_ref: Option<ConditionReference>,
    /// The token starting the header, when there is no parsed header.
    header_start_token: Option<&'a TokenAndSpan>,
    /// Info at the start of the header, when one was parsed.
    start_header_info: Option<Info>,
    /// Info at the end of the header, when one was parsed.
    end_header_info: Option<Info>,
}
/// Result of parsing a conditional-brace body.
struct ParseConditionalBraceBodyResult {
    /// The body's print items.
    parsed_node: PrintItems,
    /// Reference to the condition that decided whether `{` was printed.
    open_brace_condition_ref: ConditionReference,
    /// Reference to the condition that decided whether `}` was printed.
    close_brace_condition_ref: ConditionReference,
}
fn parse_conditional_brace_body<'a>(opts: ParseConditionalBraceBodyOptions<'a>, context: &mut Context<'a>) -> ParseConditionalBraceBodyResult {
// todo: reorganize...
let start_info = Info::new("startInfo");
let end_info = Info::new("endInfo");
let start_header_info = opts.start_header_info;
let end_header_info = opts.end_header_info;
let requires_braces_condition = opts.requires_braces_condition_ref;
let start_inner_text_info = Info::new("startInnerText");
let end_first_line_comments_info = Info::new("endFirstLineComments");
let start_statements_info = Info::new("startStatements");
let end_statements_info = Info::new("endStatements");
let header_trailing_comments = get_header_trailing_comments(&opts.body_node, context);
let body_should_be_multi_line = get_body_should_be_multi_line(&opts.body_node, &header_trailing_comments, context);
let should_use_new_line = get_should_use_new_line(
&opts.body_node,
body_should_be_multi_line,
&opts.single_body_position,
&opts.header_start_token,
&opts.parent,
context
);
let open_brace_token = get_open_brace_token(&opts.body_node, context);
let use_braces = opts.use_braces;
let is_body_empty_stmt = opts.body_node.kind() == NodeKind::EmptyStmt;
let mut space_condition = if_true(
"spaceCondition",
move |condition_context| {
if is_body_empty_stmt { return Some(false); }
if let Some(has_first_line_comments) = condition_resolvers::are_infos_not_equal(condition_context, &start_inner_text_info, &end_first_line_comments_info) {
if has_first_line_comments {
return Some(true);
}
}
let start_inner_text_info = condition_context.get_resolved_info(&start_inner_text_info)?;
let end_statements_info = condition_context.get_resolved_info(&end_statements_info)?;
if start_inner_text_info.line_number < end_statements_info.line_number {
return Some(false);
}
return Some(start_inner_text_info.column_number < end_statements_info.column_number);
},
Signal::SpaceOrNewLine.into(),
);
let space_condition_ref = space_condition.get_reference();
let mut newline_condition = if_true(
"newLineCondition",
move |condition_context| {
if is_body_empty_stmt { return Some(false); }
if should_use_new_line {
return Some(true);
}
let start_header_info = start_header_info.as_ref()?;
let resolved_start_info = condition_context.get_resolved_info(start_header_info)?;
if resolved_start_info.line_number < condition_context.writer_info.line_number {
return Some(true);
}
let resolved_end_statements_info = condition_context.get_resolved_info(&end_statements_info)?;
return Some(resolved_end_statements_info.line_number > resolved_start_info.line_number);
},
Signal::NewLine.into(),
);
let newline_condition_ref = newline_condition.get_reference();
let force_braces = get_force_braces(&opts.body_node);
let mut open_brace_condition = Condition::new_with_dependent_infos("openBrace", ConditionProperties {
condition: {
let has_open_brace_token = open_brace_token.is_some();
Rc::new(Box::new(move |condition_context| {
// never use braces for a single semi-colon on the end (ex. `for(;;);`)
if is_body_empty_stmt { return Some(false); }
match use_braces {
UseBraces::WhenNotSingleLine => {
if force_braces {
Some(true)
} else {
let is_multiple_lines = condition_resolvers::is_multiple_lines(
condition_context,
&start_header_info.unwrap_or(start_info),
&end_info
)?;
Some(is_multiple_lines)
}
},
UseBraces::Maintain => Some(force_braces || has_open_brace_token),
UseBraces::Always => Some(true),
UseBraces::PreferNone => {
if force_braces || body_should_be_multi_line {
return Some(true)
}
if let Some(start_header_info) = &start_header_info {
if let Some(end_header_info) = &end_header_info {
let is_header_multiple_lines = condition_resolvers::is_multiple_lines(condition_context, start_header_info, end_header_info)?;
if is_header_multiple_lines {
return Some(true);
}
}
}
let is_statements_multiple_lines = condition_resolvers::is_multiple_lines(condition_context, &start_statements_info, &end_statements_info)?;
if is_statements_multiple_lines {
return Some(true);
}
if let Some(requires_braces_condition) = &requires_braces_condition {
let requires_braces = condition_context.get_resolved_condition(requires_braces_condition)?;
if requires_braces {
return Some(true);
}
}
return Some(false);
}
}
}))
},
true_path: {
let mut items = PrintItems::new();
items.extend(parse_brace_separator(ParseBraceSeparatorOptions {
brace_position: opts.brace_position,
open_brace_token: open_brace_token,
start_header_info,
}, context));
items.push_str("{");
Some(items)
},
false_path: None,
}, vec![end_info]);
let open_brace_condition_ref = open_brace_condition.get_reference();
// parse body
let mut items = PrintItems::new();
items.push_info(start_info);
items.push_condition(open_brace_condition);
items.push_condition(space_condition);
items.push_info(start_inner_text_info);
let parsed_comments = parse_comment_collection(header_trailing_comments.into_iter(), None, None, context);
if !parsed_comments.is_empty() {
items.push_condition(conditions::indent_if_start_of_line(parsed_comments));
}
items.push_info(end_first_line_comments_info);
items.push_condition(newline_condition);
items.push_info(start_statements_info);
if let Node::BlockStmt(body_node) = opts.body_node {
items.extend(parser_helpers::with_indent({
let mut items = PrintItems::new();
// parse the remaining trailing comments inside because some of them are parsed already
// by parsing the header trailing comments
items.extend(parse_leading_comments(body_node, context));
items.extend(parse_statements(body_node.get_inner_span_data(context), body_node.stmts.iter().map(|x| x.into()), context));
items
}));
} else {
items.extend(parser_helpers::with_indent({
let mut items = PrintItems::new();
let body_node_span_data = opts.body_node.span_data();
items.extend(parse_node(opts.body_node, context));
items.extend(parse_trailing_comments(&body_node_span_data, context));
items
}));
}
items.push_info(end_statements_info);
let mut close_brace_condition = if_true(
"closeBrace",
move |condition_context| condition_context.get_resolved_condition(&open_brace_condition_ref),
{
let mut items = PrintItems::new();
items.push_condition(if_true_or(
"closeBraceNewLine",
move |condition_context| {
let is_new_line = condition_context.get_resolved_condition(&newline_condition_ref)?;
if !is_new_line { return Some(false); }
let has_statement_text = condition_resolvers::are_infos_not_equal(condition_context, &start_statements_info, &end_statements_info)?;
return Some(has_statement_text);
},
Signal::NewLine.into(),
if_true(
"closeBraceSpace",
move |condition_context| {
if condition_resolvers::is_at_same_position(condition_context, &start_inner_text_info)? {
return Some(false);
}
let had_space = condition_context.get_resolved_condition(&space_condition_ref)?;
return Some(had_space);
},
" ".into(),
).into()
));
items.push_str("}");
items
},
);
let close_brace_condition_ref = close_brace_condition.get_reference();
items.push_condition(close_brace_condition);
items.push_info(end_info);
// return result
return ParseConditionalBraceBodyResult {
parsed_node: items,
open_brace_condition_ref,
close_brace_condition_ref,
};
/// Gets whether the statement body should start on a new line.
///
/// Check order: a body that must be multi-line always gets a new line;
/// otherwise the configured `single_body_position` decides; otherwise the
/// default is a new line, except for an empty block written on one line.
fn get_should_use_new_line<'a>(
body_node: &Node,
body_should_be_multi_line: bool,
single_body_position: &Option<SingleBodyPosition>,
header_start_token: &Option<&'a TokenAndSpan>,
parent: &Span,
context: &mut Context<'a>
) -> bool {
if body_should_be_multi_line {
return true;
}
if let Some(single_body_position) = single_body_position {
return match single_body_position {
// maintain: keep a new line only if the source already had the body on a later line
SingleBodyPosition::Maintain => get_body_stmt_start_line(body_node, context) > get_header_start_line(header_start_token, parent, context),
SingleBodyPosition::NextLine => true,
SingleBodyPosition::SameLine => {
if let Node::BlockStmt(block_stmt) = body_node {
// a block with zero or multiple statements can't be collapsed onto the header line
if block_stmt.stmts.len() != 1 {
return true;
}
return get_body_stmt_start_line(body_node, context) > get_header_start_line(header_start_token, parent, context);
}
return false;
},
}
} else {
if let Node::BlockStmt(block_stmt) = body_node {
if block_stmt.stmts.len() == 0 {
// keep the block on the same line
return block_stmt.start_line(context) < block_stmt.end_line(context);
}
}
return true;
}
// Gets the line the body's first statement starts on (or the body node itself when empty/brace-less).
fn get_body_stmt_start_line(body_node: &Node, context: &mut Context) -> usize {
if let Node::BlockStmt(body_node) = body_node {
if let Some(first_stmt) = body_node.stmts.get(0) {
return first_stmt.start_line(context);
}
}
return body_node.start_line(context);
}
// Gets the line the header starts on, falling back to the parent span's start line.
fn get_header_start_line<'a>(header_start_token: &Option<&'a TokenAndSpan>, parent: &Span, context: &mut Context<'a>) -> usize {
if let Some(header_start_token) = header_start_token {
return header_start_token.start_line(context);
}
return parent.start_line(context);
}
}
/// Gets whether the body must be formatted across multiple lines.
///
/// A block body stays eligible for single-line formatting only when it has a
/// single statement with no leading comment on another line, or when it is
/// empty and written on one line. A brace-less body is multi-line only when a
/// leading comment forces it.
fn get_body_should_be_multi_line<'a>(body_node: &Node<'a>, header_trailing_comments: &Vec<&'a Comment>, context: &mut Context<'a>) -> bool {
    let mut has_leading_comment_on_different_line = |node: &dyn Ranged| {
        node_helpers::has_leading_comment_on_different_line(
            node,
            /* comments to ignore */ Some(header_trailing_comments),
            context
        )
    };
    if let Node::BlockStmt(body_node) = body_node {
        if body_node.stmts.len() == 1 && !has_leading_comment_on_different_line(&body_node.stmts[0]) {
            return false;
        }
        if body_node.stmts.is_empty() && body_node.start_line(context) == body_node.end_line(context) {
            return false;
        }
        true
    } else {
        has_leading_comment_on_different_line(body_node)
    }
}
/// Gets whether braces must be kept: an empty block (`{}`) always keeps its
/// braces since removing them would delete the statement entirely.
/// (The unused `'a` lifetime parameter of the original signature was removed;
/// call sites never name it.)
fn get_force_braces(body_node: &Node) -> bool {
    matches!(body_node, Node::BlockStmt(body_node) if body_node.stmts.is_empty())
}
/// Collects the line comments that trail the statement header (ex. the
/// `// foo` in `if (a) { // foo`) so they can be re-emitted after the open brace.
fn get_header_trailing_comments<'a>(body_node: &Node<'a>, context: &mut Context<'a>) -> Vec<&'a Comment> {
    let mut comments = Vec::new();
    if let Node::BlockStmt(block_stmt) = body_node {
        // a line comment leading the block takes precedence over anything
        // trailing the open brace token
        let comment_line = body_node.leading_comments(context).find(|c| c.kind == CommentKind::Line);
        if let Some(comment) = comment_line {
            comments.push(comment);
            return comments;
        }
        let open_brace_token = context.token_finder.get_first_open_brace_token_within(*block_stmt).expect("Expected to find an open brace token.");
        let body_node_start_line = body_node.start_line(context);
        // only take line comments sitting on the same line as the open brace
        comments.extend(open_brace_token.trailing_comments(context).take_while(|c| c.start_line(context) == body_node_start_line && c.kind == CommentKind::Line));
    } else {
        // no braces: take line comments that appear before the body node but
        // on (or before) the line where the header's last token ended
        let leading_comments = body_node.leading_comments(context);
        let last_header_token_end = context.token_finder.get_previous_token_end_before(body_node);
        let last_header_token_end_line = last_header_token_end.end_line(context);
        comments.extend(leading_comments.take_while(|c| c.start_line(context) <= last_header_token_end_line && c.kind == CommentKind::Line));
    }
    comments
}
/// Finds the body's open brace token; brace-less bodies have none.
fn get_open_brace_token<'a>(body_node: &Node<'a>, context: &mut Context<'a>) -> Option<&'a TokenAndSpan> {
    match body_node {
        Node::BlockStmt(block_stmt) => context.token_finder.get_first_open_brace_token_within(*block_stmt),
        _ => None,
    }
}
}
/// Options for parsing a JSX element that has both an opening and a closing element.
struct ParseJsxWithOpeningAndClosingOptions<'a> {
/// The opening element node (ex. `<div>`).
opening_element: Node<'a>,
/// The closing element node (ex. `</div>`).
closing_element: Node<'a>,
/// The child nodes found between the opening and closing elements.
children: Vec<Node<'a>>,
}
/// Parses a JSX element that has both an opening and closing element,
/// emitting the opening tag, the parsed children, then the closing tag.
fn parse_jsx_with_opening_and_closing<'a>(opts: ParseJsxWithOpeningAndClosingOptions<'a>, context: &mut Context<'a>) -> PrintItems {
    let force_use_multi_lines = get_force_use_multi_lines(&opts.opening_element, &opts.children, context);
    // drop whitespace-only JSXText children; the layout between the remaining
    // children is recreated by parse_jsx_children
    let children = opts.children.into_iter().filter(|c| match c {
        Node::JSXText(c) => !c.text(context).trim().is_empty(),
        _ => true,
    }).collect();
    let start_info = Info::new("startInfo");
    let end_info = Info::new("endInfo");
    let mut items = PrintItems::new();
    // the children live between the end of the opening and the start of the closing element
    let inner_span_data = create_span_data(opts.opening_element.span_data().hi, opts.closing_element.span_data().lo);
    items.push_info(start_info);
    items.extend(parse_node(opts.opening_element, context));
    items.extend(parse_jsx_children(ParseJsxChildrenOptions {
        inner_span_data,
        children,
        parent_start_info: start_info,
        parent_end_info: end_info,
        force_use_multi_lines,
    }, context));
    items.extend(parse_node(opts.closing_element, context));
    items.push_info(end_info);
    return items;

    // Multi-line is forced (unless configured otherwise) when the first text
    // child contains a newline or the first child starts on a different line.
    fn get_force_use_multi_lines(opening_element: &Node, children: &Vec<Node>, context: &mut Context) -> bool {
        if context.config.jsx_element_prefer_single_line {
            false
        } else if let Some(first_child) = children.first() {
            if let Node::JSXText(first_child) = first_child {
                if first_child.text(context).contains('\n') {
                    return true;
                }
            }
            node_helpers::get_use_new_lines_for_nodes(opening_element, first_child, context)
        } else {
            false
        }
    }
}
/// Options for parsing the children of a JSX element.
struct ParseJsxChildrenOptions<'a> {
/// Span between the end of the opening element and the start of the closing element.
inner_span_data: Span,
/// The (already filtered) child nodes to parse.
children: Vec<Node<'a>>,
/// Info pushed at the start of the parent element.
parent_start_info: Info,
/// Info pushed at the end of the parent element.
parent_end_info: Info,
/// When true, children are always placed on separate lines.
force_use_multi_lines: bool,
}
/// Parses JSX children either one-per-line or packed on a single line,
/// deciding at print time (via a condition) when not forced multi-line.
fn parse_jsx_children<'a>(opts: ParseJsxChildrenOptions<'a>, context: &mut Context<'a>) -> PrintItems {
// Need to parse the children here so they only get parsed once.
// Nodes need to be only parsed once so that their comments don't end up in
// the handled comments collection and the second time they won't be parsed out.
let children = opts.children.into_iter().map(|c| (c.clone(), parse_node(c, context).into_rc_path())).collect();
let parent_start_info = opts.parent_start_info;
let parent_end_info = opts.parent_end_info;
if opts.force_use_multi_lines {
return parse_for_new_lines(children, opts.inner_span_data, context);
}
else {
// decide whether newlines should be used or not
return if_true_or(
"jsxChildrenNewLinesOrNot",
move |condition_context| {
// use newlines if the header is multiple lines
let resolved_parent_start_info = condition_context.get_resolved_info(&parent_start_info)?;
if resolved_parent_start_info.line_number < condition_context.writer_info.line_number {
return Some(true);
}
// use newlines if the entire jsx element is on multiple lines
return condition_resolvers::is_multiple_lines(condition_context, &parent_start_info, &parent_end_info);
},
parse_for_new_lines(children.clone(), opts.inner_span_data, context),
parse_for_single_line(children, context),
).into();
}
// Emits each child on its own line (blank lines preserved), indented within the parent.
fn parse_for_new_lines<'a>(children: Vec<(Node<'a>, Option<PrintItemPath>)>, inner_span_data: Span, context: &mut Context<'a>) -> PrintItems {
let mut items = PrintItems::new();
let has_children = !children.is_empty();
items.push_signal(Signal::NewLine);
items.extend(parser_helpers::with_indent(parse_statements_or_members(ParseStatementsOrMembersOptions {
inner_span_data,
items: children.into_iter().map(|(a, b)| (a, Some(b.into()))).collect(),
// significant whitespace at a JSXText boundary becomes a single space
should_use_space: Some(Box::new(|previous, next, context| {
if let Node::JSXText(element) = previous {
element.text(context).ends_with(" ")
} else if let Node::JSXText(element) = next {
element.text(context).starts_with(" ")
} else {
false
}
})),
// JSXText without surrounding newlines keeps its neighbor on the same line
should_use_new_line: Some(Box::new(|previous, next, context| {
if let Node::JSXText(next) = next {
return !utils::has_no_new_lines_in_leading_whitespace(next.text(context));
}
if let Node::JSXText(previous) = previous {
return !utils::has_no_new_lines_in_trailing_whitespace(previous.text(context));
}
return true;
})),
// two or more newlines in the JSXText whitespace preserves a blank line
should_use_blank_line: |previous, next, context| {
if let Node::JSXText(previous) = previous {
return utils::has_new_line_occurrences_in_trailing_whitespace(previous.text(context), 2);
}
if let Node::JSXText(next) = next {
return utils::has_new_line_occurrences_in_leading_whitespace(next.text(context), 2);
}
return node_helpers::has_separating_blank_line(previous, next, context);
},
separator: Separator::none(),
}, context)));
if has_children {
items.push_signal(Signal::NewLine);
}
items
}
// Emits the children packed on one line, separating with a space only where
// the source whitespace demands it, while still allowing line breaks.
fn parse_for_single_line<'a>(children: Vec<(Node<'a>, Option<PrintItemPath>)>, context: &mut Context<'a>) -> PrintItems {
let mut items = PrintItems::new();
if children.is_empty() {
items.push_signal(Signal::PossibleNewLine);
} else {
for (index, (child, parsed_child)) in children.into_iter().enumerate() {
if index > 0 && should_use_space(&child, context) {
items.push_signal(Signal::SpaceOrNewLine);
} else {
items.push_signal(Signal::PossibleNewLine);
}
items.extend(parsed_child.into());
}
items.push_signal(Signal::PossibleNewLine);
}
items
}
// A space is kept when the preceding JSXText token ends with a space on the
// same line, or when the child's own text starts with a space.
fn should_use_space(child: &Node, context: &mut Context) -> bool {
let past_token = context.token_finder.get_previous_token(child);
if let Some(TokenAndSpan { token: swc_ecmascript::parser::token::Token::JSXText { .. }, span, had_line_break }) = past_token {
let text = span.text(context);
if !had_line_break && text.ends_with(" ") {
return true;
}
}
if let Node::JSXText(child) = child {
child.text(context).starts_with(" ")
} else {
false
}
}
}
/// Parses the right side of an assignment-like construct, locating the
/// operator token (`op`) that immediately precedes the expression.
fn parse_assignment<'a>(expr: Node<'a>, op: &str, context: &mut Context<'a>) -> PrintItems {
let op_token = context.token_finder.get_previous_token(&expr);
// debug builds verify the found token really is the expected operator text
#[cfg(debug_assertions)]
assert_has_op(op, op_token, context);
parse_assignment_like_with_token(expr, op, op_token, context)
}
/// Parses `op` followed by the assigned expression, handling comments that
/// trail the operator and indenting the value when it starts on a new line.
fn parse_assignment_like_with_token<'a>(expr: Node<'a>, op: &str, op_token: Option<&TokenAndSpan>, context: &mut Context<'a>) -> PrintItems {
let use_new_line_group = get_use_new_line_group(&expr);
let mut items = PrintItems::new();
if op == ":" { items.push_str(op) } else { items.push_str(&format!(" {}", op)) }; // good enough for now...
// position just after the operator; when no token was found, fall back to the
// end of whatever token precedes the expression
let op_end = op_token.map(|x| x.hi()).unwrap_or_else(|| context.token_finder.get_previous_token_end_before(&expr));
let op_trailing_comments = get_op_trailing_comments(op_end, context);
let had_op_trailing_comments = !op_trailing_comments.is_empty();
if !op_trailing_comments.is_empty() {
// emit the comments indented after the operator, then force the value to a new line
items.extend(with_indent({
let mut items = PrintItems::new();
if let Some(first_comment) = op_trailing_comments.iter().next() {
if first_comment.kind == CommentKind::Block {
items.push_signal(Signal::SpaceIfNotTrailing);
}
}
items.extend(parse_comment_collection(op_trailing_comments.into_iter(), Some(&op_end), None, context));
items
}));
items.push_signal(Signal::NewLine);
}
let parsed_assignment = {
let mut items = PrintItems::new();
if !had_op_trailing_comments {
// allow a line break before the value only when past the indent width
items.push_condition(conditions::if_above_width_or(
context.config.indent_width,
{
let mut items = PrintItems::new();
items.push_signal(Signal::SpaceIfNotTrailing);
items.push_signal(Signal::PossibleNewLine);
items
},
Signal::SpaceIfNotTrailing.into()
).into());
}
let assignment = parse_node(expr, context);
let assignment = if had_op_trailing_comments { assignment } else { conditions::indent_if_start_of_line(assignment).into() };
let assignment = if use_new_line_group { new_line_group(assignment) } else { assignment };
items.extend(assignment);
items
}.into_rc_path();
// always indent the value after trailing operator comments; otherwise only
// when the line it starts on is already indented
items.push_condition(if_true_or(
"indentIfStartOfLineIndentedOrTokenHadTrailingLineComment",
move |context| Some(had_op_trailing_comments || condition_resolvers::is_start_of_line_indented(context)),
with_indent(parsed_assignment.clone().into()),
parsed_assignment.into()
));
return items;
// Member expressions get a newline group so they break as one unit.
fn get_use_new_line_group(expr: &Node) -> bool {
match expr {
Node::MemberExpr(_) => true,
_ => false,
}
}
// Collects unhandled comments after the operator that start before the line
// the next token begins on (so comments owned by the value are left alone).
fn get_op_trailing_comments<'a>(previous_token_end: BytePos, context: &mut Context<'a>) -> Vec<&'a Comment> {
let mut comments = Vec::new();
let trailing_comments = previous_token_end.trailing_comments(context);
if !trailing_comments.is_empty() {
let next_token_pos = context.token_finder.get_next_token_pos_after(&previous_token_end);
let next_token_start_line = next_token_pos.start_line(context);
for comment in trailing_comments {
if !context.has_handled_comment(comment) && comment.start_line(context) < next_token_start_line {
comments.push(comment);
}
}
}
comments
}
}
/// Options for parsing a brace-surrounded block of children.
struct ParseBlockOptions<'a> {
/// Span covering the braces; `None` when the tokens are missing from the source.
span_data: Option<Span>,
/// The child nodes contained in the block.
children: Vec<Node<'a>>,
}
/// Parses a `{ ... }` block, delegating the inner content to `parse_inner`
/// and letting parse_surrounded_by_tokens handle the braces and comments.
fn parse_block<'a>(
parse_inner: impl FnOnce(Vec<Node<'a>>, &mut Context<'a>) -> PrintItems,
opts: ParseBlockOptions<'a>,
context: &mut Context<'a>
) -> PrintItems {
let mut items = PrintItems::new();
// NOTE(review): the label says "after_open_token_info" but the info is pushed
// before the open token below — looks like a stale debug label; verify before renaming.
let before_open_token_info = Info::new("after_open_token_info");
let first_member_span_data = opts.children.get(0).map(|x| x.span_data());
let span_data = opts.span_data;
items.push_info(before_open_token_info);
items.extend(parse_surrounded_by_tokens(|context| {
let mut items = PrintItems::new();
let start_inner_info = Info::new("startStatementsInfo");
let end_inner_info = Info::new("endStatementsInfo");
// an empty block written on one line keeps its single-line shape
let is_tokens_same_line_and_empty = if let Some(span_data) = &span_data {
span_data.start_line(context) == span_data.end_line(context) && opts.children.is_empty()
} else { true };
if !is_tokens_same_line_and_empty {
items.push_signal(Signal::NewLine);
}
items.push_info(start_inner_info);
items.extend(parser_helpers::with_indent(parse_inner(opts.children, context)));
items.push_info(end_inner_info);
if is_tokens_same_line_and_empty {
// still break before the close brace if printing pushed us to another line
items.push_condition(if_true(
"newLineIfDifferentLine",
move |context| condition_resolvers::is_on_different_line(context, &before_open_token_info),
Signal::NewLine.into()
));
} else {
// newline before the close brace unless the block printed nothing
items.push_condition(if_false(
"endNewline",
move |context| condition_resolvers::are_infos_equal(context, &start_inner_info, &end_inner_info),
Signal::NewLine.into(),
));
}
items
}, |_| None, ParseSurroundedByTokensOptions {
open_token: "{",
close_token: "}",
span_data,
first_member: first_member_span_data,
prefer_single_line_when_empty: false,
allow_open_token_trailing_comments: true,
}, context));
items
}
/// Options for parse_surrounded_by_tokens.
struct ParseSurroundedByTokensOptions {
/// Opening token text (ex. `{`).
open_token: &'static str,
/// Closing token text (ex. `}`).
close_token: &'static str,
/// When `None`, means the tokens are missing
span_data: Option<Span>,
/// Span of the first inner member, when any exists.
first_member: Option<Span>,
/// When the content is empty, prefer keeping open/close on one line.
prefer_single_line_when_empty: bool,
/// Whether a comment trailing the open token may stay on its line.
allow_open_token_trailing_comments: bool,
}
/// Parses content surrounded by a pair of tokens (ex. braces), taking care of
/// comments that sit next to either token. `custom_close_token` may replace
/// the plain close-token text; when `opts.span_data` is `None` the tokens are
/// missing from the source and only the token text plus inner content is emitted.
fn parse_surrounded_by_tokens<'a>(
parse_inner: impl FnOnce(&mut Context<'a>) -> PrintItems,
custom_close_token: impl FnOnce(&mut Context<'a>) -> Option<PrintItems>,
opts: ParseSurroundedByTokensOptions,
context: &mut Context<'a>
) -> PrintItems {
let mut items = PrintItems::new();
if let Some(span_data) = opts.span_data {
// token positions are derived from the span ends and the token lengths
let open_token_end = BytePos(span_data.lo.0 + (opts.open_token.len() as u32));
let close_token_start = BytePos(span_data.hi.0 - (opts.close_token.len() as u32));
// assert the tokens are in the place the caller says they are
#[cfg(debug_assertions)]
context.assert_text(span_data.lo, open_token_end.lo(), opts.open_token);
#[cfg(debug_assertions)]
context.assert_text(close_token_start.lo(), span_data.hi, opts.close_token);
// parse
let open_token_start_line = open_token_end.start_line(context);
items.push_str(opts.open_token);
if let Some(first_member) = opts.first_member {
// non-empty: inner content plus any comments before the close token
let first_member_start_line = first_member.start_line(context);
if opts.allow_open_token_trailing_comments && open_token_start_line < first_member_start_line {
items.extend(parse_first_line_trailing_comment(open_token_start_line, open_token_end.trailing_comments(context), context));
}
items.extend(parse_inner(context));
let before_trailing_comments_info = Info::new("beforeTrailingComments");
items.push_info(before_trailing_comments_info);
items.extend(with_indent(parse_trailing_comments_as_statements(&open_token_end, context)));
items.extend(with_indent(parse_comments_as_statements(close_token_start.leading_comments(context), None, context)));
// ensure the close token lands on its own line when comments were emitted
items.push_condition(if_true(
"newLineIfHasCommentsAndNotStartOfNewLine",
move |context| {
let had_comments = !condition_resolvers::is_at_same_position(context, &before_trailing_comments_info)?;
return Some(had_comments && !context.writer_info.is_start_of_line())
},
Signal::NewLine.into()
));
} else {
// empty contents: only comments (if any) live between the tokens
let comments = open_token_end.trailing_comments(context);
let is_single_line = open_token_start_line == close_token_start.start_line(context);
if !comments.is_empty() {
// parse the trailing comment on the first line only if multi-line and if a comment line
if !is_single_line {
items.extend(parse_first_line_trailing_comment(open_token_start_line, comments.clone(), context));
}
// parse the comments
if comments.has_unhandled_comment(context) {
if is_single_line {
let indent_width = context.config.indent_width;
items.extend(parser_helpers::parse_separated_values(|_| {
let mut parsed_comments = Vec::new();
for c in comments {
let start_line = c.start_line(context);
let end_line = c.end_line(context);
if let Some(items) = parse_comment(c, context) {
parsed_comments.push(parser_helpers::ParsedValue {
items,
lines_span: Some(parser_helpers::LinesSpan { start_line, end_line }),
allow_inline_multi_line: false,
allow_inline_single_line: false,
});
}
}
parsed_comments
}, parser_helpers::ParseSeparatedValuesOptions {
prefer_hanging: false,
force_use_new_lines: !is_single_line,
allow_blank_lines: true,
single_line_space_at_start: false,
single_line_space_at_end: false,
single_line_separator: Signal::SpaceOrNewLine.into(),
indent_width,
multi_line_options: parser_helpers::MultiLineOptions::surround_newlines_indented(),
force_possible_newline_at_start: false,
}).items);
} else {
items.push_signal(Signal::NewLine);
items.extend(with_indent(parse_comments_as_statements(comments, None, context)));
items.push_signal(Signal::NewLine);
}
}
} else {
if !is_single_line && !opts.prefer_single_line_when_empty {
items.push_signal(Signal::NewLine);
}
}
}
} else {
// todo: have a warning here when this happens
items.push_str(opts.open_token);
items.extend(parse_inner(context));
}
if let Some(parsed_close_token) = (custom_close_token)(context) {
items.extend(parsed_close_token);
} else {
items.push_str(opts.close_token);
}
return items;
// Keeps a line comment on the open token's line (ex. `{ // comment`),
// forcing no line breaks while it is emitted.
fn parse_first_line_trailing_comment(open_token_start_line: usize, comments: CommentsIterator, context: &mut Context) -> PrintItems {
let mut items = PrintItems::new();
let first_comment = comments.into_iter().next();
if let Some(first_comment) = first_comment {
if first_comment.kind == CommentKind::Line && first_comment.start_line(context) == open_token_start_line {
if let Some(parsed_comment) = parse_comment(&first_comment, context) {
items.push_signal(Signal::StartForceNoNewLines);
items.push_str(" ");
items.extend(parsed_comment);
items.push_signal(Signal::FinishForceNoNewLines);
}
}
}
items
}
}
#[cfg(debug_assertions)]
/// Debug-only sanity check that the located token's source text matches `op`;
/// panics when no operator token was found at all.
fn assert_has_op<'a>(op: &str, op_token: Option<&TokenAndSpan>, context: &mut Context<'a>) {
    match op_token {
        Some(token) => context.assert_text(token.lo(), token.hi(), op),
        None => panic!("Debug panic! Expected to have op token: {}", op),
    }
}
/// Gets whether an arrow function's body should be wrapped in a newline group.
/// Only a parenthesized object literal body (ex. `() => ({ ... })`) opts out,
/// since object literals manage their own multi-line layout.
fn use_new_line_group_for_arrow_body(arrow_expr: &ArrowExpr) -> bool {
    match &arrow_expr.body {
        BlockStmtOrExpr::Expr(expr) => match &**expr {
            Expr::Paren(paren) => !matches!(&*paren.expr, Expr::Object(_)),
            _ => true,
        },
        _ => true,
    }
}
/* is/has functions */
/// Gets whether the expression is a template literal.
fn is_expr_template(node: &Expr) -> bool {
    matches!(node, Expr::Tpl(_))
}
/// Gets whether the node is a call argument holding an arrow function whose
/// body is a bare expression (ex. `a => a + 5`) rather than a block.
fn is_arrow_function_with_expr_body(node: &Node) -> bool {
    match node {
        Node::ExprOrSpread(expr_or_spread) => matches!(
            &*expr_or_spread.expr,
            Expr::Arrow(arrow) if matches!(arrow.body, BlockStmtOrExpr::Expr(_))
        ),
        _ => false,
    }
}
/// Gets whether the node may be formatted multi-line while staying inline with
/// its siblings (ex. an object literal argument expanding over several lines).
/// `has_siblings` restricts templates and call expressions, which only format
/// inline multi-line when they are the sole item.
fn allows_inline_multi_line(node: &Node, has_siblings: bool) -> bool {
return match node {
Node::Param(param) => allows_inline_multi_line(&(&param.pat).into(), has_siblings),
// `x as T` is inline-able when the expression is and the type is simple
Node::TsAsExpr(as_expr) => allows_inline_multi_line(&(&as_expr.expr).into(), has_siblings)
&& match &*as_expr.type_ann {
TsType::TsTypeRef(_) | TsType::TsKeywordType(_) => true,
_ => allows_inline_multi_line(&(&as_expr.type_ann).into(), has_siblings)
},
Node::FnExpr(_) | Node::ArrowExpr(_) | Node::ObjectLit(_) | Node::ArrayLit(_)
| Node::ObjectPat(_) | Node::ArrayPat(_)
| Node::TsTypeLit(_) | Node::TsTupleType(_)
| Node::TsArrayType(_) => true,
Node::ExprOrSpread(node) => allows_inline_multi_line(&(&*node.expr).into(), has_siblings),
Node::TaggedTpl(_) | Node::Tpl(_) => !has_siblings,
Node::CallExpr(node) => !has_siblings && allow_inline_for_call_expr(node),
// a plain identifier only qualifies through its type annotation (ex. in params)
Node::Ident(node) => match &node.type_ann {
Some(type_ann) => allows_inline_multi_line(&(&type_ann.type_ann).into(), has_siblings),
None => false,
},
Node::AssignPat(node) => allows_inline_multi_line(&(&node.left).into(), has_siblings)
|| allows_inline_multi_line(&(&node.right).into(), has_siblings),
Node::TsTypeAnn(type_ann) => allows_inline_multi_line(&(&type_ann.type_ann).into(), has_siblings),
Node::TsTupleElement(tuple_element) => allows_inline_multi_line(&(&tuple_element.ty).into(), has_siblings),
_ => false,
};
fn allow_inline_for_call_expr(node: &CallExpr) -> bool {
// do not allow call exprs with nested call exprs in the member expr to be inline
return allow_for_expr_or_super(&node.callee);
fn allow_for_expr_or_super(expr_or_super: &ExprOrSuper) -> bool {
match expr_or_super {
ExprOrSuper::Expr(expr) => {
let expr = &**expr;
match expr {
// walk down member chains (ex. `a.b.c(...)`)
Expr::Member(member_expr) => allow_for_expr_or_super(&member_expr.obj),
Expr::Call(_) => false,
_=> true,
}
},
ExprOrSuper::Super(_) => true,
}
}
}
}
/// Gets whether newlines should be used between the nodes, judging from the
/// distance between the opening token and the first node.
/// NOTE(review): "preceeding" is a typo for "preceding", but renaming would
/// break callers elsewhere in the file.
fn get_use_new_lines_for_nodes_with_preceeding_token(open_token_text: &str, nodes: &Vec<impl Ranged>, prefer_single_line: bool, context: &mut Context) -> bool {
if nodes.is_empty() {
return false;
}
if prefer_single_line {
// basic rule: if any comments exist on separate lines, then everything becomes multi-line
has_any_node_comment_on_different_line(nodes, context)
} else {
let first_node = &nodes[0];
let previous_token = context.token_finder.get_previous_token(first_node);
if let Some(previous_token) = previous_token {
if previous_token.text(context) == open_token_text {
return node_helpers::get_use_new_lines_for_nodes(previous_token, first_node, context);
}
}
// arrow function expressions might not have an open paren (ex. `a => a + 5`)
false
}
}
/// Gets whether newlines should separate the nodes, judging from the first two.
fn get_use_new_lines_for_nodes(nodes: &Vec<impl Ranged>, prefer_single_line: bool, context: &mut Context) -> bool {
    // fewer than two nodes means there is nothing to separate
    if nodes.len() < 2 {
        return false;
    }
    if !prefer_single_line {
        return node_helpers::get_use_new_lines_for_nodes(&nodes[0], &nodes[1], context);
    }
    // preferring single line: only comments sitting on their own lines force multi-line
    has_any_node_comment_on_different_line(nodes, context)
}
/// Gets if any of the provided nodes have leading or trailing comments on a different line.
fn has_any_node_comment_on_different_line(nodes: &Vec<impl Ranged>, context: &mut Context) -> bool {
    for (i, node) in nodes.iter().enumerate() {
        if i == 0 {
            // for the first node: a leading line comment always counts, and any
            // comment starting above the node counts as well
            let first_node_start_line = node.start_line(context);
            if node.leading_comments(context).any(|c| c.kind == CommentKind::Line || c.start_line(context) < first_node_start_line) {
                return true;
            }
        }
        // check the comments trailing the node and any comma that follows it
        let node_end = node.hi();
        let next_node_pos = nodes.get(i + 1).map(|n| n.lo());
        if check_pos_has_trailing_comments(node_end, next_node_pos, context) {
            return true;
        } else if let Some(comma) = context.token_finder.get_next_token_if_comma(&node_end) {
            if check_pos_has_trailing_comments(comma.hi(), next_node_pos, context) {
                return true;
            }
        }
    }
    return false;

    // Checks the comments trailing `end` up to (not including) the next node.
    fn check_pos_has_trailing_comments(end: BytePos, next_node_pos: Option<BytePos>, context: &mut Context) -> bool {
        let end_line = end.end_line(context);
        let stop_line = next_node_pos.map(|p| p.start_line(context));
        for c in end.trailing_comments(context) {
            // a trailing line comment always forces multi-line
            if c.kind == CommentKind::Line {
                return true;
            }
            if let Some(stop_line) = stop_line {
                if c.start_line(context) >= stop_line {
                    // do not look at comments that the next node owns
                    return false;
                }
            }
            // a block comment extending past the node's line also counts
            if c.end_line(context) > end_line {
                return true;
            }
        }
        false
    }
}
/* config helpers */
/// Parses the configured separator (comma or semi-colon) for an item,
/// resolving single-line vs multi-line behavior at print time via `is_multi_line`.
fn get_parsed_separator(separator: &Separator, is_trailing: bool, is_multi_line: &(impl Fn(&mut ConditionResolverContext) -> Option<bool> + Clone + 'static)) -> PrintItems {
debug_assert!(!separator.is_none());
// performance optimization
return if separator.single_line == separator.multi_line {
get_items(&separator.single_line, is_trailing, is_multi_line)
} else {
// the separator differs by layout, so defer the choice to a condition
if_true_or(
"is_multi_line",
is_multi_line.clone(),
get_items(&separator.multi_line, is_trailing, is_multi_line),
get_items(&separator.single_line, is_trailing, is_multi_line),
).into()
};
// Maps a separator value to its print items; `None` emits nothing.
fn get_items(value: &Option<SeparatorValue>, is_trailing: bool, is_multi_line: &(impl Fn(&mut ConditionResolverContext) -> Option<bool> + Clone + 'static)) -> PrintItems {
match value {
Some(SeparatorValue::Comma(trailing_comma)) => get_parsed_trailing_comma(*trailing_comma, is_trailing, is_multi_line),
Some(SeparatorValue::SemiColon(semi_colons)) => get_parsed_semi_colon(*semi_colons, is_trailing, is_multi_line),
None => PrintItems::new(),
}
}
}
/// Parses a comma separator honoring the trailing-commas configuration.
fn get_parsed_trailing_comma(option: TrailingCommas, is_trailing: bool, is_multi_line: &(impl Fn(&mut ConditionResolverContext) -> Option<bool> + Clone + 'static)) -> PrintItems {
    // commas between items are unconditional; only the trailing one is configurable
    if !is_trailing {
        return ",".into();
    }
    match option {
        TrailingCommas::Never => PrintItems::new(),
        TrailingCommas::Always => ",".into(),
        TrailingCommas::OnlyMultiLine => if_true("trailingCommaIfMultiLine", is_multi_line.clone(), ",".into()).into(),
    }
}
/// Parses a semi-colon separator honoring the semi-colons configuration.
fn get_parsed_semi_colon(option: SemiColons, is_trailing: bool, is_multi_line: &(impl Fn(&mut ConditionResolverContext) -> Option<bool> + Clone + 'static)) -> PrintItems {
    if is_trailing {
        // the last item's semi-colon is the configurable one
        match option {
            SemiColons::Always => ";".into(),
            SemiColons::Prefer => if_true("semiColonIfMultiLine", is_multi_line.clone(), ";".into()).into(),
            SemiColons::Asi => PrintItems::new(),
        }
    } else {
        // separators between items are required, except ASI single-line handling
        match option {
            SemiColons::Always | SemiColons::Prefer => ";".into(),
            SemiColons::Asi => if_false("semiColonIfSingleLine", is_multi_line.clone(), ";".into()).into(),
        }
    }
}
fn create_span_data(lo: BytePos, hi: BytePos) -> Span {
Span { lo, hi, ctxt: Default::default() }
} |
#![cfg_attr(feature="alloc_system",feature(alloc_system))]
#[cfg(feature="alloc_system")]
extern crate alloc_system;
extern crate crypto;
#[macro_use]
extern crate serde_json;
extern crate crossbeam;
extern crate walkdir;
extern crate semver;
extern crate zip;
extern crate tempdir;
extern crate libc;
extern crate uuid;
mod defs;
pub use defs::*;
mod producer;
pub use producer::*;
mod gcov;
pub use gcov::*;
mod parser;
pub use parser::*;
mod path_rewriting;
pub use path_rewriting::*;
mod output;
pub use output::*;
use std::collections::{btree_map, hash_map};
use std::fs::{self, File};
use std::io::{Cursor, BufReader};
use std::path::PathBuf;
use walkdir::WalkDir;
// Merge results, without caring about duplicate lines (they will be removed at the end).
fn merge_results(result: &mut CovResult, result2: &mut CovResult) {
    // line hit counts are summed
    for (&line_no, &count) in result2.lines.iter() {
        result.lines
            .entry(line_no)
            .and_modify(|c| *c += count)
            .or_insert(count);
    }
    // a branch is considered taken if either run took it
    for (&branch_key, &taken) in result2.branches.iter() {
        result.branches
            .entry(branch_key)
            .and_modify(|t| *t |= taken)
            .or_insert(taken);
    }
    // a function is considered executed if either run executed it
    for (name, function) in result2.functions.drain() {
        match result.functions.entry(name) {
            hash_map::Entry::Occupied(existing) => existing.into_mut().executed |= function.executed,
            hash_map::Entry::Vacant(slot) => {
                slot.insert(function);
            }
        }
    }
}
/// Folds every per-file result into the shared coverage map, merging with any
/// result already recorded for the same file name.
fn add_results(results: Vec<(String, CovResult)>, result_map: &SyncCovResultMap) {
    // lock once for the whole batch
    let mut map = result_map.lock().unwrap();
    // consume the vector directly; draining an owned Vec that is then dropped is needless
    for (name, mut result) in results {
        match map.entry(name) {
            hash_map::Entry::Occupied(obj) => {
                merge_results(obj.into_mut(), &mut result);
            },
            hash_map::Entry::Vacant(v) => {
                v.insert(result);
            }
        };
    }
}
// Some versions of GCC, because of a bug, generate multiple gcov files for each
// gcno, so we have to support this case too for the time being.
#[derive(PartialEq, Eq)]
enum GcovType {
// not yet determined; decided from the first gcov output encountered
Unknown,
// gcov produced a single .gcov file per gcno
SingleFile,
// buggy GCC: gcov produced multiple files for one gcno
MultipleFiles,
}
// Evaluates the parse result `$v`; on failure prints an error naming the
// offending file `$f` and exits the whole process with status 1.
macro_rules! try_parse {
($v:expr, $f:expr) => (match $v {
Ok(val) => val,
Err(err) => {
eprintln!("Error parsing file {}:", $f);
eprintln!("{}", err);
std::process::exit(1);
}
});
}
/// Consumes work items from the queue until it is empty: GCNO items are run
/// through gcov (or the LLVM gcno parser) and the produced .gcov files parsed;
/// INFO items are parsed as lcov. Every parsed result is merged into `result_map`.
pub fn consumer(working_dir: &PathBuf, result_map: &SyncCovResultMap, queue: &WorkQueue, is_llvm: bool, branch_enabled: bool) {
// whether gcov emits one file per gcno or several is detected once from the
// first GCNO item and reused for the rest of the run
let mut gcov_type = GcovType::Unknown;
while let Some(work_item) = queue.pop() {
let new_results = match work_item.format {
ItemFormat::GCNO => {
let gcno_path = work_item.path();
if !is_llvm {
run_gcov(gcno_path, branch_enabled, working_dir);
} else {
call_parse_llvm_gcno(working_dir.to_str().unwrap(), gcno_path.parent().unwrap().join(gcno_path.file_stem().unwrap()).to_str().unwrap(), branch_enabled);
}
// expected single-file output name: `<gcno file name>.gcov` in working_dir
let gcov_path = working_dir.join(gcno_path.file_name().unwrap().to_str().unwrap().to_string() + ".gcov");
if gcov_type == GcovType::Unknown {
gcov_type = if gcov_path.exists() {
GcovType::SingleFile
} else {
GcovType::MultipleFiles
};
}
if gcov_type == GcovType::SingleFile {
let new_results = try_parse!(parse_gcov(&gcov_path), gcov_path.display());
// each gcov file is consumed exactly once, then removed
fs::remove_file(gcov_path).unwrap();
new_results
} else {
// buggy-GCC case: gather and parse every file gcov dumped in working_dir
let mut new_results: Vec<(String,CovResult)> = Vec::new();
for entry in WalkDir::new(&working_dir).min_depth(1) {
let gcov_path = entry.unwrap();
let gcov_path = gcov_path.path();
new_results.append(&mut try_parse!(parse_gcov(&gcov_path), gcov_path.display()));
fs::remove_file(gcov_path).unwrap();
}
new_results
}
},
ItemFormat::INFO => {
match work_item.item {
ItemType::Path(info_path) => {
let f = File::open(&info_path).expect("Failed to open lcov file");
let file = BufReader::new(&f);
try_parse!(parse_lcov(file, branch_enabled), info_path.display())
},
ItemType::Content(info_content) => {
// lcov content held in memory (ex. extracted from an archive)
let buffer = BufReader::new(Cursor::new(info_content));
try_parse!(parse_lcov(buffer, branch_enabled), "")
}
}
}
};
add_results(new_results, result_map);
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;
    // Exercises all three merge rules: line counts are summed, branch flags
    // are OR-ed, and function `executed` flags are OR-ed while the existing
    // entry's `start` line is kept.
    #[test]
    fn test_merge_results() {
        let mut functions1: HashMap<String,Function> = HashMap::new();
        functions1.insert("f1".to_string(), Function {
            start: 1,
            executed: false,
        });
        functions1.insert("f2".to_string(), Function {
            start: 2,
            executed: false,
        });
        let mut result = CovResult {
            lines: [(1, 21),(2, 7),(7,0)].iter().cloned().collect(),
            branches: [((1, 0), false), ((1, 1), false), ((2, 0), false), ((2, 1), true), ((4, 0), true)].iter().cloned().collect(),
            functions: functions1,
        };
        let mut functions2: HashMap<String,Function> = HashMap::new();
        functions2.insert("f1".to_string(), Function {
            start: 1,
            executed: false,
        });
        functions2.insert("f2".to_string(), Function {
            start: 2,
            executed: true,
        });
        let mut result2 = CovResult {
            lines: [(1,21),(3,42),(4,7),(2,0),(8,0)].iter().cloned().collect(),
            branches: [((1, 0), false), ((1, 1), false), ((2, 0), true), ((2, 1), false), ((3, 0), true)].iter().cloned().collect(),
            functions: functions2,
        };
        merge_results(&mut result, &mut result2);
        // Line 1 appears in both inputs, so its counts add up (21 + 21 = 42).
        assert_eq!(result.lines, [(1,42),(2,7),(3,42),(4,7),(7,0),(8,0)].iter().cloned().collect());
        assert_eq!(result.branches, [((1, 0), false), ((1, 1), false), ((2, 0), true), ((2, 1), true), ((3, 0), true), ((4, 0), true)].iter().cloned().collect());
        assert!(result.functions.contains_key("f1"));
        assert!(result.functions.contains_key("f2"));
        // f2 executed in result2 only; the merged flag must be true.
        let mut func = result.functions.get("f1").unwrap();
        assert_eq!(func.start, 1);
        assert_eq!(func.executed, false);
        func = result.functions.get("f2").unwrap();
        assert_eq!(func.start, 2);
        assert_eq!(func.executed, true);
    }
}
|
use anchor_lang::{AccountsExit, Key, prelude::*};
use anchor_spl::token::{self, Burn, Mint, MintTo, TokenAccount, Transfer};
use spl_token::state::Account as SPLTokenAccount;
use solana_program::{program::invoke, program_error::ProgramError, program_pack::Pack, system_instruction, system_program};
declare_id!("5JqFVkV8QNpm7WpiUkRxrXbDVncRYgdbia8EWDWNrQ62");
const PREFIX: &str = "stake";
#[program]
pub mod stake {
    use super::*;
    /// Initialize the program's `StakeAccount` PDA and record its authority.
    pub fn initialize(ctx: Context<Initialize>, bump: u8) -> ProgramResult {
        let stake_account = &mut ctx.accounts.stake_account;
        // Persist the PDA bump so later instructions can re-derive signer seeds.
        stake_account.bump = bump;
        stake_account.authority = *ctx.accounts.authority.to_account_info().key;
        Ok(())
    }
    /// Create a `StakeJamboAccount` PDA for `jambo_mint` and move one token
    /// from the caller's source account into the program-owned pool.
    pub fn stake_jambo(ctx: Context<StakeJambo>, bump: u8) -> ProgramResult {
        let stake_jambo_account = &mut ctx.accounts.stake_jambo_account;
        stake_jambo_account.bump = bump;
        stake_jambo_account.jambo_mint = *ctx.accounts.jambo_mint.to_account_info().key;
        stake_jambo_account.jambo_mint_pool = *ctx.accounts.jambo_mint_pool.to_account_info().key;
        // Transfer the underlying assets to the underlying assets pool
        let cpi_accounts = Transfer {
            from: ctx.accounts.jambo_mint_src.to_account_info(),
            to: ctx.accounts.jambo_mint_pool.to_account_info(),
            authority: ctx.accounts.authority.clone(),
        };
        let cpi_token_program = ctx.accounts.token_program.clone();
        let cpi_ctx = CpiContext::new(cpi_token_program, cpi_accounts);
        // Amount is hard-coded to 1 — presumably the mint is an NFT with
        // supply 1; NOTE(review): confirm before reusing for fungible tokens.
        let underlying_transfer_amount = 1;
        token::transfer(cpi_ctx, underlying_transfer_amount)?;
        Ok(())
    }
    /// Return the staked token from the pool to the caller's destination
    /// account; the stake PDA signs the transfer with its derived seeds.
    #[access_control(UnstakeJambo::accounts(&ctx))]
    pub fn unstake_jambo(ctx: Context<UnstakeJambo>) -> ProgramResult {
        let stake_jambo_account = &ctx.accounts.stake_jambo_account;
        // Re-derive the PDA signer seeds: ["stake", jambo_mint, bump].
        let seeds = &[
            PREFIX.as_bytes(),
            stake_jambo_account.jambo_mint.as_ref(),
            &[stake_jambo_account.bump]
        ];
        let signer = &[&seeds[..]];
        // Transfer the underlying assets to the underlying assets pool
        let cpi_accounts = Transfer {
            from: ctx.accounts.jambo_mint_pool.to_account_info(),
            to: ctx.accounts.jambo_mint_dest.to_account_info(),
            authority: ctx.accounts.stake_jambo_account.to_account_info(),
        };
        let cpi_token_program = ctx.accounts.token_program.clone();
        let cpi_ctx = CpiContext::new_with_signer(cpi_token_program, cpi_accounts, signer);
        let underlying_transfer_amount = 1;
        token::transfer(cpi_ctx, underlying_transfer_amount)?;
        Ok(())
    }
    /// Move the token back into the pool for an already-existing stake PDA.
    #[access_control(RestakeJambo::accounts(&ctx))]
    pub fn restake_jambo(ctx: Context<RestakeJambo>) -> ProgramResult {
        let stake_jambo_account = &ctx.accounts.stake_jambo_account;
        // Transfer the underlying assets to the underlying assets pool
        let cpi_accounts = Transfer {
            from: ctx.accounts.jambo_mint_src.to_account_info(),
            to: ctx.accounts.jambo_mint_pool.to_account_info(),
            authority: ctx.accounts.authority.clone(),
        };
        let cpi_token_program = ctx.accounts.token_program.clone();
        let cpi_ctx = CpiContext::new(cpi_token_program, cpi_accounts);
        let underlying_transfer_amount = 1;
        token::transfer(cpi_ctx, underlying_transfer_amount)?;
        Ok(())
    }
}
#[derive(Accounts)]
#[instruction(bump: u8)]
pub struct Initialize<'info> {
    /// PDA ["stake", authority] holding the program's global state.
    #[account(init, seeds=[PREFIX.as_bytes(), authority.key().as_ref()], bump=bump, payer=authority)]
    pub stake_account: Account<'info, StakeAccount>,
    // // #[account(signer, constraint= authority.data_is_empty() && authority.lamports() > 0)]
    // authority: AccountInfo<'info>,
    /// Funds the new account and becomes its recorded authority.
    #[account(mut)]
    pub authority: Signer<'info>,
    // // #[account(address = system_program::ID)]
    pub system_program: Program<'info, System>,
    // rent: Sysvar<'info, Rent>,
}
#[derive(Accounts)]
#[instruction(bump: u8)]
pub struct StakeJambo<'info> {
    /// PDA ["stake", jambo_mint] tracking a single staked mint.
    #[account(init, seeds=[PREFIX.as_bytes(), jambo_mint.key().as_ref()], bump=bump, payer=authority)]
    pub stake_jambo_account: Account<'info, StakeJamboAccount>,
    /// Token owner; pays for the new accounts and signs the transfer.
    #[account(mut, signer)]
    pub authority: AccountInfo<'info>,
    /// Mint being staked.
    pub jambo_mint: Box<Account<'info, Mint>>,
    /// Caller's token account the staked token is pulled from.
    #[account(mut)]
    pub jambo_mint_src: Box<Account<'info, TokenAccount>>,
    /// Pool token account owned by the stake PDA; holds the staked token.
    #[account(init,
    seeds = [&stake_jambo_account.key().to_bytes()[..], b"jamboMintPool"],
    bump,
    payer = authority,
    token::mint = jambo_mint,
    token::authority = stake_jambo_account,
    )]
    pub jambo_mint_pool: Box<Account<'info, TokenAccount>>,
    /// SPL token program. NOTE(review): unchecked AccountInfo — consider
    /// `Program<'info, Token>` for an address check.
    pub token_program: AccountInfo<'info>,
    // pub associated_token_program: AccountInfo<'info>,
    pub system_program: Program<'info, System>,
    pub rent: Sysvar<'info, Rent>,
}
#[derive(Accounts)]
pub struct UnstakeJambo<'info> {
    /// Stake PDA; seeds are re-checked against the stored mint and bump.
    #[account(
    mut,
    seeds = [PREFIX.as_bytes(), stake_jambo_account.jambo_mint.key().as_ref()],
    bump = stake_jambo_account.bump
    )]
    pub stake_jambo_account: Account<'info, StakeJamboAccount>,
    /// Caller requesting the unstake.
    #[account(mut, signer)]
    pub authority: AccountInfo<'info>,
    /// Mint being unstaked; validated in `UnstakeJambo::accounts`.
    pub jambo_mint: Box<Account<'info, Mint>>,
    /// Destination token account for the returned token.
    #[account(mut)]
    pub jambo_mint_dest: Box<Account<'info, TokenAccount>>,
    /// Pool holding the staked token; validated in `UnstakeJambo::accounts`.
    #[account(mut)]
    pub jambo_mint_pool: Box<Account<'info, TokenAccount>>,
    /// SPL token program (unchecked AccountInfo).
    pub token_program: AccountInfo<'info>,
    // pub associated_token_program: AccountInfo<'info>,
    pub system_program: Program<'info, System>,
    pub rent: Sysvar<'info, Rent>,
}
impl<'info> UnstakeJambo<'info> {
    /// Extra validation run via `#[access_control]` before `unstake_jambo`:
    /// the passed pool and mint accounts must be exactly the ones recorded
    /// on the `StakeJamboAccount`.
    fn accounts(ctx: &Context<UnstakeJambo<'info>>) -> ProgramResult {
        let stake = &ctx.accounts.stake_jambo_account;
        let pool_key = *ctx.accounts.jambo_mint_pool.to_account_info().key;
        if pool_key != stake.jambo_mint_pool {
            return Err(ErrorCode::JamboMintPoolAccountDoesNotMatchStake.into());
        }
        let mint_key = *ctx.accounts.jambo_mint.to_account_info().key;
        if mint_key != stake.jambo_mint {
            return Err(ErrorCode::JamboMintDoesNotMatchStake.into());
        }
        Ok(())
    }
}
#[derive(Accounts)]
pub struct RestakeJambo<'info> {
    /// Stake PDA; seeds are re-checked against the stored mint and bump.
    #[account(
    mut,
    seeds = [PREFIX.as_bytes(), stake_jambo_account.jambo_mint.key().as_ref()],
    bump = stake_jambo_account.bump
    )]
    pub stake_jambo_account: Account<'info, StakeJamboAccount>,
    /// Token owner re-staking; signs the transfer back into the pool.
    #[account(mut, signer)]
    pub authority: AccountInfo<'info>,
    /// Mint being re-staked; validated in `RestakeJambo::accounts`.
    pub jambo_mint: Box<Account<'info, Mint>>,
    /// Caller's token account the token is pulled from.
    #[account(mut)]
    pub jambo_mint_src: Box<Account<'info, TokenAccount>>,
    /// Pool account; validated in `RestakeJambo::accounts`.
    #[account(mut)]
    pub jambo_mint_pool: Box<Account<'info, TokenAccount>>,
    /// SPL token program (unchecked AccountInfo).
    pub token_program: AccountInfo<'info>,
    // pub associated_token_program: AccountInfo<'info>,
    pub system_program: Program<'info, System>,
    pub rent: Sysvar<'info, Rent>,
}
impl<'info> RestakeJambo<'info> {
    /// Extra validation run via `#[access_control]` before `restake_jambo`:
    /// pool and mint accounts must match the ones stored on the stake PDA.
    fn accounts(ctx: &Context<RestakeJambo<'info>>) -> ProgramResult {
        let stake = &ctx.accounts.stake_jambo_account;
        let pool_key = *ctx.accounts.jambo_mint_pool.to_account_info().key;
        if pool_key != stake.jambo_mint_pool {
            return Err(ErrorCode::JamboMintPoolAccountDoesNotMatchStake.into());
        }
        let mint_key = *ctx.accounts.jambo_mint.to_account_info().key;
        if mint_key != stake.jambo_mint {
            return Err(ErrorCode::JamboMintDoesNotMatchStake.into());
        }
        Ok(())
    }
}
#[account]
#[derive(Default)]
pub struct StakeJamboAccount {
    /// Mint this stake record is for.
    pub jambo_mint: Pubkey,
    /// Pool token account that holds the staked token.
    pub jambo_mint_pool: Pubkey,
    /// PDA bump for ["stake", jambo_mint].
    pub bump: u8,
}
#[account]
#[derive(Default)]
pub struct StakeAccount {
    /// Authority recorded at initialization.
    pub authority: Pubkey,
    /// PDA bump for ["stake", authority].
    pub bump: u8,
}
// #[derive(AnchorSerialize, AnchorDeserialize, Clone, Default)]
// pub struct StakeJamboAccountData {
// pub jambo_mint: Pubkey,
// }
// Program-specific error codes surfaced by the access-control checks.
#[error]
pub enum ErrorCode {
    #[msg("Mint pool is not same as on the StakeJamboAccount!")]
    JamboMintPoolAccountDoesNotMatchStake,
    #[msg("Jambo mint is not same as on the StakeJamboAccount!")]
    JamboMintDoesNotMatchStake,
} |
// Service module root: generated data models and REST operations.
pub mod models;
pub mod operations;
// API version sent with requests; allow(dead_code) because not every
// operation module references it.
#[allow(dead_code)]
pub const API_VERSION: &str = "2018-02-01-preview";
|
use crate::NumberStyle;
use std::cmp::Ordering::*;
use std::cmp::Ordering;
/// A contiguous region of `size()` bytes starting at file offset `start()`.
pub trait Section {
    /// Print a human-readable summary of this section.
    fn print_info(&self, style: NumberStyle);
    /// First byte offset covered by the section.
    fn start(&self) -> u64;
    /// Size of the section in bytes.
    fn size(&self) -> usize;
    /// Last byte offset covered by the section (inclusive).
    /// NOTE(review): assumes `size() > 0`; a zero-sized section underflows.
    fn end(&self) -> u64 {
        self.start() + self.size() as u64 - 1
    }
    /// Position of `offset` relative to this section: `Less` if the section
    /// ends before it, `Greater` if it starts after it, `Equal` if it
    /// contains it. Usable as a comparator for binary-searching sections.
    fn compare_offset(&self, offset: u64) -> Ordering {
        match offset {
            o if self.end() < o => Less,
            o if self.start() > o => Greater,
            _ => Equal,
        }
    }
}
|
// svd2rust-generated reader/writer plumbing for DDRPHYC_DX0DQTR: eight 4-bit
// delay fields (DQDLY0..DQDLY7), one per DQ data line of byte lane 0.
#[doc = "Register `DDRPHYC_DX0DQTR` reader"]
pub type R = crate::R<DDRPHYC_DX0DQTR_SPEC>;
#[doc = "Register `DDRPHYC_DX0DQTR` writer"]
pub type W = crate::W<DDRPHYC_DX0DQTR_SPEC>;
#[doc = "Field `DQDLY0` reader - DQDLY0"]
pub type DQDLY0_R = crate::FieldReader;
#[doc = "Field `DQDLY0` writer - DQDLY0"]
pub type DQDLY0_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY1` reader - DQDLY1"]
pub type DQDLY1_R = crate::FieldReader;
#[doc = "Field `DQDLY1` writer - DQDLY1"]
pub type DQDLY1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY2` reader - DQDLY2"]
pub type DQDLY2_R = crate::FieldReader;
#[doc = "Field `DQDLY2` writer - DQDLY2"]
pub type DQDLY2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY3` reader - DQDLY3"]
pub type DQDLY3_R = crate::FieldReader;
#[doc = "Field `DQDLY3` writer - DQDLY3"]
pub type DQDLY3_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY4` reader - DQDLY4"]
pub type DQDLY4_R = crate::FieldReader;
#[doc = "Field `DQDLY4` writer - DQDLY4"]
pub type DQDLY4_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY5` reader - DQDLY5"]
pub type DQDLY5_R = crate::FieldReader;
#[doc = "Field `DQDLY5` writer - DQDLY5"]
pub type DQDLY5_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY6` reader - DQDLY6"]
pub type DQDLY6_R = crate::FieldReader;
#[doc = "Field `DQDLY6` writer - DQDLY6"]
pub type DQDLY6_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `DQDLY7` reader - DQDLY7"]
pub type DQDLY7_R = crate::FieldReader;
#[doc = "Field `DQDLY7` writer - DQDLY7"]
pub type DQDLY7_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
impl R {
    // Each accessor extracts one 4-bit DQ delay field. Truncating to u8
    // before masking is equivalent to masking first, since only the low
    // nibble survives either way.
    #[doc = "Bits 0:3 - DQDLY0"]
    #[inline(always)]
    pub fn dqdly0(&self) -> DQDLY0_R {
        DQDLY0_R::new((self.bits as u8) & 0x0f)
    }
    #[doc = "Bits 4:7 - DQDLY1"]
    #[inline(always)]
    pub fn dqdly1(&self) -> DQDLY1_R {
        DQDLY1_R::new(((self.bits >> 4) as u8) & 0x0f)
    }
    #[doc = "Bits 8:11 - DQDLY2"]
    #[inline(always)]
    pub fn dqdly2(&self) -> DQDLY2_R {
        DQDLY2_R::new(((self.bits >> 8) as u8) & 0x0f)
    }
    #[doc = "Bits 12:15 - DQDLY3"]
    #[inline(always)]
    pub fn dqdly3(&self) -> DQDLY3_R {
        DQDLY3_R::new(((self.bits >> 12) as u8) & 0x0f)
    }
    #[doc = "Bits 16:19 - DQDLY4"]
    #[inline(always)]
    pub fn dqdly4(&self) -> DQDLY4_R {
        DQDLY4_R::new(((self.bits >> 16) as u8) & 0x0f)
    }
    #[doc = "Bits 20:23 - DQDLY5"]
    #[inline(always)]
    pub fn dqdly5(&self) -> DQDLY5_R {
        DQDLY5_R::new(((self.bits >> 20) as u8) & 0x0f)
    }
    #[doc = "Bits 24:27 - DQDLY6"]
    #[inline(always)]
    pub fn dqdly6(&self) -> DQDLY6_R {
        DQDLY6_R::new(((self.bits >> 24) as u8) & 0x0f)
    }
    #[doc = "Bits 28:31 - DQDLY7"]
    #[inline(always)]
    pub fn dqdly7(&self) -> DQDLY7_R {
        DQDLY7_R::new(((self.bits >> 28) as u8) & 0x0f)
    }
}
impl W {
    // Each writer proxy targets one 4-bit delay field; the const generic
    // argument encodes the field's bit offset within the register.
    #[doc = "Bits 0:3 - DQDLY0"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly0(&mut self) -> DQDLY0_W<DDRPHYC_DX0DQTR_SPEC, 0> {
        DQDLY0_W::new(self)
    }
    #[doc = "Bits 4:7 - DQDLY1"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly1(&mut self) -> DQDLY1_W<DDRPHYC_DX0DQTR_SPEC, 4> {
        DQDLY1_W::new(self)
    }
    #[doc = "Bits 8:11 - DQDLY2"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly2(&mut self) -> DQDLY2_W<DDRPHYC_DX0DQTR_SPEC, 8> {
        DQDLY2_W::new(self)
    }
    #[doc = "Bits 12:15 - DQDLY3"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly3(&mut self) -> DQDLY3_W<DDRPHYC_DX0DQTR_SPEC, 12> {
        DQDLY3_W::new(self)
    }
    #[doc = "Bits 16:19 - DQDLY4"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly4(&mut self) -> DQDLY4_W<DDRPHYC_DX0DQTR_SPEC, 16> {
        DQDLY4_W::new(self)
    }
    #[doc = "Bits 20:23 - DQDLY5"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly5(&mut self) -> DQDLY5_W<DDRPHYC_DX0DQTR_SPEC, 20> {
        DQDLY5_W::new(self)
    }
    #[doc = "Bits 24:27 - DQDLY6"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly6(&mut self) -> DQDLY6_W<DDRPHYC_DX0DQTR_SPEC, 24> {
        DQDLY6_W::new(self)
    }
    #[doc = "Bits 28:31 - DQDLY7"]
    #[inline(always)]
    #[must_use]
    pub fn dqdly7(&mut self) -> DQDLY7_W<DDRPHYC_DX0DQTR_SPEC, 28> {
        DQDLY7_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // SAFETY contract for callers: the raw write bypasses the typed field
    // API; the caller is responsible for supplying a meaningful value.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DDRPHYC byte lane 0 DQT register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrphyc_dx0dqtr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ddrphyc_dx0dqtr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRPHYC_DX0DQTR_SPEC;
impl crate::RegisterSpec for DDRPHYC_DX0DQTR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrphyc_dx0dqtr::R`](R) reader structure"]
impl crate::Readable for DDRPHYC_DX0DQTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ddrphyc_dx0dqtr::W`](W) writer structure"]
impl crate::Writable for DDRPHYC_DX0DQTR_SPEC {
    // No write-0-to-modify / write-1-to-modify fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DDRPHYC_DX0DQTR to value 0xffff_ffff"]
impl crate::Resettable for DDRPHYC_DX0DQTR_SPEC {
    const RESET_VALUE: Self::Ux = 0xffff_ffff;
}
|
// Cargo.toml template for a freshly-initialized fuzz crate. `$name` is the
// parent crate (depended on by relative path) and `$edition` an optional
// edition string rendered as an `edition = "…"` line.
macro_rules! toml_template {
    ($name:expr, $edition:expr) => {
        format_args!(
            r##"[package]
name = "{name}-fuzz"
version = "0.0.0"
publish = false
{edition}
[package.metadata]
cargo-fuzz = true
[dependencies]
libfuzzer-sys = "0.4"
[dependencies.{name}]
path = ".."
# Prevent this from interfering with workspaces
[workspace]
members = ["."]
[profile.release]
debug = 1
"##,
            name = $name,
            // Render nothing when no edition was requested.
            edition = if let Some(edition) = &$edition {
                format!("edition = \"{}\"\n", edition)
            } else {
                String::new()
            },
        )
    };
}
// `[[bin]]` stanza appended to the fuzz crate's Cargo.toml for each new
// fuzz target named `$name`.
macro_rules! toml_bin_template {
    ($name: expr) => {
        format_args!(
            r#"
[[bin]]
name = "{0}"
path = "fuzz_targets/{0}.rs"
test = false
doc = false
"#,
            $name
        )
    };
}
// .gitignore contents for a generated fuzz crate (build and fuzzing output).
macro_rules! gitignore_template {
    () => {
        format_args!(
            r##"target
corpus
artifacts
coverage
"##
        )
    };
}
// Skeleton source for a new fuzz target; 2015-edition crates need an
// explicit `extern crate` line, later editions do not.
macro_rules! target_template {
    ($edition:expr) => {
        format_args!(
            r##"#![no_main]
{extern_crate}
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: &[u8]| {{
// fuzzed code goes here
}});
"##,
            extern_crate = match $edition.as_deref() {
                None | Some("2015") => "\nextern crate libfuzzer_sys;\n",
                Some(_) => "",
            },
        )
    };
}
|
use crate::NodeClient;
use async_trait::async_trait;
use lru::LruCache;
use parking_lot::Mutex;
use subspace_archiving::archiver::is_piece_valid;
use subspace_core_primitives::crypto::kzg::Kzg;
use subspace_core_primitives::{Piece, PieceIndex, SegmentCommitment, SegmentIndex};
use subspace_networking::libp2p::PeerId;
use subspace_networking::utils::piece_provider::PieceValidator;
use subspace_networking::Node;
use tracing::{error, warn};
/// Validates pieces received from the DSN against the KZG segment commitment
/// fetched from the node; commitments are memoized in an LRU cache.
pub struct SegmentCommitmentPieceValidator<NC> {
    // Local DSN node handle; used for identity checks and banning peers.
    dsn_node: Node,
    // Client for querying segment headers from the node.
    node_client: NC,
    // KZG settings used for piece validation.
    kzg: Kzg,
    // Cache of segment index -> segment commitment.
    segment_commitment_cache: Mutex<LruCache<SegmentIndex, SegmentCommitment>>,
}
impl<NC> SegmentCommitmentPieceValidator<NC> {
    /// Create a validator from its collaborators; the caller supplies the
    /// (pre-sized) commitment cache.
    pub fn new(
        dsn_node: Node,
        node_client: NC,
        kzg: Kzg,
        segment_commitment_cache: Mutex<LruCache<SegmentIndex, SegmentCommitment>>,
    ) -> Self {
        Self {
            dsn_node,
            node_client,
            kzg,
            segment_commitment_cache,
        }
    }
}
#[async_trait]
impl<NC> PieceValidator for SegmentCommitmentPieceValidator<NC>
where
NC: NodeClient,
{
async fn validate_piece(
&self,
source_peer_id: PeerId,
piece_index: PieceIndex,
piece: Piece,
) -> Option<Piece> {
if source_peer_id != self.dsn_node.id() {
let segment_index = piece_index.segment_index();
let maybe_segment_commitment = self
.segment_commitment_cache
.lock()
.get(&segment_index)
.copied();
let segment_commitment = match maybe_segment_commitment {
Some(segment_commitment) => segment_commitment,
None => {
let segment_headers =
match self.node_client.segment_headers(vec![segment_index]).await {
Ok(segment_headers) => segment_headers,
Err(error) => {
error!(
%piece_index,
?error,
"Failed tor retrieve segment headers from node"
);
return None;
}
};
let segment_commitment = match segment_headers.into_iter().next().flatten() {
Some(segment_header) => segment_header.segment_commitment(),
None => {
error!(
%piece_index,
%segment_index,
"Segment commitment for segment index wasn't found on node"
);
return None;
}
};
self.segment_commitment_cache
.lock()
.push(segment_index, segment_commitment);
segment_commitment
}
};
if !is_piece_valid(
&self.kzg,
&piece,
&segment_commitment,
piece_index.position(),
) {
warn!(
%piece_index,
%source_peer_id,
"Received invalid piece from peer"
);
// We don't care about result here
let _ = self.dsn_node.ban_peer(source_peer_id).await;
return None;
}
}
Some(piece)
}
}
|
use super::{Load, traits::{Allocate, New}};
use image::{GenericImage};
use super::utils;
use super::shader;
use super::vao;
/// Thin wrapper around an OpenGL 2D texture plus a fullscreen quad + shader
/// used to blit it for debugging/display via `draw()`.
#[derive(Debug, Default)]
pub struct Texture {
    // GL texture object name.
    id: gl::types::GLuint,
    width: i32,
    height: i32,
    // Shader and quad used only by `draw()`.
    quad_shader: shader::Shader,
    quad: vao::Vao,
    // glTexImage2D internal format used by `allocate()`.
    internal_format: gl::types::GLint,
    // Pixel data type passed to glTexImage2D by `allocate()`.
    data_type: gl::types::GLuint,
    // NOTE(review): set by constructors but never flipped to true in the
    // code visible here.
    allocated: bool,
    min_filter: gl::types::GLuint,
    mag_filter: gl::types::GLuint,
    wrap_horizontal: gl::types::GLuint,
    wrap_vertical: gl::types::GLuint
}
/*
https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glTexImage2D.xhtml
- internal_format
GL_DEPTH_COMPONENT GL_DEPTH_STENCIL GL_RED GL_RG GL_RGB GL_RGBA
*/
impl New<(i32, i32, i32)> for Texture {
    /// Create an empty texture of `(width, height, internal_format)`.
    ///
    /// Bug fix: the requested `internal_format` was previously ignored — the
    /// struct was always built with `gl::RGBA` while the storage was
    /// allocated as `gl::RGB`. The passed format is now stored and used for
    /// the initial allocation, deriving the pixel-data `format` argument
    /// from it exactly as `allocate()` does. (That derivation only holds for
    /// unsized formats like GL_RGB/GL_RGBA, matching `allocate()`'s
    /// existing convention.)
    fn new(args: (i32, i32, i32)) -> Self {
        let (width, height, internal_format) = args;
        unsafe {
            let mut quad = vao::Vao::new();
            quad.create_quad();
            let mut quad_shader = shader::Shader::new();
            quad_shader.load((utils::VS_QUAD, utils::FS_QUAD));
            let mut id = std::mem::zeroed();
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as f32);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as f32);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as f32);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as f32);
            // Allocate storage with the requested format; no initial data.
            gl::TexImage2D(gl::TEXTURE_2D, 0, internal_format, width, height, 0, internal_format as u32, gl::UNSIGNED_BYTE, std::ptr::null());
            gl::BindTexture(gl::TEXTURE_2D, 0);
            Texture {
                id,
                width,
                height,
                quad_shader,
                quad,
                internal_format,
                data_type: gl::UNSIGNED_BYTE,
                allocated: false,
                wrap_horizontal: gl::CLAMP_TO_EDGE,
                wrap_vertical: gl::CLAMP_TO_EDGE,
                min_filter: gl::LINEAR,
                mag_filter: gl::LINEAR,
            }
        }
    }
}
impl New<&str> for Texture {
    /// Create a texture by loading an image file from `path`.
    /// Panics if the file cannot be opened or decoded.
    fn new(args: &str) -> Self {
        let path = args;
        let img = image::open(&std::path::Path::new(path)).expect("Failed to load image");
        let size = img.dimensions();
        let width = size.0 as i32;
        let height = size.1 as i32;
        let data = img.raw_pixels();
        unsafe {
            let mut quad = vao::Vao::new();
            quad.create_quad();
            let mut quad_shader = shader::Shader::new();
            quad_shader.load((utils::VS_QUAD, utils::FS_QUAD));
            let mut id = std::mem::zeroed();
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as f32);
            gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as f32);
            // set texture filtering parameters
            gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);
            gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);
            // Upload assumes tightly-packed 3-channel data — NOTE(review):
            // an RGBA or grayscale image would be misinterpreted here.
            gl::TexImage2D(gl::TEXTURE_2D, 0, gl::RGB as i32, width, height, 0, gl::RGB, gl::UNSIGNED_BYTE, data.as_ptr() as *const _);
            gl::GenerateMipmap(gl::TEXTURE_2D);
            gl::BindTexture(gl::TEXTURE_2D, 0);
            Texture{ id: id, width: width, height: height, quad_shader: quad_shader, quad: quad, internal_format: gl::RGBA as i32, data_type: gl::UNSIGNED_BYTE, allocated: false, wrap_horizontal: gl::CLAMP_TO_EDGE, wrap_vertical: gl::CLAMP_TO_EDGE, min_filter: gl::LINEAR, mag_filter: gl::LINEAR}
        }
    }
}
impl Allocate<(i32, i32, i32)> for Texture {
    /// Record new `(width, height, internal_format)` and (re)allocate the
    /// GL texture storage accordingly.
    fn allocate(&mut self, args: (i32, i32, i32)) -> &mut Self {
        let (width, height, internal_format) = args;
        self.width = width;
        self.height = height;
        self.internal_format = internal_format;
        self.allocate()
    }
}
impl Texture {
fn load_image(&mut self, path: &str) -> &mut Self {
let img = image::open(&std::path::Path::new(path)).expect("Failed to load image");
let size = img.dimensions();
self.width = size.0 as i32;
self.height = size.1 as i32;
let data = img.raw_pixels();
unsafe {
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, self.wrap_horizontal as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, self.wrap_vertical as f32);
// set texture filtering parameters
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, self.min_filter as i32);
gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, self.mag_filter as i32);
gl::TexImage2D(gl::TEXTURE_2D, 0, gl::RGB as i32, self.width, self.height, 0, gl::RGB, gl::UNSIGNED_BYTE, data.as_ptr() as *const _);
gl::GenerateMipmap(gl::TEXTURE_2D);
gl::BindTexture(gl::TEXTURE_2D, 0);
}
self
}
pub fn set_wrap_mode(&mut self, horizontal: gl::types::GLuint, vertical: gl::types::GLuint) -> &mut Self {
unsafe {
self.wrap_horizontal = horizontal;
self.wrap_vertical = vertical;
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, self.wrap_horizontal as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, self.wrap_vertical as f32);
gl::BindTexture(gl::TEXTURE_2D, 0);
}
self
}
pub fn set_filter_mode(&mut self, min_filter: gl::types::GLuint, mag_filter: gl::types::GLuint) -> &mut Self {
unsafe {
self.min_filter = min_filter;
self.mag_filter = mag_filter;
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, self.min_filter as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, self.mag_filter as f32);
gl::BindTexture(gl::TEXTURE_2D, 0);
}
self
}
pub fn bind(&self) {
unsafe {
gl::BindTexture(gl::TEXTURE_2D, self.id);
}
}
pub fn unbind(&self) {
unsafe {
gl::BindTexture(gl::TEXTURE_2D, 0);
}
}
pub fn get_width(&self) -> i32 {
self.width
}
pub fn get_height(&self) -> i32 {
self.height
}
pub fn get(&self) -> &gl::types::GLuint {
&self.id
}
pub fn draw(&self) {
unsafe {
self.quad_shader.begin();
self.quad_shader.uniform_texture("u_src", self.get());
self.quad.draw_elements(gl::TRIANGLES);
self.quad_shader.end();
}
}
fn allocate(&mut self)-> &mut Self {
unsafe {
gl::BindTexture(gl::TEXTURE_2D, self.id);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, self.min_filter as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, self.min_filter as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, self.wrap_horizontal as f32);
gl::TexParameterf(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, self.wrap_vertical as f32);
gl::TexImage2D(gl::TEXTURE_2D, 0, self.internal_format, self.width, self.height, 0, self.internal_format as u32, self.data_type, std::ptr::null());
gl::BindTexture(gl::TEXTURE_2D, 0);
}
self
}
}
|
use crate::{Vec3, Ray, Intersect, DistanceField, SurfaceInteraction};
/// A sphere primitive.
pub struct Sphere {
    // Center of the sphere in world space.
    origin: Vec3,
    // Radius; assumed positive.
    radius: f32,
}
impl Sphere {
    /// Create a new Sphere.
    ///
    /// # Examples
    /// ```
    /// use obscura::{Sphere, Vec3};
    ///
    /// let origin = Vec3::zero();
    /// let x = Sphere::new(&origin, 1.0);
    /// ```
    pub fn new(origin: &Vec3, radius: f32) -> Sphere {
        let center = Vec3::new(origin.x, origin.y, origin.z);
        Sphere { origin: center, radius }
    }
    /// Build the interaction record for the hit at ray parameter `t`,
    /// where the hit point is `ray.origin + t * ray.direction` and the
    /// normal points outward from the sphere's center.
    fn t_to_surface_interaction(&self, ray: &Ray, t: f32) -> SurfaceInteraction {
        let travelled = Vec3::scale(&ray.direction, t);
        let point = Vec3::add(&ray.origin, &travelled);
        let outward = Vec3::sub(&point, &self.origin);
        let normal = Vec3::normalize(&outward);
        SurfaceInteraction { point, t_hit: t, normal }
    }
}
impl Intersect for Sphere {
    /// Intersect ray with sphere.
    /// Return t or t1,t2 where intersection point p = ray.origin + t * ray.direction
    ///
    /// # Examples
    /// ```
    /// use obscura::{Sphere, Vec3, Ray, SurfaceInteraction, Intersect};
    ///
    /// let origin = Vec3::zero();
    ///
    /// let target: Sphere = Sphere::new(
    ///     &Vec3::new(2.0, 0.0, 0.0),
    ///     1.0
    /// );
    ///
    /// // A ray that intersects the sphere twice.
    /// let ray1: Ray = Ray::new(
    ///     &origin,
    ///     &Vec3::new(1.0, 0.0, 0.0)
    /// );
    ///
    /// let point = Vec3::new(1.0, 0.0, 0.0);
    /// let normal = Vec3::new(-1.0, 0.0, 0.0);
    ///
    /// if let Some(interaction)= Sphere::intersect(&target, &ray1) {
    ///     assert_eq!(interaction.point, point);
    ///     assert_eq!(interaction.normal, normal);
    /// } else {
    ///     panic!("Oh no!");
    /// }
    ///
    /// // A ray which never intersects the sphere.
    /// let ray2: Ray = Ray::new(
    ///     &origin,
    ///     &Vec3::new(0.0, 0.0, 1.0)
    /// );
    ///
    /// let result = target.intersect(&ray2);
    /// assert_eq!(result, None);
    /// ```
    fn intersect(&self, ray: &Ray) -> Option<SurfaceInteraction> {
        // Solve |o + t*d - c|^2 = r^2 as the quadratic a*t^2 + b*t + c = 0.
        // http://viclw17.github.io/2018/07/16/raytracing-ray-sphere-intersection/
        let target = self;
        let a = Vec3::dot(&ray.direction, &ray.direction);
        let b = 2.0 * Vec3::dot(
            &ray.direction,
            &Vec3::sub(&ray.origin, &target.origin)
        );
        let c = Vec3::dot(
            &Vec3::sub(&ray.origin, &target.origin),
            &Vec3::sub(&ray.origin, &target.origin)
        ) - (target.radius * target.radius);
        let discriminant = b*b - 4.0*a*c;
        if discriminant < 0.0 {
            None
        } else if discriminant == 0.0 {
            // Tangent ray: single root. Bug fix: reject hits behind the
            // origin instead of unconditionally returning them.
            let t = (-b + f32::sqrt(discriminant)) / (2.0 * a);
            if t <= 0.0 {
                None
            } else {
                Some(self.t_to_surface_interaction(&ray, t))
            }
        } else {
            // Two roots: t1 is the near one, t2 the far one (a > 0).
            let t1 = ((-b) - f32::sqrt(discriminant)) / (2.0 * a);
            let t2 = ((-b) + f32::sqrt(discriminant)) / (2.0 * a);
            // Bug fix: pick the nearest intersection in front of the ray.
            // The old code tested the far root for positivity but returned
            // the near one, producing a hit point behind the origin when the
            // ray starts inside the sphere.
            let t = if t1 > 0.0 { t1 } else { t2 };
            if t <= 0.0 {
                None
            } else {
                Some(self.t_to_surface_interaction(&ray, t))
            }
        }
    }
}
impl DistanceField for Sphere {
    /// Implements the signed distance function for a sphere
    ///
    /// # Examples
    /// ```
    /// use obscura::{Vec3, Sphere, DistanceField};
    ///
    /// // Create a unit sphere at the origin
    /// let sphere = Sphere::new(
    ///     &Vec3::new(0.0, 0.0, 0.0),
    ///     1.0
    /// );
    ///
    /// // The smallest distance from (5, 0, 0) to the surface is 4.0:
    /// let viewpoint = Vec3::new(5.0, 0.0, 0.0);
    /// let sdf = sphere.signed_distance(&viewpoint);
    ///
    /// assert_eq!(sdf, 4.0);
    /// ```
    fn signed_distance(&self, from: &Vec3) -> f32 {
        // Distance from the query point to the center, minus the radius:
        // negative inside the sphere, zero on the surface, positive outside.
        let to_center = Vec3::sub(&self.origin, &from);
        Vec3::length(&to_center) - self.radius
    }
} |
use crate::rendering::meshrenderable::scale_color;
use crate::rendering::render_context::RenderContext;
use cgmath::{InnerSpace, Vector2};
use ggez::graphics::{Color, WHITE};
use scale::map_model::{Map, TrafficBehavior};
/// Stateless renderer for the road graph (navmesh) and traffic lights.
pub struct RoadRenderer;
/// Fill color for the inner road surface, drawn over the white outline.
const MID_GRAY: Color = Color {
    r: 0.5,
    g: 0.5,
    b: 0.5,
    a: 1.0,
};
impl RoadRenderer {
    pub fn new() -> Self {
        RoadRenderer
    }
    /// Draw the whole road network, then the traffic controls, for the
    /// given simulation `time`.
    pub fn render(&mut self, map: &Map, time: u64, rc: &mut RenderContext) {
        let navmesh = &map.navmesh;
        // Pass 1: white layer with slightly larger circles/strokes — this
        // becomes the road border once the gray layer covers the middle.
        rc.sr.color = WHITE;
        for (id, n) in navmesh {
            rc.sr.draw_circle(n.pos, 4.25);
            for e in navmesh.get_neighs(id) {
                let p2 = navmesh.get(e.to).unwrap().pos;
                rc.sr.draw_stroke(n.pos, p2, 8.5);
            }
        }
        // Pass 2: gray fill drawn on top, leaving the white edge visible.
        rc.sr.color = MID_GRAY;
        for (id, n) in navmesh {
            rc.sr.draw_circle(n.pos, 3.75);
            for e in navmesh.get_neighs(id) {
                let p2 = navmesh.get(e.to).unwrap().pos;
                rc.sr.draw_stroke(n.pos, p2, 7.5);
            }
        }
        // draw traffic lights
        for (id, n) in navmesh {
            // Nodes with an always-go control draw nothing.
            if n.control.is_always() {
                continue;
            }
            // Orient the control using the first incoming edge; without one
            // we cannot orient it, so draw a translucent red marker square.
            let id = navmesh.get_backward_neighs(id).first().map(|x| x.to);
            if id.is_none() {
                rc.sr.color = scale_color(scale::rendering::RED);
                rc.sr.color.a = 0.5;
                rc.sr.draw_rect_centered(n.pos, 20.0, 20.0);
                continue;
            }
            let id = id.unwrap();
            let dir = (navmesh[id].pos - n.pos).normalize();
            // Perpendicular to the incoming direction; the control is drawn
            // offset to the side of the road.
            let dir_nor: Vector2<f32> = [-dir.y, dir.x].into();
            let r_center = n.pos + dir_nor * 2.0;
            if n.control.is_stop() {
                // Stop sign: small red square rotated 45° (cos/sin of 45°).
                rc.sr.color = scale_color(scale::rendering::RED);
                rc.sr.draw_rect_cos_sin(
                    r_center,
                    1.0,
                    1.0,
                    std::f32::consts::FRAC_1_SQRT_2,
                    std::f32::consts::FRAC_1_SQRT_2,
                );
                continue;
            }
            // Traffic light: dark housing aligned with the road, three dim
            // bulbs, then the active bulb lit in the behavior's color.
            rc.sr.color = scale_color(scale::rendering::Color::gray(0.3));
            rc.sr.draw_rect_cos_sin(r_center, 1.1, 3.1, dir.x, dir.y);
            rc.sr.color = scale_color(scale::rendering::Color::gray(0.1));
            for i in -1..2 {
                rc.sr.draw_circle(r_center + i as f32 * dir_nor, 0.5);
            }
            rc.sr.color = scale_color(n.control.get_behavior(time).as_render_color());
            // Bulb position within the housing: red/orange/green bottom-to-top.
            let offset = match n.control.get_behavior(time) {
                TrafficBehavior::RED => -1.0,
                TrafficBehavior::ORANGE => 0.0,
                TrafficBehavior::GREEN => 1.0,
                _ => unreachable!(),
            };
            rc.sr.draw_circle(r_center + offset * dir_nor, 0.5);
        }
    }
}
|
extern crate trousers_sys;
use std::error;
use std::ffi;
use std::fmt;
use std::slice;
use trousers_sys::trousers::*;
use trousers_sys::tspi::*;
// Rust-side aliases for the primitive TSS handle/flag types used by the
// TrouSerS TSPI bindings.
pub type TssFlag = u32;
pub type TssHObject = u32;
pub type TssHContext = TssHObject;
pub type TssHTPM = TssHObject;
pub type TssHPCRS = TssHObject;
pub type TssResult = u32;
pub type TssUnicode = u16;
// TODO move this to a separate module
/// Error wrapper around a raw TSS result code; the human-readable message
/// is looked up via TrouSerS in the `Error`/`Display` impls below.
#[derive(Debug)]
pub struct TssError {
    // Raw result code returned by a TSPI call.
    pub result: TssResult
}
impl fmt::Display for TssError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(fmt.write_str(error::Error::description(self)));
Ok(())
}
}
impl error::Error for TssError {
    // Human-readable message for the stored TSS result code.
    fn description(&self) -> &str {
        // Trspi_Error_String presumably returns a pointer into TrouSerS'
        // static error-string table, which is what makes borrowing it
        // beyond this call sound — NOTE(review): confirm the pointer
        // really is 'static and never freed.
        let c_buf = unsafe { Trspi_Error_String(self.result) };
        let c_str = unsafe { ffi::CStr::from_ptr(c_buf) };
        let buf: &[u8] = c_str.to_bytes();
        // Panics if the library ever returns non-UTF-8 text.
        let str_slice: &str = std::str::from_utf8(buf).unwrap();
        str_slice
    }
    fn cause(&self) -> Option<&error::Error> {
        None
    }
}
// TODO macros for the funcitons below
// TODO move constants into a separate module?
// --- Attribute flags / sub-flags for Tspi_SetAttrib* ---
pub const TSS_TSPATTRIB_KEY_INFO: TssFlag = 0x00000080;
pub const TSS_TSPATTRIB_KEYINFO_ALGORITHM: TssFlag = 0x00000280;
pub const TSS_TSPATTRIB_RSAKEY_INFO: TssFlag = 0x00000140;
pub const TSS_TSPATTRIB_KEYINFO_RSA_PRIMES: TssFlag = 0x00004000;
// --- Algorithm ids and the success status code ---
pub const TSS_ALG_RSA: TssFlag = 0x20;
pub const TSS_SUCCESS: TssResult = 0;
// Well-known UUID of the Storage Root Key (all zero except last node byte).
pub const TSS_UUID_SRK: TSS_UUID = TSS_UUID { ulTimeLow: 0, usTimeMid: 0, usTimeHigh: 0, bClockSeqHigh: 0, bClockSeqLow: 0, rgbNode: [0, 0, 0, 0, 0, 1] };
// --- Persistent-storage selector for load_key_by_uuid ---
pub const TSS_PS_TYPE_USER: TssFlag = 1;
pub const TSS_PS_TYPE_SYSTEM: TssFlag = 2;
// --- Object type ids for Tspi_Context_CreateObject ---
const TSS_OBJECT_TYPE_POLICY: TssFlag = 1;
const TSS_OBJECT_TYPE_RSAKEY: TssFlag = 2;
const TSS_OBJECT_TYPE_PCRS: TssFlag = 4;
// --- Policy init flags (see TssPolicyInitFlag) ---
const TSS_POLICY_USAGE: TssFlag = 1;
const TSS_POLICY_MIGRATION: TssFlag = 2;
const TSS_POLICY_OPERATOR: TssFlag = 3;
// --- PCR composite layout selectors ---
const TSS_PCRS_STRUCT_DEFAULT: TssFlag = 0;
const TSS_PCRS_STRUCT_INFO: TssFlag = 1;
const TSS_PCRS_STRUCT_INFO_LONG: TssFlag = 2;
const TSS_PCRS_STRUCT_INFO_SHORT: TssFlag = 3;
// --- Key-creation init flags, OR-ed together in create_rsakey ---
const TSS_KEY_NO_AUTHORIZATION: TssFlag = 0x00000000;
const TSS_KEY_AUTHORIZATION: TssFlag = 0x00000001;
const TSS_KEY_AUTHORIZATION_PRIV_USE_ONLY: TssFlag = 0x00000002;
const TSS_KEY_NON_VOLATILE: TssFlag = 0x00000000;
const TSS_KEY_VOLATILE: TssFlag = 0x00000004;
const TSS_KEY_NOT_MIGRATABLE: TssFlag = 0x00000000;
const TSS_KEY_MIGRATABLE: TssFlag = 0x00000008;
// TODO: certified
const TSS_KEY_TYPE_SIGNING: TssFlag = 0x00000010;
const TSS_KEY_TYPE_STORAGE: TssFlag = 0x00000020;
const TSS_KEY_TYPE_IDENTITY: TssFlag = 0x00000030;
const TSS_KEY_TYPE_AUTHCHANGE: TssFlag = 0x00000040;
const TSS_KEY_TYPE_BIND: TssFlag = 0x00000050;
const TSS_KEY_TYPE_LEGACY: TssFlag = 0x00000060;
const TSS_KEY_TYPE_MIGRATE: TssFlag = 0x00000070;
const TSS_KEY_SIZE_DEFAULT: TssFlag = 0x00000000;
const TSS_KEY_SIZE_512: TssFlag = 0x00000100;
const TSS_KEY_SIZE_1024: TssFlag = 0x00000200;
const TSS_KEY_SIZE_2048: TssFlag = 0x00000300;
const TSS_KEY_SIZE_4096: TssFlag = 0x00000400;
const TSS_KEY_SIZE_8192: TssFlag = 0x00000500;
const TSS_KEY_SIZE_16384: TssFlag = 0x00000600;
const TSS_KEY_STRUCT_DEFAULT: TssFlag = 0x00000000;
const TSS_KEY_STRUCT_KEY: TssFlag = 0x00004000;
const TSS_KEY_STRUCT_KEY12: TssFlag = 0x00008000;
/// Flavor of policy object to create (maps to TSS_POLICY_* in create_policy).
pub enum TssPolicyInitFlag {
    Usage, Migration, Operator
}
/// RSA key length selector; discriminants are the TSS_KEY_SIZE_* flags.
pub enum TssKeySize {
    Default = TSS_KEY_SIZE_DEFAULT as isize,
    Size512 = TSS_KEY_SIZE_512 as isize,
    Size1024 = TSS_KEY_SIZE_1024 as isize,
    Size2048 = TSS_KEY_SIZE_2048 as isize,
    Size4096 = TSS_KEY_SIZE_4096 as isize,
    Size8192 = TSS_KEY_SIZE_8192 as isize,
    Size16384 = TSS_KEY_SIZE_16384 as isize
}
/// Intended use of the key; discriminants are the TSS_KEY_TYPE_* flags.
pub enum TssKeyType {
    Signing = TSS_KEY_TYPE_SIGNING as isize,
    Storage = TSS_KEY_TYPE_STORAGE as isize,
    Identity = TSS_KEY_TYPE_IDENTITY as isize,
    AuthChange = TSS_KEY_TYPE_AUTHCHANGE as isize,
    Bind = TSS_KEY_TYPE_BIND as isize,
    Legacy = TSS_KEY_TYPE_LEGACY as isize,
    Migrate = TSS_KEY_TYPE_MIGRATE as isize
}
/// Whether the key requires authorization for use.
pub enum TssKeyAuthorization {
    NoAuthorization = TSS_KEY_NO_AUTHORIZATION as isize,
    Authorization = TSS_KEY_AUTHORIZATION as isize,
    AuthorizationPrivUseOnly = TSS_KEY_AUTHORIZATION_PRIV_USE_ONLY as isize
}
/// Whether the key survives power cycles.
pub enum TssKeyVolatility {
    NonVolatile = TSS_KEY_NON_VOLATILE as isize,
    Volatile = TSS_KEY_VOLATILE as isize
}
/// Whether the key may be migrated to another TPM.
pub enum TssKeyMigratability {
    NotMigratable = TSS_KEY_NOT_MIGRATABLE as isize,
    Migratable = TSS_KEY_MIGRATABLE as isize
}
/// On-disk key structure version (TSS_KEY_STRUCT_* flags).
pub enum TssKeyStruct {
    Default = TSS_KEY_STRUCT_DEFAULT as isize,
    Key = TSS_KEY_STRUCT_KEY as isize,
    Key12 = TSS_KEY_STRUCT_KEY12 as isize
}
/// Operations shared by all TSP working objects (keys, policies, ...).
pub trait TssObject {
    // Raw TSP handle backing this object.
    fn get_handle(&self) -> TssHObject;
    // Thin wrappers over Tspi_SetAttribUint32 / Tspi_SetAttribData.
    fn set_attrib_uint32(&self, attrib_flag: TssFlag, sub_flag: TssFlag, attrib: u32) -> Result<(), TssError>;
    fn set_attrib_data(&self, attrib_flag: TssFlag, sub_flag: TssFlag, attrib_data: &[u8]) -> Result<(), TssError>;
}
/// Owning wrapper for a TSP context handle; freed/closed in Drop below.
pub struct TssContext {
    pub handle: u32
}
/// The TPM object associated with a context (see get_tpm_object).
pub struct TssTPM<'context> {
    pub context: &'context TssContext,
    pub handle: u32
}
/// PCR composite layout selector mirroring TSS_PCRS_STRUCT_*.
pub enum TssPcrsStructType {
    Default, Info, InfoLong, InfoShort
}
// TODO: need to have Drop?
/// A policy object created from a context.
pub struct TssPolicy<'context> {
    pub context: &'context TssContext,
    pub handle: TssHPCRS
}
// TODO: need to have Drop?
/// An RSA key object created by or loaded into a context.
pub struct TssRsaKey<'context> {
    pub context: &'context TssContext,
    pub handle: TssHObject
}
/// Owned copy of the TSS_VALIDATION data produced by TssTPM::quote.
pub struct TssValidation {
    pub version_info: TSS_VERSION,
    pub external_data: Vec<u8>,
    pub data: Vec<u8>,
    pub validation_data: Vec<u8>
}
/// PCR composite built with the TSS_PCRS_STRUCT_INFO layout.
pub struct TssPCRCompositeInfo<'context> {
    pub context: &'context TssContext,
    pub handle: TssHPCRS
}
/// PCR composite built with the TSS_PCRS_STRUCT_INFO_LONG layout.
pub struct TssPCRCompositeInfoLong<'context> {
    pub context: &'context TssContext,
    pub handle: TssHPCRS
}
/// PCR composite built with the TSS_PCRS_STRUCT_INFO_SHORT layout.
pub struct TssPCRCompositeInfoShort<'context> {
    pub context: &'context TssContext,
    pub handle: TssHPCRS
}
/// Base capability shared by every PCR composite flavor.
pub trait TcpaPcrInfoAny {
    fn get_handle(&self) -> TssHPCRS;
}
/// Capabilities of the 1.1-style INFO layout.
#[allow(non_camel_case_types)]
pub trait TcpaPcrInfo1_1 : TcpaPcrInfoAny {
    fn get_handle(&self) -> TssHPCRS;
}
/// Capabilities of the 1.2-style INFO_LONG / INFO_SHORT layouts.
#[allow(non_camel_case_types)]
pub trait TcpaPcrInfo1_2 : TcpaPcrInfoAny {
    fn get_handle(&self) -> TssHPCRS;
    fn select_pcr_index_ex(&self, pcr_index: u32, direction: u32) -> Result<(), TssError>;
}
// Trait wiring: the INFO composite implements the base + 1.1 traits; the
// LONG and SHORT variants get the base trait here and their 1.2 trait
// impls further below.
impl<'c> TcpaPcrInfoAny for TssPCRCompositeInfo<'c> {
    fn get_handle(&self) -> TssHPCRS { self.handle }
}
impl<'c> TcpaPcrInfo1_1 for TssPCRCompositeInfo<'c> {
    fn get_handle(&self) -> TssHPCRS { self.handle }
}
impl<'c> TcpaPcrInfoAny for TssPCRCompositeInfoLong<'c> {
    fn get_handle(&self) -> TssHPCRS { self.handle }
}
impl<'c> TcpaPcrInfoAny for TssPCRCompositeInfoShort<'c> {
    fn get_handle(&self) -> TssHPCRS { self.handle }
}
/// Copies `length` bytes starting at `ptr` into an owned `Vec<u8>`.
///
/// The caller must ensure `ptr` is valid for reads of `length` bytes
/// (typically it points at a buffer allocated by the TSS library).
fn copy_raw_ptr_to_vec(ptr: *const u8, length: usize) -> Vec<u8> {
    // SAFETY: caller guarantees `ptr` points to `length` readable bytes.
    // `to_vec` replaces the old byte-at-a-time push loop (one memcpy,
    // capacity reserved up front).
    unsafe { slice::from_raw_parts(ptr, length) }.to_vec()
}
/// Shared implementation of `TssObject::set_attrib_uint32`: forwards to
/// the raw TSP call and translates the status code.
fn set_attrib_uint32_impl(object: &TssObject, attrib_flag: TssFlag, sub_flag: TssFlag, attrib: u32) -> Result<(), TssError> {
    let rc = unsafe { Tspi_SetAttribUint32(object.get_handle(), attrib_flag, sub_flag, attrib) };
    match rc {
        TSS_SUCCESS => Ok(()),
        result => Err(TssError { result: result }),
    }
}
/// Shared implementation of `TssObject::set_attrib_data`: hands the byte
/// slice to the raw TSP call and translates the status code.
fn set_attrib_data_impl(object: &TssObject, attrib_flag: TssFlag, sub_flag: TssFlag, attrib_data: &[u8]) -> Result<(), TssError> {
    let rc = unsafe {
        // TODO is usize to u32 cast safe?
        Tspi_SetAttribData(object.get_handle(), attrib_flag, sub_flag, attrib_data.len() as u32, attrib_data.as_ptr() as *mut u8)
    };
    match rc {
        TSS_SUCCESS => Ok(()),
        result => Err(TssError { result: result }),
    }
}
impl TssContext {
pub fn new() -> Result<TssContext, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_Create(&mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssContext { handle: handle })
}
// TODO: support destination
pub fn connect(&self) -> Result<(), TssError> {
let result = unsafe {
Tspi_Context_Connect(self.handle, 0 as *mut u16)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(())
}
pub fn get_tpm_object(&self) -> Result<TssTPM, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_GetTpmObject(self.handle, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssTPM { context: self, handle: handle })
}
pub fn load_key_by_uuid(&self, persistent_storage_type: TssFlag, uuid_data: TSS_UUID) -> Result<TssRsaKey, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_LoadKeyByUUID(self.handle, persistent_storage_type, uuid_data, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssRsaKey { context: self, handle: handle })
}
// TODO: DRY creating objects, probably use try!
// TODO: make this signature shorter? give default values?
pub fn create_rsakey(&self, key_size: TssKeySize, key_type: TssKeyType,
auth: TssKeyAuthorization, volatile: TssKeyVolatility, migratable: TssKeyMigratability,
struct_version: TssKeyStruct) -> Result<TssRsaKey, TssError> {
let init_flags = key_size as u32 | key_type as u32 | auth as u32 | volatile as u32 | migratable as u32 | struct_version as u32;
let mut handle = 0;
let result = unsafe {
Tspi_Context_CreateObject(self.handle, TSS_OBJECT_TYPE_RSAKEY, init_flags, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssRsaKey { context: self, handle: handle })
}
pub fn create_policy(&self, init_flag: TssPolicyInitFlag) -> Result<TssPolicy, TssError> {
let init_flags = match init_flag {
TssPolicyInitFlag::Usage => TSS_POLICY_USAGE,
TssPolicyInitFlag::Migration => TSS_POLICY_MIGRATION,
TssPolicyInitFlag::Operator => TSS_POLICY_OPERATOR
};
let mut handle = 0;
let result = unsafe {
Tspi_Context_CreateObject(self.handle, TSS_OBJECT_TYPE_POLICY, init_flags, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssPolicy { context: self, handle: handle })
}
pub fn create_pcr_composite_info(&self) -> Result<TssPCRCompositeInfo, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_CreateObject(self.handle, TSS_OBJECT_TYPE_PCRS, TSS_PCRS_STRUCT_INFO, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssPCRCompositeInfo { context: self, handle: handle })
}
pub fn create_pcr_composite_info_long(&self) -> Result<TssPCRCompositeInfoLong, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_CreateObject(self.handle, TSS_OBJECT_TYPE_PCRS, TSS_PCRS_STRUCT_INFO_LONG, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssPCRCompositeInfoLong { context: self, handle: handle })
}
pub fn create_pcr_composite_info_short(&self) -> Result<TssPCRCompositeInfoShort, TssError> {
let mut handle = 0;
let result = unsafe {
Tspi_Context_CreateObject(self.handle, TSS_OBJECT_TYPE_PCRS, TSS_PCRS_STRUCT_INFO_SHORT, &mut handle)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(TssPCRCompositeInfoShort { context: self, handle: handle })
}
}
impl Drop for TssContext {
    fn drop(&mut self) {
        // Free library-allocated memory for this context, then close it.
        // NOTE(review): the null pointer presumably frees *all* memory
        // blocks owned by the context (TSS convention) — confirm against
        // the Tspi_Context_FreeMemory documentation.
        unsafe {
            Tspi_Context_FreeMemory(self.handle, 0 as *mut u8);
            Tspi_Context_Close(self.handle);
        }
    }
}
impl<'context> TssTPM<'context> {
// TODO: UNTESTED
pub fn quote(&self, ident_key: &TssRsaKey, pcr_composite: &TssPCRCompositeInfo, external_data: &[u8; 20]) -> Result<TssValidation, TssError> {
let mut validation_data = TSS_VALIDATION { versionInfo: TSS_VERSION { bMajor: 0, bMinor: 0, bRevMajor: 0, bRevMinor: 0 }, ulExternalDataLength: 20, rgbExternalData: external_data.as_ptr() as *mut u8, ulDataLength: 0, rgbData: 0 as *mut u8, ulValidationDataLength: 0, rgbValidationData: 0 as *mut u8 };
let result = unsafe {
Tspi_TPM_Quote(self.handle, ident_key.handle, pcr_composite.handle, &mut validation_data)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
let validation_result = TssValidation {
version_info: validation_data.versionInfo.clone(),
external_data: copy_raw_ptr_to_vec(external_data as *const u8, external_data.len()),
data: copy_raw_ptr_to_vec(validation_data.rgbData, validation_data.ulDataLength as usize),
validation_data: copy_raw_ptr_to_vec(validation_data.rgbValidationData, validation_data.ulValidationDataLength as usize)
};
unsafe {
Tspi_Context_FreeMemory(self.context.handle, validation_data.rgbData);
Tspi_Context_FreeMemory(self.context.handle, validation_data.rgbValidationData);
}
Ok(validation_result)
}
pub fn pcr_read(&self, pcr_index: u32) -> Result<Vec<u8>, TssError> {
let mut pcr_value_length = 0;
let mut pcr_value_ptr = 0 as *mut u8;
let result = unsafe {
Tspi_TPM_PcrRead(self.handle, pcr_index, &mut pcr_value_length, &mut pcr_value_ptr)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
let mut vec = Vec::new();
unsafe {
for i in 0..pcr_value_length {
// TODO: Is this isize cast safe?
vec.push(*pcr_value_ptr.offset(i as isize));
}
Tspi_Context_FreeMemory(self.context.handle, pcr_value_ptr);
}
Ok(vec)
}
// TODO: events
pub fn pcr_extend(&self, pcr_index: u32, data: &[u8]) -> Result<Vec<u8>, TssError> {
let mut pcr_value_length = 0;
let mut pcr_value_ptr = 0 as *mut u8;
let result = unsafe {
// TODO: Is this u32 cast safe?
Tspi_TPM_PcrExtend(self.handle, pcr_index, data.len() as u32, data.as_ptr() as *mut u8, 0 as *mut Struct_tdTSS_PCR_EVENT, &mut pcr_value_length, &mut pcr_value_ptr)
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
// TODO: DRY with above
let mut vec = Vec::new();
unsafe {
for i in 0..pcr_value_length {
// TODO: Is this isize cast safe?
vec.push(*pcr_value_ptr.offset(i as isize));
}
Tspi_Context_FreeMemory(self.context.handle, pcr_value_ptr);
}
Ok(vec)
}
pub fn pcr_reset(&self, pcr_composite: &TcpaPcrInfoAny) -> Result<(), TssError> {
let result = unsafe {
Tspi_TPM_PcrReset(self.handle, pcr_composite.get_handle())
};
if result != TSS_SUCCESS {
return Err(TssError { result: result });
}
Ok(())
}
}
impl<'context> TssPolicy<'context> {
    // No methods yet; secret assignment is still to be implemented.
    //pub fn set_secret(mode: TssSecretMode, secret_length: u32, secret: &[u8]) -> Result<(), TssError> {
    // TODO
    //}
}
impl<'c> TssObject for TssRsaKey<'c> {
    fn get_handle(&self) -> TssHObject { self.handle }
    // Attribute setters delegate to the shared free-function helpers above.
    fn set_attrib_uint32(&self, attrib_flag: TssFlag, sub_flag: TssFlag, attrib: u32) -> Result<(), TssError> {
        set_attrib_uint32_impl(self, attrib_flag, sub_flag, attrib)
    }
    fn set_attrib_data(&self, attrib_flag: TssFlag, sub_flag: TssFlag, attrib_data: &[u8]) -> Result<(), TssError> {
        set_attrib_data_impl(self, attrib_flag, sub_flag, attrib_data)
    }
}
/// Shared helper for the 1.2-style PCR selection used by both the
/// INFO_LONG and INFO_SHORT composite objects.
fn pcr_composite_select_pcr_index_ex(handle: TssHPCRS, pcr_index: u32, direction: u32) -> Result<(), TssError> {
    match unsafe { Tspi_PcrComposite_SelectPcrIndexEx(handle, pcr_index, direction) } {
        TSS_SUCCESS => Ok(()),
        result => Err(TssError { result: result }),
    }
}
impl<'c> TssPCRCompositeInfo<'c> {
    /// Adds `pcr_index` to the set of PCRs selected by this composite
    /// (1.1-style call without a direction argument).
    fn select_pcr_index(&self, pcr_index: u32) -> Result<(), TssError> {
        match unsafe { Tspi_PcrComposite_SelectPcrIndex(self.handle, pcr_index) } {
            TSS_SUCCESS => Ok(()),
            result => Err(TssError { result: result }),
        }
    }
}
// 1.2-capability wiring for the LONG and SHORT layouts; both forward the
// extended selection to the shared helper above.
impl<'c> TcpaPcrInfo1_2 for TssPCRCompositeInfoLong<'c> {
    fn get_handle(&self) -> u32 { self.handle }
    fn select_pcr_index_ex(&self, pcr_index: u32, direction: u32) -> Result<(), TssError> {
        pcr_composite_select_pcr_index_ex(self.handle, pcr_index, direction)
    }
}
impl<'c> TcpaPcrInfo1_2 for TssPCRCompositeInfoShort<'c> {
    fn get_handle(&self) -> u32 { self.handle }
    fn select_pcr_index_ex(&self, pcr_index: u32, direction: u32) -> Result<(), TssError> {
        pcr_composite_select_pcr_index_ex(self.handle, pcr_index, direction)
    }
}
// Need to implement Drop?
|
// Copyright (c) Facebook, Inc. and its affiliates.
use anyhow::{anyhow, bail, Result};
use crossbeam::channel::Sender;
use env_logger;
use glob::glob;
use lazy_static::lazy_static;
use log::{info, warn};
use num;
use simplelog as sl;
use std::cell::RefCell;
use std::env;
use std::ffi::{CString, OsStr, OsString};
use std::fs;
use std::io::prelude::*;
use std::io::BufReader;
use std::mem::size_of;
use std::os::linux::fs::MetadataExt as LinuxME;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::fs::MetadataExt as UnixME;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
use std::process::{self, Command};
use std::sync::{Condvar, Mutex};
use std::thread_local;
use std::time::{Duration, UNIX_EPOCH};
use sysinfo::{self, SystemExt};
pub mod json_file;
pub mod storage_info;
pub mod systemd;
pub use json_file::{
JsonArgs, JsonArgsHelper, JsonConfigFile, JsonLoad, JsonRawFile, JsonReportFile, JsonSave,
};
pub use storage_info::*;
pub use systemd::TransientService;
// Unit-conversion multipliers: seconds -> milliseconds, ratio -> percent,
// and milliseconds expressed in seconds.
pub const TO_MSEC: f64 = 1000.0;
pub const TO_PCT: f64 = 100.0;
pub const MSEC: f64 = 1.0 / 1000.0;
lazy_static! {
    // Total physical memory in bytes (sysinfo reports KiB, hence * 1024).
    pub static ref TOTAL_MEMORY: usize = {
        let mut sys = sysinfo::System::new();
        sys.refresh_memory();
        sys.get_total_memory() as usize * 1024
    };
    // Total swap space in bytes.
    pub static ref TOTAL_SWAP: usize = {
        let mut sys = sysinfo::System::new();
        sys.refresh_memory();
        sys.get_total_swap() as usize * 1024
    };
    // System page size and logical CPU count, resolved once.
    pub static ref PAGE_SIZE: usize = ::page_size::get();
    pub static ref NR_CPUS: usize = ::num_cpus::get();
    // Whether the swap device sits on rotational storage.
    pub static ref ROTATIONAL_SWAP: bool = storage_info::is_swap_rotational();
    // Heuristic: the presence of fbagentd.service anywhere under cgroupfs
    // marks an FB production host, where defaults get adjusted.
    pub static ref IS_FB_PROD: bool = {
        match glob("/sys/fs/cgroup/**/fbagentd.service")
            .unwrap()
            .filter_map(|x| x.ok())
            .next()
        {
            Some(_) => {
                warn!("FB PROD detected, default parameters will be adjusted");
                true
            }
            None => false,
        }
    };
}
/// Converts any numeric byte count into fractional gibibytes.
pub fn to_gb<T>(size: T) -> f64
where
    T: num::ToPrimitive,
{
    size.to_f64().unwrap() / ((1u64 << 30) as f64)
}
/// Converts any numeric byte count into fractional mebibytes.
pub fn to_mb<T>(size: T) -> f64
where
    T: num::ToPrimitive,
{
    size.to_f64().unwrap() / ((1u64 << 20) as f64)
}
/// Converts any numeric byte count into fractional kibibytes.
pub fn to_kb<T>(size: T) -> f64
where
    T: num::ToPrimitive,
{
    size.to_f64().unwrap() / ((1u64 << 10) as f64)
}
/// Piecewise-linear map: ratio 0.0 -> left, 0.5 -> mid, 1.0 -> right,
/// clamped into [left, right].
pub fn scale_ratio<T>(ratio: f64, (left, mid, right): (T, T, T)) -> T
where
    T: PartialOrd + num::FromPrimitive + num::ToPrimitive,
{
    let left_f64 = left.to_f64().unwrap();
    let mid_f64 = mid.to_f64().unwrap();
    let right_f64 = right.to_f64().unwrap();
    // Interpolate on the lower or upper half-segment.
    let scaled = if ratio < 0.5 {
        left_f64 + (mid_f64 - left_f64) * ratio / 0.5
    } else {
        mid_f64 + (right_f64 - mid_f64) * (ratio - 0.5) / 0.5
    };
    num::clamp(T::from_f64(scaled).unwrap(), left, right)
}
/// Renders a byte count using the smallest unit suffix that keeps the
/// number below 1024 units ("-" for sub-unit values, "INF" past exbibytes).
pub fn format_size<T>(size: T) -> String
where
    T: num::ToPrimitive,
{
    // One decimal below 100 units, integer below 1024 units, else defer
    // to the next-larger unit.
    fn helper(size: u64, shift: u32, suffix: &str) -> Option<String> {
        let unit: u64 = 1 << shift;
        if size < unit {
            Some("-".to_string())
        } else if size < 100 * unit {
            Some(format!("{:.1}{}", size as f64 / unit as f64, suffix))
        } else if size < 1024 * unit {
            Some(format!("{:}{}", size / unit, suffix))
        } else {
            None
        }
    }
    let size = size.to_u64().unwrap();
    [(0u32, "b"), (10, "k"), (20, "m"), (30, "g"), (40, "p"), (50, "e")]
        .iter()
        .find_map(|&(shift, suffix)| helper(size, shift, suffix))
        .unwrap_or_else(|| "INF".into())
}
/// Formats a 0..1 ratio as a percentage string: "-" for zero, "100" once
/// the value rounds into triple digits, otherwise one decimal place.
pub fn format_pct(ratio: f64) -> String {
    match ratio {
        r if r == 0.0 => "-".into(),
        r if r > 0.99 => "100".into(),
        r => format!("{:.01}", r * 100.0),
    }
}
/// True when the path is a regular file with any execute bit set;
/// unreadable/missing paths count as not executable.
fn is_executable<P: AsRef<Path>>(path_in: P) -> bool {
    path_in
        .as_ref()
        .metadata()
        .map(|md| md.is_file() && md.mode() & 0o111 != 0)
        .unwrap_or(false)
}
/// Returns the directory that contains the currently running executable.
pub fn exe_dir() -> Result<PathBuf> {
    let mut dir = env::current_exe()?;
    dir.pop(); // drop the binary's file name, keeping its directory
    Ok(dir)
}
/// Locates an executable named `name_in`, searching an optional prepended
/// directory list before $PATH; returns the first canonicalized match.
pub fn find_bin<N: AsRef<OsStr>, P: AsRef<OsStr>>(
    name_in: N,
    prepend_in: Option<P>,
) -> Option<PathBuf> {
    let name = name_in.as_ref();
    // Build a single ':'-joined search string so split_paths handles both
    // the prepended dirs and $PATH uniformly.
    let mut search = OsString::new();
    if let Some(prepend) = prepend_in.as_ref() {
        search.push(prepend);
        search.push(":");
    }
    if let Some(dirs) = env::var_os("PATH") {
        search.push(dirs);
    }
    env::split_paths(&search)
        .map(|dir| dir.join(name))
        .filter_map(|candidate| candidate.canonicalize().ok())
        .find(|candidate| is_executable(candidate))
}
/// Sets the group of `path_in` to `gid` (owner unchanged), skipping the
/// chown(2) syscall when the group already matches.
pub fn chgrp<P: AsRef<Path>>(path_in: P, gid: u32) -> Result<()> {
    let path = path_in.as_ref();
    let md = fs::metadata(path)?;
    if md.st_gid() != gid {
        let cpath = CString::new(path.as_os_str().as_bytes())?;
        // Passing the current uid keeps ownership; only the group changes.
        if unsafe { libc::chown(cpath.as_ptr(), md.st_uid(), gid) } < 0 {
            bail!("Failed to chgrp {:?} to {} ({:?})", path, gid, unsafe {
                *libc::__errno_location()
            });
        }
    }
    Ok(())
}
/// Ensures the setgid permission bit (0o2000) is set on `path_in`.
pub fn set_sgid<P: AsRef<Path>>(path_in: P) -> Result<()> {
    let path = path_in.as_ref();
    let md = fs::metadata(path)?;
    let mut perm = md.permissions();
    // BUGFIX: the old check tested 0x2000 (== 0o20000, a file-type bit of
    // st_mode) rather than the setgid permission bit 0o2000 that is set
    // below, so the "already set" early-out never matched.
    if perm.mode() & 0o2000 == 0 {
        perm.set_mode(perm.mode() | 0o2000);
        fs::set_permissions(path, perm)?;
    }
    Ok(())
}
/// Returns the first line of the file; errors if the file has no lines.
pub fn read_one_line<P: AsRef<Path>>(path: P) -> Result<String> {
    let file = fs::OpenOptions::new().read(true).open(path)?;
    let mut lines = BufReader::new(file).lines();
    Ok(lines.next().ok_or(anyhow!("File empty"))??)
}
/// Writes `line` into an existing file (no newline appended, no create).
pub fn write_one_line<P: AsRef<Path>>(path: P, line: &str) -> Result<()> {
    let mut file = fs::OpenOptions::new().write(true).open(path)?;
    file.write_all(line.as_ref())?;
    Ok(())
}
/// Current wall-clock time as whole seconds since the Unix epoch.
pub fn unix_now() -> u64 {
    std::time::SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs()
}
/// Initializes logging for the process.
///
/// If RUST_LOG is set, defers entirely to env_logger. Otherwise uses
/// simplelog at a level from `verbosity` (0 = Info, 1 = Debug, 2+ =
/// Trace), preferring a terminal logger on stderr and falling back to
/// SimpleLogger when no terminal is available.
pub fn init_logging(verbosity: u32) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    } else {
        let sl_level = match verbosity {
            0 => sl::LevelFilter::Info,
            1 => sl::LevelFilter::Debug,
            _ => sl::LevelFilter::Trace,
        };
        // Suppress time/location/target/thread decorations in every record.
        let mut lcfg = sl::ConfigBuilder::new();
        lcfg.set_time_level(sl::LevelFilter::Off)
            .set_location_level(sl::LevelFilter::Off)
            .set_target_level(sl::LevelFilter::Off)
            .set_thread_level(sl::LevelFilter::Off);
        if let Err(_) = sl::TermLogger::init(sl_level, lcfg.build(), sl::TerminalMode::Stderr) {
            sl::SimpleLogger::init(sl_level, lcfg.build()).unwrap();
        }
    }
}
/// Thread body that forwards lines from a child process's stdout into
/// `tx`, stopping on read failure or when the receiver hangs up.
///
/// NOTE(review): the error message hardcodes "journalctl" although the
/// function is generic over any child — possibly stale text.
pub fn child_reader_thread(name: String, stdout: process::ChildStdout, tx: Sender<String>) {
    let reader = BufReader::new(stdout);
    for line in reader.lines() {
        match line {
            Ok(line) => {
                // Send failure means the receiving side is gone; stop quietly.
                if let Err(e) = tx.send(line) {
                    info!("{}: Reader thread terminating ({:?})", &name, &e);
                    break;
                }
            }
            Err(e) => {
                warn!("{}: Failed to read from journalctl ({:?})", &name, &e);
                break;
            }
        }
    }
}
/// Runs `cmd` to completion, turning a non-zero exit or spawn failure
/// into an error annotated with `emsg`.
pub fn run_command(cmd: &mut Command, emsg: &str) -> Result<()> {
    // Capture the debug form up front; `status()` borrows cmd mutably.
    let cmd_str = format!("{:?}", &cmd);
    match cmd.status() {
        Ok(status) if status.success() => Ok(()),
        Ok(status) => bail!("{:?} ({:?}): {}", &cmd_str, &status, emsg,),
        Err(err) => bail!("{:?} ({:?}): {}", &cmd_str, &err, emsg,),
    }
}
/// Fills `area` with data of (roughly) the requested compressibility.
///
/// `comp` in [0, 1]: within each 512-byte block, the first `1 - comp`
/// fraction of u64 words are randomized and the remainder zeroed.
///
/// NOTE(review): the reslice-as-u64 assumes `area`'s base pointer is
/// 8-byte aligned and its byte length a multiple of 8; otherwise
/// `from_raw_parts_mut` is UB — confirm callers guarantee this.
pub fn fill_area_with_random<T, R: rand::Rng + ?Sized>(area: &mut [T], comp: f64, rng: &mut R) {
    let area = unsafe {
        std::slice::from_raw_parts_mut(
            std::mem::transmute::<*mut T, *mut u64>(area.as_mut_ptr()),
            area.len() * size_of::<T>() / size_of::<u64>(),
        )
    };
    const BLOCK_SIZE: usize = 512;
    const WORDS_PER_BLOCK: usize = BLOCK_SIZE / size_of::<u64>();
    // Number of randomized words per block, capped at a full block.
    let rands_per_block = (((WORDS_PER_BLOCK as f64) * (1.0 - comp)) as usize).min(WORDS_PER_BLOCK);
    let last_first = area[0];
    for i in 0..area.len() {
        area[i] = if i % WORDS_PER_BLOCK < rands_per_block {
            rng.gen()
        } else {
            0
        };
    }
    // guarantee that the first word doesn't stay the same
    // NOTE(review): `+= 1` can overflow-panic in debug builds if the word
    // happens to be u64::MAX; wrapping_add(1) would be safer.
    if area[0] == last_first {
        area[0] += 1;
    }
}
// Process-wide run state shared between threads: an exit flag plus a
// monotonically increasing "kick" sequence used to wake sleepers.
struct GlobalProgState {
    exiting: bool,
    kick_seq: u64,
}
lazy_static! {
    static ref PROG_STATE: Mutex<GlobalProgState> = Mutex::new(GlobalProgState {
        exiting: false,
        kick_seq: 1
    });
    // Condvar notified whenever the state above changes.
    static ref PROG_WAITQ: Condvar = Condvar::new();
}
thread_local! {
    // Last kick sequence this thread has observed (see wait_prog_state).
    static LOCAL_KICK_SEQ: RefCell<u64> = RefCell::new(0);
}
/// Installs a SIGINT/SIGTERM handler that flips the global exiting flag
/// and wakes every thread parked in wait_prog_state().
pub fn setup_prog_state() {
    ctrlc::set_handler(move || {
        info!("SIGINT/TERM received, exiting...");
        set_prog_exiting();
    })
    .expect("Error setting term handler");
}
/// Marks the program as exiting and wakes all state waiters.
pub fn set_prog_exiting() {
    PROG_STATE.lock().unwrap().exiting = true;
    PROG_WAITQ.notify_all();
}
/// Returns true once an exit has been requested.
pub fn prog_exiting() -> bool {
    PROG_STATE.lock().unwrap().exiting
}
/// Bumps the kick sequence and wakes all state waiters.
pub fn prog_kick() {
    PROG_STATE.lock().unwrap().kick_seq += 1;
    PROG_WAITQ.notify_all();
}
/// Outcome reported by wait_prog_state().
#[derive(PartialEq, Eq)]
pub enum ProgState {
    Running,
    Exiting,
    Kicked,
}
/// Waits up to `dur` for a program-state change.
///
/// Returns `Exiting` when shutdown was requested, `Kicked` when
/// prog_kick() has bumped the sequence past what this thread last saw,
/// and `Running` after a plain timeout. The condvar is waited on at most
/// once (`first`), so a spurious wakeup degrades into `Running`.
pub fn wait_prog_state(dur: Duration) -> ProgState {
    let mut first = true;
    let mut state = PROG_STATE.lock().unwrap();
    loop {
        if state.exiting {
            return ProgState::Exiting;
        }
        // Consume a pending kick: advance our thread-local view of the
        // sequence and report Kicked exactly once per bump.
        if LOCAL_KICK_SEQ.with(|seq| {
            if *seq.borrow() < state.kick_seq {
                *seq.borrow_mut() = state.kick_seq;
                true
            } else {
                false
            }
        }) {
            return ProgState::Kicked;
        }
        if first {
            state = PROG_WAITQ.wait_timeout(state, dur).unwrap().0;
            first = false;
        } else {
            return ProgState::Running;
        }
    }
}
|
use std::fs;
/// Intcode interpreter (Advent-of-Code day 2 style): opcode 1 adds,
/// opcode 2 multiplies, 99 halts; operands are position-mode.
fn main() {
    let mut program: Vec<u32> = fs::read_to_string("input.txt")
        .unwrap()
        .trim()
        .split(',')
        .map(|x| x.parse().unwrap())
        .collect();
    // Restore the "1202 program alarm" state before running.
    program[1] = 12;
    program[2] = 2;
    let mut i: usize = 0;
    while i < program.len() {
        let opcode = program[i];
        i += 1;
        if opcode == 99 {
            break;
        }
        if opcode != 1 && opcode != 2 {
            // Unknown opcode: skip it, as the original interpreter did.
            continue;
        }
        let op1 = program[program[i] as usize];
        let op2 = program[program[i + 1] as usize];
        let result_pos = program[i + 2] as usize;
        i += 3;
        if opcode == 1 {
            program[result_pos] = op1 + op2;
            println!("{} <- {} + {} = {}", result_pos, op1, op2, op1 + op2);
        } else {
            program[result_pos] = op1 * op2;
            println!("{} <- {} * {} = {}", result_pos, op1, op2, op1 * op2);
        }
    }
    println!("Result: {}", program[0]);
}
|
use serde::{Deserialize, Serialize};
use super::audio_status_code;
/// Audit record deserialized from the upstream API.
// NOTE(review): field semantics are inferred from names only — confirm
// against the producing API's schema before relying on these comments.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Audit {
    // Current audit status, when present.
    status: Option<audio_status_code::AudioStatusCode>,
    // Free-form feedback entries.
    feed_back: Vec<String>,
    // Initial / last-modified markers; presumably timestamps.
    init: Option<i32>,
    lastmod: Option<i32>,
    // Correction applied during audit, if any.
    corr: Option<Correction>,
    // Extension placeholder.
    ext: Option<AuditExt>,
}
/// Placeholder for audit extensions (currently no fields).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct AuditExt {}
/// Placeholder for correction details (currently no fields).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Correction {}
|
// Submodule declarations; each lives in the sibling file of the same name.
pub mod grand_product;
pub mod permutation;
pub mod well_formed;
pub mod s_perm;
|
use crate::{
custom_client::MedalCount,
database::OsuData,
embeds::{EmbedData, MedalCountEmbed},
pagination::{MedalCountPagination, Pagination},
util::{constants::OSEKAI_ISSUE, numbers, InteractionExt, MessageExt},
BotResult, Context,
};
use eyre::Report;
use std::sync::Arc;
use twilight_model::application::interaction::ApplicationCommand;
/// Shows the osekai medal-count ranking, highlighting the invoking user
/// when their osu! name appears in it, then drives the pagination.
pub(super) async fn medal_count(ctx: Arc<Context>, command: ApplicationCommand) -> BotResult<()> {
    let owner = command.user_id()?;
    let osu_fut = ctx.psql().get_user_osu(owner);
    let osekai_fut = ctx.clients.custom.get_osekai_ranking::<MedalCount>();
    // Fetch the ranking and the caller's profile concurrently; a missing
    // profile only costs the author highlight, a missing ranking is fatal.
    let (ranking, author_name) = match tokio::join!(osekai_fut, osu_fut) {
        (Ok(ranking), Ok(osu)) => (ranking, osu.map(OsuData::into_username)),
        (Ok(ranking), Err(why)) => {
            let report = Report::new(why).wrap_err("failed to retrieve user config");
            warn!("{:?}", report);
            (ranking, None)
        }
        (Err(why), _) => {
            let _ = command.error(&ctx, OSEKAI_ISSUE).await;
            return Err(why.into());
        }
    };
    // Position of the caller within the ranking, if present.
    let author_idx = author_name.as_deref().and_then(|name| {
        ranking
            .iter()
            .position(|entry| entry.username.as_str() == name)
    });
    let pages = numbers::div_euclid(10, ranking.len());
    // BUGFIX: `&ranking[..10]` panicked whenever the ranking held fewer
    // than 10 entries; clamp the first page to the available length.
    let first_page_len = ranking.len().min(10);
    let embed_data = MedalCountEmbed::new(&ranking[..first_page_len], 0, author_idx, (1, pages));
    let builder = embed_data.into_builder().build().into();
    let response = command.create_message(&ctx, builder).await?.model().await?;
    let pagination = MedalCountPagination::new(response, ranking, author_idx);
    // Handle page-turn interactions in the background for 60 seconds.
    tokio::spawn(async move {
        if let Err(err) = pagination.start(&ctx, owner, 60).await {
            warn!("{:?}", Report::new(err));
        }
    });
    Ok(())
}
|
use core::future::Future;
/// Catch-all transfer error shared by the traits below.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive]
pub enum Error {
    Other,
}
/// Async reader returning a GAT future per call.
// NOTE(review): exact fill semantics (whole buffer vs partial read) are
// not visible here — confirm against implementations.
pub trait Read {
    type ReadFuture<'a>: Future<Output = Result<(), Error>>
    where
        Self: 'a;
    fn read<'a>(&'a mut self, buf: &'a mut [u8]) -> Self::ReadFuture<'a>;
}
/// Async reader that also stops when the line goes idle.
pub trait ReadUntilIdle {
    type ReadUntilIdleFuture<'a>: Future<Output = Result<usize, Error>>
    where
        Self: 'a;
    /// Receive into the buffer until the buffer is full or the line is idle after some bytes are received
    /// Return the number of bytes received
    fn read_until_idle<'a>(&'a mut self, buf: &'a mut [u8]) -> Self::ReadUntilIdleFuture<'a>;
}
/// Async writer returning a GAT future per call.
pub trait Write {
    type WriteFuture<'a>: Future<Output = Result<(), Error>>
    where
        Self: 'a;
    fn write<'a>(&'a mut self, buf: &'a [u8]) -> Self::WriteFuture<'a>;
}
|
//! Module defining the initial page of the application.
//!
//! It contains elements to select network adapter and traffic filters.
use iced::widget::scrollable::Direction;
use iced::widget::tooltip::Position;
use iced::widget::{
button, horizontal_space, vertical_space, Button, Column, Container, PickList, Row, Scrollable,
Text, Tooltip,
};
use iced::Length::FillPortion;
use iced::{alignment, Alignment, Font, Length, Renderer};
use pcap::Device;
use crate::gui::components::radio::{ip_version_radios, transport_protocol_radios};
use crate::gui::styles::button::ButtonType;
use crate::gui::styles::container::ContainerType;
use crate::gui::styles::scrollbar::ScrollbarType;
use crate::gui::styles::style_constants::{get_font, FONT_SIZE_SUBTITLE, FONT_SIZE_TITLE};
use crate::gui::styles::text::TextType;
use crate::gui::styles::types::gradient_type::GradientType;
use crate::gui::types::message::Message;
use crate::gui::types::sniffer::Sniffer;
use crate::translations::translations::{
address_translation, addresses_translation, all_translation, application_protocol_translation,
choose_adapters_translation, select_filters_translation, start_translation,
};
use crate::utils::types::icon::Icon;
use crate::{AppProtocol, Language, StyleType};
/// Computes the body of gui initial page.
///
/// Layout: adapter picker on the left; IP-version, transport and
/// application-protocol filters plus the start button on the right.
pub fn initial_page(sniffer: &Sniffer) -> Container<Message, Renderer<StyleType>> {
    let font = get_font(sniffer.style);
    // Left column: selectable capture adapters.
    let col_adapter = get_col_adapter(sniffer, font);
    let ip_active = sniffer.filters.ip;
    let col_ip_radio = ip_version_radios(ip_active, font, sniffer.language);
    let col_ip = Column::new()
        .spacing(10)
        .width(FillPortion(5))
        .push(col_ip_radio);
    let transport_active = sniffer.filters.transport;
    let col_transport_radio = transport_protocol_radios(transport_active, font, sniffer.language);
    // The transport column also hosts the start button.
    let col_transport = Column::new()
        .align_items(Alignment::Center)
        .spacing(10)
        .width(FillPortion(9))
        .push(col_transport_radio)
        .push(vertical_space(FillPortion(2)))
        .push(button_start(font, sniffer.language, sniffer.color_gradient))
        .push(vertical_space(FillPortion(1)));
    // No explicit selection when the filter equals `Other`; the pick list
    // then skips its first entry and shows the "all" placeholder instead.
    let app_active = if sniffer.filters.application.ne(&AppProtocol::Other) {
        Some(sniffer.filters.application)
    } else {
        None
    };
    let picklist_app = PickList::new(
        if app_active.is_some() {
            &AppProtocol::ALL[..]
        } else {
            &AppProtocol::ALL[1..]
        },
        app_active,
        Message::AppProtocolSelection,
    )
    .padding([3, 7])
    .placeholder(all_translation(sniffer.language))
    .font(font);
    let col_app = Column::new()
        .width(FillPortion(8))
        .spacing(10)
        .push(
            Text::new(application_protocol_translation(sniffer.language))
                .font(font)
                .style(TextType::Subtitle)
                .size(FONT_SIZE_SUBTITLE),
        )
        .push(picklist_app);
    // Assemble the right-hand filter panel: title row + the three columns.
    let filters = Column::new()
        .width(FillPortion(6))
        .padding(10)
        .spacing(15)
        .push(
            Row::new().push(
                select_filters_translation(sniffer.language)
                    .font(font)
                    .style(TextType::Title)
                    .size(FONT_SIZE_TITLE),
            ),
        )
        .push(
            Row::new()
                .spacing(10)
                .height(FillPortion(3))
                .push(col_ip)
                .push(col_transport)
                .push(col_app),
        );
    let body = Column::new().push(vertical_space(Length::Fixed(5.0))).push(
        Row::new()
            .push(col_adapter)
            .push(horizontal_space(Length::Fixed(30.0)))
            .push(filters),
    );
    Container::new(body).height(Length::Fill)
}
/// Builds the rocket "start sniffing" button wrapped in its localized
/// tooltip. (Removed a stale commented-out line that appended a keyboard
/// hint to the tooltip text.)
fn button_start(
    font: Font,
    language: Language,
    color_gradient: GradientType,
) -> Tooltip<'static, Message, Renderer<StyleType>> {
    let content = button(
        Icon::Rocket
            .to_text()
            .size(25)
            .horizontal_alignment(alignment::Horizontal::Center)
            .vertical_alignment(alignment::Vertical::Center),
    )
    .padding(10)
    .height(Length::Fixed(80.0))
    .width(Length::Fixed(160.0))
    .style(ButtonType::Gradient(color_gradient))
    .on_press(Message::Start);
    let tooltip = start_translation(language).to_string();
    Tooltip::new(content, tooltip, Position::Top)
        .gap(5)
        .font(font)
        .style(ContainerType::Tooltip)
}
/// Builds the scrollable left column listing every capture adapter with
/// its description and addresses; the active adapter is highlighted.
fn get_col_adapter(sniffer: &Sniffer, font: Font) -> Column<Message, Renderer<StyleType>> {
    // (name, display string) pairs, one per pcap device.
    let mut dev_str_list = vec![];
    for dev in Device::list().expect("Error retrieving device list\r\n") {
        let mut dev_str = String::new();
        let name = dev.name;
        match dev.desc {
            None => {
                dev_str.push_str(&name);
            }
            Some(description) => {
                // On Windows only the description is shown; elsewhere the
                // raw device name precedes it.
                #[cfg(not(target_os = "windows"))]
                dev_str.push_str(&format!("{name}\n"));
                dev_str.push_str(&description);
            }
        }
        // Address header, singular vs plural.
        let num_addresses = dev.addresses.len();
        match num_addresses {
            0 => {}
            1 => {
                dev_str.push_str(&format!("\n{}:", address_translation(sniffer.language)));
            }
            _ => {
                dev_str.push_str(&format!("\n{}:", addresses_translation(sniffer.language)));
            }
        }
        for addr in dev.addresses {
            let address_string = addr.addr.to_string();
            dev_str.push_str(&format!("\n {address_string}"));
        }
        dev_str_list.push((name, dev_str));
    }
    Column::new()
        .padding(10)
        .spacing(5)
        .height(Length::Fill)
        .width(FillPortion(4))
        .push(
            choose_adapters_translation(sniffer.language)
                .font(font)
                .style(TextType::Title)
                .size(FONT_SIZE_TITLE),
        )
        .push(
            // One bordered button per adapter; the selected one gets the
            // highlighted style.
            Scrollable::new(dev_str_list.iter().fold(
                Column::new().padding(13).spacing(5),
                |scroll_adapters, adapter| {
                    let name = adapter.0.clone();
                    let description = adapter.1.clone();
                    scroll_adapters.push(
                        Button::new(Text::new(description).font(font))
                            .padding([20, 30])
                            .width(Length::Fill)
                            .style(if name == sniffer.device.name {
                                ButtonType::BorderedRoundSelected
                            } else {
                                ButtonType::BorderedRound
                            })
                            .on_press(Message::AdapterSelection(name)),
                    )
                },
            ))
            .direction(Direction::Vertical(ScrollbarType::properties())),
        )
}
|
// Pull in the `rand` crate (pre-2018-edition style declaration).
extern crate rand;
// Expose the `math` module as part of this crate's public API.
pub mod math;
|
use nextcloud_config_parser::parse;

/// Parses the sample Nextcloud config and dumps the result; on a parse
/// failure the error is printed to stderr and the program exits quietly.
fn main() {
    match parse("tests/configs/basic.php") {
        Ok(config) => {
            dbg!(config);
        }
        Err(err) => eprintln!("{}", err),
    }
}
|
use std::{env, fs::File, io::{Read, Write}, path::Path};
use positioned_io::WriteAt;
// File names of the pieces merged into the bootable disk image.
static BOOTLOADER: &str = "BootLoader.bin";
static KERNEL32: &str = "Kernelx86.bin";
static KERNEL64: &str = "Kernelx64.bin";
// Name of the output disk image written to the current directory.
static DISK: &str = "Disk.img";
/// Splits a `u16` into its two bytes in little-endian order (low byte
/// first), matching the byte layout expected by the boot-sector fields
/// patched in `main`.
#[inline]
fn u16_to_u8(v: u16) -> [u8; 2] {
    // Identical to the manual `[v as u8, (v >> 8) as u8]`, expressed with
    // the standard-library helper.
    v.to_le_bytes()
}
/// Build script entry point: concatenates the bootloader and both kernels
/// into a sector-aligned disk image, prints a size summary, then patches
/// sector counts back into the bootloader's header.
fn main() {
    let manifest_dir_path =
        env::var("CARGO_MANIFEST_DIR").expect("Missing CARGO_MANIFEST_DIR environment variable");
    let manifest_dir = Path::new(&manifest_dir_path);
    let current_dir = env::current_dir().expect("Couldn't get current directory");
    // NOTE(review): if `manifest_dir` is absolute (as CARGO_MANIFEST_DIR
    // normally is), `current_dir.join(...)` simply yields that absolute
    // path — the join with `current_dir` is then a no-op.
    let target_dir_rel = manifest_dir.join("target");
    let target_dir = current_dir.join(target_dir_rel);
    // The image is created in the current working directory.
    let mut output = File::create(format!("{}/{}", current_dir.display(), DISK)).unwrap();
    // Merge the three binaries, padding each to a 512-byte sector boundary;
    // returns the per-file sector counts in the same order.
    let sector_counts = merge_kernel(vec![
        &format!("{}/{}", target_dir.display(), BOOTLOADER),
        &format!("{}/{}", target_dir.display(), KERNEL32),
        &format!("{}/{}", target_dir.display(), KERNEL64),
    ], &mut output);
    let bootloader_sector = sector_counts[0];
    let kernel32_sector = sector_counts[1];
    let kernel64_sector = sector_counts[2];
    println!("----------------FILE LIST------------------");
    println!("BootLoader    : {:2} Sector(s), Offset [{:#04x}]", bootloader_sector, 0);
    println!("32-bit Kernel : {:2} Sector(s), Offset [{:#04x}]", kernel32_sector, bootloader_sector);
    println!("64-bit Kernel : {:2} Sector(s), Offset [{:#04x}]", kernel64_sector, bootloader_sector + kernel32_sector);
    println!("----------------SUB TOTAL------------------");
    println!("Total Kernel  : {:2} Sector(s)", kernel32_sector + kernel64_sector);
    println!("------------------TOTAL--------------------");
    println!("Total         : {:2} Sector(s)", bootloader_sector + kernel32_sector + kernel64_sector);
    // Patch two little-endian u16 fields inside the bootloader image:
    // byte offset 5 gets the total kernel sector count, offset 7 the
    // 32-bit kernel's sector count. Presumably these are fields the
    // bootloader reads at boot time — TODO confirm against BootLoader.bin's
    // header layout.
    output.write_at(5, &u16_to_u8(kernel32_sector + kernel64_sector)).unwrap();
    output.write_at(7, &u16_to_u8(kernel32_sector)).unwrap();
}
/// Appends each file in `file_list` to `output`, zero-padding every file up
/// to a multiple of 512 bytes (one disk sector).
///
/// Returns the number of sectors each file occupies, in the same order as
/// `file_list`.
///
/// # Panics
/// Panics if an input file cannot be opened, or if any read/write fails.
fn merge_kernel(
    file_list: Vec<&str>,
    output: &mut File
) -> Vec<u16> {
    let mut buffer = [0u8; 512];
    let mut sector_count: Vec<u16> = Vec::new();
    for file_name in file_list {
        let mut file = File::open(file_name).unwrap();
        let mut count: u16 = 0;
        loop {
            // Fill a whole sector before writing. `Read::read` is allowed
            // to return fewer bytes than requested even before EOF, so the
            // original's "short read means EOF" assumption could silently
            // truncate a file; loop until the sector is full or the file
            // is exhausted instead.
            let mut filled = 0;
            while filled < buffer.len() {
                let nbyte = file.read(&mut buffer[filled..]).unwrap();
                if nbyte == 0 {
                    break; // true EOF
                }
                filled += nbyte;
            }
            if filled == 0 {
                // EOF landed exactly on a sector boundary: nothing to emit.
                break;
            }
            // Zero-pad the unused tail of a final, partial sector.
            buffer[filled..].fill(0);
            // `write_all` (unlike `write`) guarantees the whole sector is
            // written; `write` may legally perform a short write.
            output.write_all(&buffer).unwrap();
            count += 1;
            if filled < buffer.len() {
                // A padded sector can only be the file's last one.
                break;
            }
        }
        sector_count.push(count);
    }
    sector_count
}
#![allow(clippy::forget_non_drop)]
// This example exists to allow for profiling
// applications to provide details about
// the criterion benchmarks
use ress::Tokenizer;
// Representative comment styles the tokenizer must handle: single-line,
// multi-line, and (legacy JS) HTML-style comments with/without a trailer.
static COMMENTS: &[&str] = &[
    "//this is a comment",
    "/*this is a
multi-line comment*/",
    "<!-- This is an HTML comment -->",
    "<!-- This is an HTML comment --> with a trailer",
];
/// Repeatedly tokenizes each sample comment so a profiler can gather a
/// useful number of samples; the produced token is deliberately leaked so
/// drop time does not pollute the profile.
fn main() {
    for _round in 0..1000 {
        COMMENTS.iter().for_each(|comment| {
            let token = Tokenizer::new(comment).next(true).unwrap();
            core::mem::forget(token);
        });
    }
}
|
use std::io::{stdin, Read, StdinLock};
use std::str::FromStr;
#[allow(dead_code)]
/// Minimal whitespace-delimited token reader over locked stdin, in the
/// style common to competitive-programming templates.
struct Scanner<'a> {
    // Exclusive stdin lock, held for the scanner's lifetime.
    cin: StdinLock<'a>,
}
#[allow(dead_code)]
impl<'a> Scanner<'a> {
    /// Wraps a locked stdin handle.
    fn new(cin: StdinLock<'a>) -> Scanner<'a> {
        Scanner { cin }
    }

    /// Reads the next whitespace-delimited token and attempts to parse it,
    /// yielding `None` on EOF or parse failure. Reads byte-by-byte,
    /// skipping leading whitespace and stopping at the first whitespace
    /// after the token.
    fn read<T: FromStr>(&mut self) -> Option<T> {
        let token: String = self
            .cin
            .by_ref()
            .bytes()
            .map(|b| b.unwrap() as char)
            .skip_while(|c| c.is_whitespace())
            .take_while(|c| !c.is_whitespace())
            .collect();
        token.parse().ok()
    }

    /// Like [`Scanner::read`], but panics when no token can be parsed.
    fn input<T: FromStr>(&mut self) -> T {
        self.read().unwrap()
    }
}
/// Reads one token from stdin, overwrites its 4th character with '8',
/// and prints the result (panics if the token is shorter than 4 chars,
/// matching the problem's input guarantee).
fn main() {
    let cin = stdin();
    let mut scanner = Scanner::new(cin.lock());
    let word: String = scanner.input();
    let mut letters: Vec<char> = word.chars().collect();
    letters[3] = '8';
    let answer: String = letters.iter().collect();
    println!("{}", answer);
}
|
// Name of the application's configuration file.
pub const CONFIG_FILENAME: &str = "config.toml";
// MIME type used for JSON request/response bodies.
pub const CONTENT_TYPE_JSON: &str = "application/json";
// Error messages returned when request-payload parsing fails.
pub const ERR_MSG_PAYLOAD_PARSE_ORDERBY_FAIL: &str = "parse orderby list fail";
pub const ERR_MSG_PAYLOAD_PARSE_TIME_COND_FAIL: &str = "parse time condition fail";
|
use amethyst::{
assets::ProgressCounter,
core::{nalgebra::Vector2, transform::components::Transform},
ecs::prelude::*,
renderer::{Transparent,Flipped},
shrev::EventChannel,
};
use crate::{
components::{
for_characters::{player::Position, Engine, FuelTank, TagGenerator},
physics::{Dynamics, PhysicalProperties},
IsIngameEntity,
},
entities::{camera, player_parts::DrillTypes, EntityError, EntitySpriteRender},
events::planet_events::ChunkEvent,
resources::{
add_spriterender, get_spriterender, ingame::GameSessionData, GameSprites, RenderConfig,
ToppaSpriteSheet,
},
utilities::{load_spritesheet_tracked, load_spritesheet},
};
use super::{init_drill, init_tracks, new_drill, new_tracks, PlayerParts};
/// Errors that can occur while creating or initialising the player entity.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum PlayerError {
    /// Placeholder for code paths that are not implemented yet.
    #[allow(dead_code)]
    NotImplemented,
    // Presumably raised when a player `Position` cannot be derived from a
    // `Transform`; no visible producer in this file — confirm at call sites.
    NoPositionFromTransform,
    /// No `SpriteRender` was registered for the given ship type
    /// (see `new_player`).
    MissingSpriteRender(ShipTypes,),
}
/// The hull of the ship, provides resistance against forces and impacts.
// The derives allow ship types to be compared, ordered and used as map keys.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum ShipTypes {
    /// Dummy, if a drill has no implementation yet.
    NotImplemented,
    /// Base-model every player starts with, low resistance against all.
    Mk1506,
    /// Mid tier model with moderate resistance against heat, forces and impact.
    Albatros,
    /// Highest end model, providing the highest resistance against heat, forces and impact.
    L14Ultra,
}
/// TODO: Error handling
/// Loads the spritesheet and sprites for the player, adding them to GameSprites.
/// Calls the `init`-functions of all the player sub-entities, like the drill and tracks.
pub fn init_player(world: &mut World, progress_counter_ref_opt: Option<&mut ProgressCounter,>,) {
    // TODO: Not happy with this if-let for duplicating an Option<&mut ProgressCounter>
    // NOTE(review): both branches duplicate the sprite-registration code and
    // differ only in tracked vs. untracked spritesheet loading; dedup would
    // need care around reborrowing the `&mut ProgressCounter`.
    if let Some(progress_counter_ref,) = progress_counter_ref_opt {
        // TODO: For moddability, not hardcoded path! Check some dir first, and fall back on hardcoded path if nothing is found.
        // Inner scope bounds the `GameSprites` write-borrow so `world` is
        // free again for the `init_*` calls below.
        {
            let ss_handle = load_spritesheet_tracked(
                world,
                "Assets/Textures/Drill".to_string(),
                progress_counter_ref,
            );
            let mut game_sprites = world.write_resource::<GameSprites>();
            // (sprite index in the sheet, logical sprite id) pairs; only the
            // placeholder ship sprite is registered for now.
            let sprites = [
                (
                    0,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::NotImplemented,),),
                ),
                /*(
                    1,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::Mk1506)),
                ),
                (
                    2,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::Albatros)),
                ),
                (
                    3,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::L14Ultra)),
                ),*/
            ];
            for (sprite_number, entity_sprite_render,) in sprites.iter() {
                add_spriterender(
                    *entity_sprite_render,
                    &mut game_sprites,
                    ss_handle.clone(),
                    *sprite_number,
                );
            }
        }
        // Forward the progress counter so sub-entity asset loads are tracked too.
        init_drill(world, Some(progress_counter_ref,),);
        init_tracks(world, Some(progress_counter_ref,),);
    }
    else {
        // Untracked variant: identical registration, but asset loading
        // progress is not reported anywhere.
        {
            let ss_handle =
                load_spritesheet(world, "Assets/Textures/Drill".to_string());
            let mut game_sprites = world.write_resource::<GameSprites>();
            let sprites = [
                (
                    0,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::NotImplemented,),),
                ),
                /*(
                    1,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::Mk1506)),
                ),
                (
                    2,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::Albatros)),
                ),
                (
                    3,
                    EntitySpriteRender::Player(PlayerParts::Ship(ShipTypes::L14Ultra)),
                ),*/
            ];
            for (sprite_number, entity_sprite_render,) in sprites.iter() {
                add_spriterender(
                    *entity_sprite_render,
                    &mut game_sprites,
                    ss_handle.clone(),
                    *sprite_number,
                );
            }
        }
        init_drill(world, None,);
        init_tracks(world, None,);
    }
}
/// Creates the player entity from the given `transform` and `ship_type`,
/// attaches its gameplay components, sets up the camera that follows it,
/// and builds the drill and tracks sub-entities.
///
/// # Errors
/// Returns `EntityError::PlayerProblem(PlayerError::MissingSpriteRender(..))`
/// when no `SpriteRender` has been registered for `ship_type` (see
/// `init_player`), or propagates errors from `new_drill`/`new_tracks`.
pub fn new_player(
    world: &mut World,
    transform: &Transform,
    ship_type: ShipTypes,
) -> Result<(), EntityError,> {
    #[cfg(feature = "debug")]
    debug!("Creating player with ship type {:?}.", ship_type);
    let sprite_render_opt = get_spriterender(
        world,
        EntitySpriteRender::Player(PlayerParts::Ship(ship_type,),),
    );
    if let Some(sprite_render,) = sprite_render_opt {
        // Unique tag identifying this player entity.
        let player_tag = {
            let mut tag_resource = world.write_resource::<TagGenerator>();
            tag_resource.new_player_tag()
        };
        // Read render settings; `_chunk_render_distance` is fetched but
        // currently unused. NOTE(review): the original also fetched the
        // `GameSessionData` resource here without ever using it — that dead
        // resource read was removed.
        let (position, view_dim, _chunk_render_distance,) = {
            let ren_con = &world.read_resource::<RenderConfig>();
            (
                Position::default(),
                ren_con.view_dim,
                ren_con.chunk_render_distance,
            )
        };
        // Physical tuning values for the starting ship.
        let physical_properties = PhysicalProperties::new(7000.0, Some(1000.0,), None, Some(125.0,),);
        let dynamics = Dynamics::default();
        let engine = Engine::new(Vector2::new(7200000.0, 4260000.0), 0.90, 0.0001,);
        let fuel_tank = FuelTank::new(50000.0, 50000.0, 0.002);
        #[cfg(feature = "debug")]
        debug!("| Initial player position from transform.");
        let player = world
            .create_entity()
            .with(IsIngameEntity,)
            .with(transform.clone(),)
            .with(Transparent,)
            .with(sprite_render,)
            .with(player_tag,)
            .with(position,)
            .with(physical_properties,)
            .with(dynamics,)
            .with(engine,)
            .with(fuel_tank,)
            .with(Flipped::Vertical) //What's wrong with spritesheet prefabs? Are they only upside down? Is the offset changed?
            .build();
        camera::init_camera(world, view_dim, player,);
        new_drill(world, player, DrillTypes::C45U,)?;
        new_tracks(world, player,)?;
        Ok((),)
    }
    else {
        Err(EntityError::PlayerProblem(
            PlayerError::MissingSpriteRender(ship_type,),
        ),)
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.