repo_id
stringclasses
563 values
file_path
stringlengths
40
166
content
stringlengths
1
2.94M
__index_level_0__
int64
0
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libserialize/json.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Rust JSON serialization library // Copyright (c) 2011 Google Inc. #![forbid(non_camel_case_types)] #![allow(missing_docs)] //! JSON parsing and serialization //! //! # What is JSON? //! //! JSON (JavaScript Object Notation) is a way to write data in Javascript. //! Like XML, it allows to encode structured data in a text format that can be easily read by humans //! Its simple syntax and native compatibility with JavaScript have made it a widely used format. //! //! Data types that can be encoded are JavaScript types (see the `Json` enum for more details): //! //! * `Boolean`: equivalent to rust's `bool` //! * `Number`: equivalent to rust's `f64` //! * `String`: equivalent to rust's `String` //! * `Array`: equivalent to rust's `Vec<T>`, but also allowing objects of different types in the //! same array //! * `Object`: equivalent to rust's `BTreeMap<String, json::Json>` //! * `Null` //! //! An object is a series of string keys mapping to values, in `"key": value` format. //! Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }). //! A simple JSON document encoding a person, their age, address and phone numbers could look like //! //! ```json //! { //! "FirstName": "John", //! "LastName": "Doe", //! "Age": 43, //! "Address": { //! "Street": "Downing Street 10", //! "City": "London", //! "Country": "Great Britain" //! }, //! "PhoneNumbers": [ //! "+44 1234567", //! "+44 2345678" //! ] //! } //! ``` //! //! # Rust Type-based Encoding and Decoding //! //! 
Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via //! the serialization API. //! To be able to encode a piece of data, it must implement the `serialize::RustcEncodable` trait. //! To be able to decode a piece of data, it must implement the `serialize::RustcDecodable` trait. //! The Rust compiler provides an annotation to automatically generate the code for these traits: //! `#[derive(RustcDecodable, RustcEncodable)]` //! //! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects. //! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value. //! A `json::Json` value can be encoded as a string or buffer using the functions described above. //! You can also use the `json::Encoder` object, which implements the `Encoder` trait. //! //! When using `ToJson` the `RustcEncodable` trait implementation is not mandatory. //! //! # Examples of use //! //! ## Using Autoserialization //! //! Create a struct called `TestStruct` and serialize and deserialize it to and from JSON using the //! serialization API, using the derived serialization code. //! //! ```rust //! # #![feature(rustc_private)] //! extern crate serialize as rustc_serialize; // for the deriving below //! use rustc_serialize::json; //! //! // Automatically generate `Decodable` and `Encodable` trait implementations //! #[derive(RustcDecodable, RustcEncodable)] //! pub struct TestStruct { //! data_int: u8, //! data_str: String, //! data_vector: Vec<u8>, //! } //! //! fn main() { //! let object = TestStruct { //! data_int: 1, //! data_str: "homura".to_string(), //! data_vector: vec![2,3,4,5], //! }; //! //! // Serialize using `json::encode` //! let encoded = json::encode(&object).unwrap(); //! //! // Deserialize using `json::decode` //! let decoded: TestStruct = json::decode(&encoded[..]).unwrap(); //! } //! ``` //! //! ## Using the `ToJson` trait //! //! 
The examples above use the `ToJson` trait to generate the JSON string, which is required //! for custom mappings. //! //! ### Simple example of `ToJson` usage //! //! ```rust //! # #![feature(rustc_private)] //! extern crate serialize; //! use serialize::json::{self, ToJson, Json}; //! //! // A custom data structure //! struct ComplexNum { //! a: f64, //! b: f64, //! } //! //! // JSON value representation //! impl ToJson for ComplexNum { //! fn to_json(&self) -> Json { //! Json::String(format!("{}+{}i", self.a, self.b)) //! } //! } //! //! // Only generate `RustcEncodable` trait implementation //! #[derive(Encodable)] //! pub struct ComplexNumRecord { //! uid: u8, //! dsc: String, //! val: Json, //! } //! //! fn main() { //! let num = ComplexNum { a: 0.0001, b: 12.539 }; //! let data: String = json::encode(&ComplexNumRecord{ //! uid: 1, //! dsc: "test".to_string(), //! val: num.to_json(), //! }).unwrap(); //! println!("data: {}", data); //! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"}; //! } //! ``` //! //! ### Verbose example of `ToJson` usage //! //! ```rust //! # #![feature(rustc_private)] //! extern crate serialize; //! use std::collections::BTreeMap; //! use serialize::json::{self, Json, ToJson}; //! //! // Only generate `Decodable` trait implementation //! #[derive(Decodable)] //! pub struct TestStruct { //! data_int: u8, //! data_str: String, //! data_vector: Vec<u8>, //! } //! //! // Specify encoding method manually //! impl ToJson for TestStruct { //! fn to_json(&self) -> Json { //! let mut d = BTreeMap::new(); //! // All standard types implement `to_json()`, so use it //! d.insert("data_int".to_string(), self.data_int.to_json()); //! d.insert("data_str".to_string(), self.data_str.to_json()); //! d.insert("data_vector".to_string(), self.data_vector.to_json()); //! Json::Object(d) //! } //! } //! //! fn main() { //! // Serialize using `ToJson` //! let input_data = TestStruct { //! data_int: 1, //! data_str: "madoka".to_string(), //! 
data_vector: vec![2,3,4,5], //! }; //! let json_obj: Json = input_data.to_json(); //! let json_str: String = json_obj.to_string(); //! //! // Deserialize like before //! let decoded: TestStruct = json::decode(&json_str).unwrap(); //! } //! ``` use self::JsonEvent::*; use self::ErrorCode::*; use self::ParserError::*; use self::DecoderError::*; use self::ParserState::*; use self::InternalStackElement::*; use std::borrow::Cow; use std::collections::{HashMap, BTreeMap}; use std::io::prelude::*; use std::io; use std::mem::swap; use std::num::FpCategory as Fp; use std::ops::Index; use std::str::FromStr; use std::string; use std::{char, f64, fmt, str}; use std; use Encodable; /// Represents a json value #[derive(Clone, PartialEq, PartialOrd, Debug)] pub enum Json { I64(i64), U64(u64), F64(f64), String(string::String), Boolean(bool), Array(self::Array), Object(self::Object), Null, } pub type Array = Vec<Json>; pub type Object = BTreeMap<string::String, Json>; pub struct PrettyJson<'a> { inner: &'a Json } pub struct AsJson<'a, T: 'a> { inner: &'a T } pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option<usize> } /// The errors that can arise while parsing a JSON stream. #[derive(Clone, Copy, PartialEq, Debug)] pub enum ErrorCode { InvalidSyntax, InvalidNumber, EOFWhileParsingObject, EOFWhileParsingArray, EOFWhileParsingValue, EOFWhileParsingString, KeyMustBeAString, ExpectedColon, TrailingCharacters, TrailingComma, InvalidEscape, InvalidUnicodeCodePoint, LoneLeadingSurrogateInHexEscape, UnexpectedEndOfHexEscape, UnrecognizedHex, NotFourDigit, NotUtf8, } #[derive(Clone, PartialEq, Debug)] pub enum ParserError { /// msg, line, col SyntaxError(ErrorCode, usize, usize), IoError(io::ErrorKind, String), } // Builder and Parser have the same errors. 
pub type BuilderError = ParserError; #[derive(Clone, PartialEq, Debug)] pub enum DecoderError { ParseError(ParserError), ExpectedError(string::String, string::String), MissingFieldError(string::String), UnknownVariantError(string::String), ApplicationError(string::String) } #[derive(Copy, Clone, Debug)] pub enum EncoderError { FmtError(fmt::Error), BadHashmapKey, } /// Returns a readable error string for a given error code. pub fn error_str(error: ErrorCode) -> &'static str { match error { InvalidSyntax => "invalid syntax", InvalidNumber => "invalid number", EOFWhileParsingObject => "EOF While parsing object", EOFWhileParsingArray => "EOF While parsing array", EOFWhileParsingValue => "EOF While parsing value", EOFWhileParsingString => "EOF While parsing string", KeyMustBeAString => "key must be a string", ExpectedColon => "expected `:`", TrailingCharacters => "trailing characters", TrailingComma => "trailing comma", InvalidEscape => "invalid escape", UnrecognizedHex => "invalid \\u{ esc}ape (unrecognized hex)", NotFourDigit => "invalid \\u{ esc}ape (not four digits)", NotUtf8 => "contents not utf-8", InvalidUnicodeCodePoint => "invalid Unicode code point", LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape", UnexpectedEndOfHexEscape => "unexpected end of hex escape", } } /// Shortcut function to decode a JSON `&str` into an object pub fn decode<T: ::Decodable>(s: &str) -> DecodeResult<T> { let json = match from_str(s) { Ok(x) => x, Err(e) => return Err(ParseError(e)) }; let mut decoder = Decoder::new(json); ::Decodable::decode(&mut decoder) } /// Shortcut function to encode a `T` into a JSON `String` pub fn encode<T: ::Encodable>(object: &T) -> Result<string::String, EncoderError> { let mut s = String::new(); { let mut encoder = Encoder::new(&mut s); object.encode(&mut encoder)?; } Ok(s) } impl fmt::Display for ErrorCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { error_str(*self).fmt(f) } } fn io_error_to_error(io: io::Error) -> 
ParserError { IoError(io.kind(), io.to_string()) } impl fmt::Display for ParserError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // FIXME this should be a nicer error fmt::Debug::fmt(self, f) } } impl fmt::Display for DecoderError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // FIXME this should be a nicer error fmt::Debug::fmt(self, f) } } impl std::error::Error for DecoderError { fn description(&self) -> &str { "decoder error" } } impl fmt::Display for EncoderError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // FIXME this should be a nicer error fmt::Debug::fmt(self, f) } } impl std::error::Error for EncoderError { fn description(&self) -> &str { "encoder error" } } impl From<fmt::Error> for EncoderError { /// Converts a [`fmt::Error`] into `EncoderError` /// /// This conversion does not allocate memory. fn from(err: fmt::Error) -> EncoderError { EncoderError::FmtError(err) } } pub type EncodeResult = Result<(), EncoderError>; pub type DecodeResult<T> = Result<T, DecoderError>; fn escape_str(wr: &mut dyn fmt::Write, v: &str) -> EncodeResult { wr.write_str("\"")?; let mut start = 0; for (i, byte) in v.bytes().enumerate() { let escaped = match byte { b'"' => "\\\"", b'\\' => "\\\\", b'\x00' => "\\u0000", b'\x01' => "\\u0001", b'\x02' => "\\u0002", b'\x03' => "\\u0003", b'\x04' => "\\u0004", b'\x05' => "\\u0005", b'\x06' => "\\u0006", b'\x07' => "\\u0007", b'\x08' => "\\b", b'\t' => "\\t", b'\n' => "\\n", b'\x0b' => "\\u000b", b'\x0c' => "\\f", b'\r' => "\\r", b'\x0e' => "\\u000e", b'\x0f' => "\\u000f", b'\x10' => "\\u0010", b'\x11' => "\\u0011", b'\x12' => "\\u0012", b'\x13' => "\\u0013", b'\x14' => "\\u0014", b'\x15' => "\\u0015", b'\x16' => "\\u0016", b'\x17' => "\\u0017", b'\x18' => "\\u0018", b'\x19' => "\\u0019", b'\x1a' => "\\u001a", b'\x1b' => "\\u001b", b'\x1c' => "\\u001c", b'\x1d' => "\\u001d", b'\x1e' => "\\u001e", b'\x1f' => "\\u001f", b'\x7f' => "\\u007f", _ => { continue; } }; if start < i { 
wr.write_str(&v[start..i])?; } wr.write_str(escaped)?; start = i + 1; } if start != v.len() { wr.write_str(&v[start..])?; } wr.write_str("\"")?; Ok(()) } fn escape_char(writer: &mut dyn fmt::Write, v: char) -> EncodeResult { escape_str(writer, v.encode_utf8(&mut [0; 4])) } fn spaces(wr: &mut dyn fmt::Write, mut n: usize) -> EncodeResult { const BUF: &str = " "; while n >= BUF.len() { wr.write_str(BUF)?; n -= BUF.len(); } if n > 0 { wr.write_str(&BUF[..n])?; } Ok(()) } fn fmt_number_or_null(v: f64) -> string::String { match v.classify() { Fp::Nan | Fp::Infinite => string::String::from("null"), _ if v.fract() != 0f64 => v.to_string(), _ => v.to_string() + ".0", } } /// A structure for implementing serialization to JSON. pub struct Encoder<'a> { writer: &'a mut (dyn fmt::Write+'a), is_emitting_map_key: bool, } impl<'a> Encoder<'a> { /// Creates a new JSON encoder whose output will be written to the writer /// specified. pub fn new(writer: &'a mut dyn fmt::Write) -> Encoder<'a> { Encoder { writer: writer, is_emitting_map_key: false, } } } macro_rules! 
emit_enquoted_if_mapkey { ($enc:ident,$e:expr) => ({ if $enc.is_emitting_map_key { write!($enc.writer, "\"{}\"", $e)?; } else { write!($enc.writer, "{}", $e)?; } Ok(()) }) } impl<'a> ::Encoder for Encoder<'a> { type Error = EncoderError; fn emit_nil(&mut self) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "null")?; Ok(()) } fn emit_usize(&mut self, v: usize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u128(&mut self, v: u128) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u64(&mut self, v: u64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u32(&mut self, v: u32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u16(&mut self, v: u16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u8(&mut self, v: u8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_isize(&mut self, v: isize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i128(&mut self, v: i128) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i64(&mut self, v: i64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i32(&mut self, v: i32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i16(&mut self, v: i16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i8(&mut self, v: i8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_bool(&mut self, v: bool) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if v { write!(self.writer, "true")?; } else { write!(self.writer, "false")?; } Ok(()) } fn emit_f64(&mut self, v: f64) -> EncodeResult { emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { self.emit_f64(v as f64) } fn emit_char(&mut self, v: char) -> EncodeResult { escape_char(self.writer, v) } fn emit_str(&mut self, v: &str) -> EncodeResult { escape_str(self.writer, v) } fn emit_enum<F>(&mut self, 
_name: &str, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { f(self) } fn emit_enum_variant<F>(&mut self, name: &str, _id: usize, cnt: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { // enums are encoded as strings or objects // Bunny => "Bunny" // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]} if cnt == 0 { escape_str(self.writer, name) } else { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "{{\"variant\":")?; escape_str(self.writer, name)?; write!(self.writer, ",\"fields\":[")?; f(self)?; write!(self.writer, "]}}")?; Ok(()) } } fn emit_enum_variant_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx != 0 { write!(self.writer, ",")?; } f(self) } fn emit_enum_struct_variant<F>(&mut self, name: &str, id: usize, cnt: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_enum_variant(name, id, cnt, f) } fn emit_enum_struct_variant_field<F>(&mut self, _: &str, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_enum_variant_arg(idx, f) } fn emit_struct<F>(&mut self, _: &str, _: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "{{")?; f(self)?; write!(self.writer, "}}")?; Ok(()) } fn emit_struct_field<F>(&mut self, name: &str, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx != 0 { write!(self.writer, ",")?; } escape_str(self.writer, name)?; 
write!(self.writer, ":")?; f(self) } fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq(len, f) } fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq_elt(idx, f) } fn emit_tuple_struct<F>(&mut self, _name: &str, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq(len, f) } fn emit_tuple_struct_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq_elt(idx, f) } fn emit_option<F>(&mut self, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } f(self) } fn emit_option_none(&mut self) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_nil() } fn emit_option_some<F>(&mut self, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } f(self) } fn emit_seq<F>(&mut self, _len: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "[")?; f(self)?; write!(self.writer, "]")?; Ok(()) } fn emit_seq_elt<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx != 0 { write!(self.writer, ",")?; } f(self) } fn emit_map<F>(&mut self, _len: usize, f: F) -> 
EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "{{")?; f(self)?; write!(self.writer, "}}")?; Ok(()) } fn emit_map_elt_key<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx != 0 { write!(self.writer, ",")? } self.is_emitting_map_key = true; f(self)?; self.is_emitting_map_key = false; Ok(()) } fn emit_map_elt_val<F>(&mut self, _idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut Encoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, ":")?; f(self) } } /// Another encoder for JSON, but prints out human-readable JSON instead of /// compact data pub struct PrettyEncoder<'a> { writer: &'a mut (dyn fmt::Write+'a), curr_indent: usize, indent: usize, is_emitting_map_key: bool, } impl<'a> PrettyEncoder<'a> { /// Creates a new encoder whose output will be written to the specified writer pub fn new(writer: &'a mut dyn fmt::Write) -> PrettyEncoder<'a> { PrettyEncoder { writer, curr_indent: 0, indent: 2, is_emitting_map_key: false, } } /// Set the number of spaces to indent for each level. /// This is safe to set during encoding. pub fn set_indent(&mut self, indent: usize) { // self.indent very well could be 0 so we need to use checked division. 
let level = self.curr_indent.checked_div(self.indent).unwrap_or(0); self.indent = indent; self.curr_indent = level * self.indent; } } impl<'a> ::Encoder for PrettyEncoder<'a> { type Error = EncoderError; fn emit_nil(&mut self) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, "null")?; Ok(()) } fn emit_usize(&mut self, v: usize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u128(&mut self, v: u128) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u64(&mut self, v: u64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u32(&mut self, v: u32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u16(&mut self, v: u16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_u8(&mut self, v: u8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_isize(&mut self, v: isize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i128(&mut self, v: i128) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i64(&mut self, v: i64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i32(&mut self, v: i32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i16(&mut self, v: i16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_i8(&mut self, v: i8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } fn emit_bool(&mut self, v: bool) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if v { write!(self.writer, "true")?; } else { write!(self.writer, "false")?; } Ok(()) } fn emit_f64(&mut self, v: f64) -> EncodeResult { emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { self.emit_f64(v as f64) } fn emit_char(&mut self, v: char) -> EncodeResult { escape_char(self.writer, v) } fn emit_str(&mut self, v: &str) -> EncodeResult { escape_str(self.writer, v) } fn emit_enum<F>(&mut self, _name: &str, f: F) -> EncodeResult 
where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { f(self) } fn emit_enum_variant<F>(&mut self, name: &str, _id: usize, cnt: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if cnt == 0 { escape_str(self.writer, name) } else { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } writeln!(self.writer, "{{")?; self.curr_indent += self.indent; spaces(self.writer, self.curr_indent)?; write!(self.writer, "\"variant\": ")?; escape_str(self.writer, name)?; writeln!(self.writer, ",")?; spaces(self.writer, self.curr_indent)?; writeln!(self.writer, "\"fields\": [")?; self.curr_indent += self.indent; f(self)?; self.curr_indent -= self.indent; writeln!(self.writer)?; spaces(self.writer, self.curr_indent)?; self.curr_indent -= self.indent; writeln!(self.writer, "]")?; spaces(self.writer, self.curr_indent)?; write!(self.writer, "}}")?; Ok(()) } } fn emit_enum_variant_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx != 0 { writeln!(self.writer, ",")?; } spaces(self.writer, self.curr_indent)?; f(self) } fn emit_enum_struct_variant<F>(&mut self, name: &str, id: usize, cnt: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_enum_variant(name, id, cnt, f) } fn emit_enum_struct_variant_field<F>(&mut self, _: &str, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_enum_variant_arg(idx, f) } fn emit_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if len == 0 { write!(self.writer, "{{}}")?; } 
else { write!(self.writer, "{{")?; self.curr_indent += self.indent; f(self)?; self.curr_indent -= self.indent; writeln!(self.writer)?; spaces(self.writer, self.curr_indent)?; write!(self.writer, "}}")?; } Ok(()) } fn emit_struct_field<F>(&mut self, name: &str, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx == 0 { writeln!(self.writer)?; } else { writeln!(self.writer, ",")?; } spaces(self.writer, self.curr_indent)?; escape_str(self.writer, name)?; write!(self.writer, ": ")?; f(self) } fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq(len, f) } fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq_elt(idx, f) } fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq(len, f) } fn emit_tuple_struct_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_seq_elt(idx, f) } fn emit_option<F>(&mut self, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } f(self) } fn emit_option_none(&mut self) -> EncodeResult { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } self.emit_nil() } fn emit_option_some<F>(&mut self, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if 
self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } f(self) } fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if len == 0 { write!(self.writer, "[]")?; } else { write!(self.writer, "[")?; self.curr_indent += self.indent; f(self)?; self.curr_indent -= self.indent; writeln!(self.writer)?; spaces(self.writer, self.curr_indent)?; write!(self.writer, "]")?; } Ok(()) } fn emit_seq_elt<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx == 0 { writeln!(self.writer)?; } else { writeln!(self.writer, ",")?; } spaces(self.writer, self.curr_indent)?; f(self) } fn emit_map<F>(&mut self, len: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if len == 0 { write!(self.writer, "{{}}")?; } else { write!(self.writer, "{{")?; self.curr_indent += self.indent; f(self)?; self.curr_indent -= self.indent; writeln!(self.writer)?; spaces(self.writer, self.curr_indent)?; write!(self.writer, "}}")?; } Ok(()) } fn emit_map_elt_key<F>(&mut self, idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } if idx == 0 { writeln!(self.writer)?; } else { writeln!(self.writer, ",")?; } spaces(self.writer, self.curr_indent)?; self.is_emitting_map_key = true; f(self)?; self.is_emitting_map_key = false; Ok(()) } fn emit_map_elt_val<F>(&mut self, _idx: usize, f: F) -> EncodeResult where F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, { if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } write!(self.writer, ": ")?; f(self) } } impl Encodable for Json { fn encode<E: 
::Encoder>(&self, e: &mut E) -> Result<(), E::Error> { match *self { Json::I64(v) => v.encode(e), Json::U64(v) => v.encode(e), Json::F64(v) => v.encode(e), Json::String(ref v) => v.encode(e), Json::Boolean(v) => v.encode(e), Json::Array(ref v) => v.encode(e), Json::Object(ref v) => v.encode(e), Json::Null => e.emit_nil(), } } } /// Create an `AsJson` wrapper which can be used to print a value as JSON /// on-the-fly via `write!` pub fn as_json<T>(t: &T) -> AsJson<T> { AsJson { inner: t } } /// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON /// on-the-fly via `write!` pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<T> { AsPrettyJson { inner: t, indent: None } } impl Json { /// Borrow this json object as a pretty object to generate a pretty /// representation for it via `Display`. pub fn pretty(&self) -> PrettyJson { PrettyJson { inner: self } } /// If the Json value is an Object, returns the value associated with the provided key. /// Otherwise, returns None. pub fn find<'a>(&'a self, key: &str) -> Option<&'a Json>{ match *self { Json::Object(ref map) => map.get(key), _ => None } } /// Attempts to get a nested Json Object for each key in `keys`. /// If any key is found not to exist, find_path will return None. /// Otherwise, it will return the Json value associated with the final key. pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ let mut target = self; for key in keys { target = target.find(*key)?; } Some(target) } /// If the Json value is an Object, performs a depth-first search until /// a value associated with the provided key is found. If no value is found /// or the Json value is not an Object, returns None. 
pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> { match self { &Json::Object(ref map) => { match map.get(key) { Some(json_value) => Some(json_value), None => { for (_, v) in map { match v.search(key) { x if x.is_some() => return x, _ => () } } None } } }, _ => None } } /// Returns true if the Json value is an Object. Returns false otherwise. pub fn is_object(&self) -> bool { self.as_object().is_some() } /// If the Json value is an Object, returns the associated BTreeMap. /// Returns None otherwise. pub fn as_object(&self) -> Option<&Object> { match *self { Json::Object(ref map) => Some(map), _ => None } } /// Returns true if the Json value is an Array. Returns false otherwise. pub fn is_array(&self) -> bool { self.as_array().is_some() } /// If the Json value is an Array, returns the associated vector. /// Returns None otherwise. pub fn as_array(&self) -> Option<&Array> { match *self { Json::Array(ref array) => Some(&*array), _ => None } } /// Returns true if the Json value is a String. Returns false otherwise. pub fn is_string(&self) -> bool { self.as_string().is_some() } /// If the Json value is a String, returns the associated str. /// Returns None otherwise. pub fn as_string(&self) -> Option<&str> { match *self { Json::String(ref s) => Some(&s[..]), _ => None } } /// Returns true if the Json value is a Number. Returns false otherwise. pub fn is_number(&self) -> bool { match *self { Json::I64(_) | Json::U64(_) | Json::F64(_) => true, _ => false, } } /// Returns true if the Json value is a i64. Returns false otherwise. pub fn is_i64(&self) -> bool { match *self { Json::I64(_) => true, _ => false, } } /// Returns true if the Json value is a u64. Returns false otherwise. pub fn is_u64(&self) -> bool { match *self { Json::U64(_) => true, _ => false, } } /// Returns true if the Json value is a f64. Returns false otherwise. 
pub fn is_f64(&self) -> bool { match *self { Json::F64(_) => true, _ => false, } } /// If the Json value is a number, return or cast it to a i64. /// Returns None otherwise. pub fn as_i64(&self) -> Option<i64> { match *self { Json::I64(n) => Some(n), Json::U64(n) => Some(n as i64), _ => None } } /// If the Json value is a number, return or cast it to a u64. /// Returns None otherwise. pub fn as_u64(&self) -> Option<u64> { match *self { Json::I64(n) => Some(n as u64), Json::U64(n) => Some(n), _ => None } } /// If the Json value is a number, return or cast it to a f64. /// Returns None otherwise. pub fn as_f64(&self) -> Option<f64> { match *self { Json::I64(n) => Some(n as f64), Json::U64(n) => Some(n as f64), Json::F64(n) => Some(n), _ => None } } /// Returns true if the Json value is a Boolean. Returns false otherwise. pub fn is_boolean(&self) -> bool { self.as_boolean().is_some() } /// If the Json value is a Boolean, returns the associated bool. /// Returns None otherwise. pub fn as_boolean(&self) -> Option<bool> { match *self { Json::Boolean(b) => Some(b), _ => None } } /// Returns true if the Json value is a Null. Returns false otherwise. pub fn is_null(&self) -> bool { self.as_null().is_some() } /// If the Json value is a Null, returns (). /// Returns None otherwise. pub fn as_null(&self) -> Option<()> { match *self { Json::Null => Some(()), _ => None } } } impl<'a> Index<&'a str> for Json { type Output = Json; fn index(&self, idx: &'a str) -> &Json { self.find(idx).unwrap() } } impl Index<usize> for Json { type Output = Json; fn index(&self, idx: usize) -> &Json { match *self { Json::Array(ref v) => &v[idx], _ => panic!("can only index Json with usize if it is an array") } } } /// The output of the streaming parser. 
#[derive(PartialEq, Clone, Debug)]
pub enum JsonEvent {
    ObjectStart,
    ObjectEnd,
    ArrayStart,
    ArrayEnd,
    BooleanValue(bool),
    I64Value(i64),
    U64Value(u64),
    F64Value(f64),
    StringValue(string::String),
    NullValue,
    Error(ParserError),
}

// State machine driving the streaming Parser; tracks what token class is
// legal at the current point of the input.
#[derive(PartialEq, Debug)]
enum ParserState {
    // Parse a value in an array, true means first element.
    ParseArray(bool),
    // Parse ',' or ']' after an element in an array.
    ParseArrayComma,
    // Parse a key:value in an object, true means first element.
    ParseObject(bool),
    // Parse ',' or '}' after an element in an object.
    ParseObjectComma,
    // Initial state.
    ParseStart,
    // Expecting the stream to end.
    ParseBeforeFinish,
    // Parsing can't continue.
    ParseFinished,
}

/// A Stack represents the current position of the parser in the logical
/// structure of the JSON stream.
/// For example foo.bar[3].x
pub struct Stack {
    // One entry per level of nesting currently open.
    stack: Vec<InternalStackElement>,
    // Backing storage for all key strings; entries reference spans of it.
    str_buffer: Vec<u8>,
}

/// StackElements compose a Stack.
/// For example, StackElement::Key("foo"), StackElement::Key("bar"),
/// StackElement::Index(3) and StackElement::Key("x") are the
/// StackElements compositing the stack that represents foo.bar[3].x
#[derive(PartialEq, Clone, Debug)]
pub enum StackElement<'l> {
    Index(u32),
    Key(&'l str),
}

// Internally, Key elements are stored as indices in a buffer to avoid
// allocating a string for every member of an object.
#[derive(PartialEq, Clone, Debug)]
enum InternalStackElement {
    InternalIndex(u32),
    InternalKey(u16, u16), // start, size
}

impl Stack {
    /// Creates an empty Stack (no nesting, empty key buffer).
    pub fn new() -> Stack {
        Stack { stack: Vec::new(), str_buffer: Vec::new() }
    }

    /// Returns The number of elements in the Stack.
    pub fn len(&self) -> usize {
        self.stack.len()
    }

    /// Returns true if the stack is empty.
    pub fn is_empty(&self) -> bool {
        self.stack.is_empty()
    }

    /// Provides access to the StackElement at a given index.
    /// lower indices are at the bottom of the stack while higher indices are
    /// at the top.
pub fn get(&self, idx: usize) -> StackElement { match self.stack[idx] { InternalIndex(i) => StackElement::Index(i), InternalKey(start, size) => { StackElement::Key(str::from_utf8( &self.str_buffer[start as usize .. start as usize + size as usize]) .unwrap()) } } } /// Compares this stack with an array of StackElements. pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool { if self.stack.len() != rhs.len() { return false; } for (i, r) in rhs.iter().enumerate() { if self.get(i) != *r { return false; } } true } /// Returns true if the bottom-most elements of this stack are the same as /// the ones passed as parameter. pub fn starts_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } for (i, r) in rhs.iter().enumerate() { if self.get(i) != *r { return false; } } true } /// Returns true if the top-most elements of this stack are the same as /// the ones passed as parameter. pub fn ends_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } let offset = self.stack.len() - rhs.len(); for (i, r) in rhs.iter().enumerate() { if self.get(i + offset) != *r { return false; } } true } /// Returns the top-most element (if any). pub fn top(&self) -> Option<StackElement> { match self.stack.last() { None => None, Some(&InternalIndex(i)) => Some(StackElement::Index(i)), Some(&InternalKey(start, size)) => { Some(StackElement::Key(str::from_utf8( &self.str_buffer[start as usize .. (start+size) as usize] ).unwrap())) } } } // Used by Parser to insert StackElement::Key elements at the top of the stack. fn push_key(&mut self, key: string::String) { self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); self.str_buffer.extend(key.as_bytes()); } // Used by Parser to insert StackElement::Index elements at the top of the stack. fn push_index(&mut self, index: u32) { self.stack.push(InternalIndex(index)); } // Used by Parser to remove the top-most element of the stack. 
    // Used by Parser to remove the top-most element of the stack; a Key
    // entry also releases its span of the shared string buffer (keys are
    // always appended at the end, so truncation is sufficient).
    fn pop(&mut self) {
        assert!(!self.is_empty());
        match *self.stack.last().unwrap() {
            InternalKey(_, sz) => {
                let new_size = self.str_buffer.len() - sz as usize;
                self.str_buffer.truncate(new_size);
            }
            InternalIndex(_) => {}
        }
        self.stack.pop();
    }

    // Used by Parser to test whether the top-most element is an index.
    fn last_is_index(&self) -> bool {
        match self.stack.last() {
            Some(InternalIndex(_)) => true,
            _ => false,
        }
    }

    // Used by Parser to increment the index of the top-most element.
    // Panics if the stack is empty or the top element is a Key.
    fn bump_index(&mut self) {
        let len = self.stack.len();
        let idx = match *self.stack.last().unwrap() {
            InternalIndex(i) => { i + 1 }
            _ => { panic!(); }
        };
        self.stack[len - 1] = InternalIndex(idx);
    }
}

/// A streaming JSON parser implemented as an iterator of JsonEvent, consuming
/// an iterator of char.
pub struct Parser<T> {
    rdr: T,
    // Current character, None once the reader is exhausted.
    ch: Option<char>,
    // 1-based position, used for error reporting.
    line: usize,
    col: usize,
    // We maintain a stack representing where we are in the logical structure
    // of the JSON stream.
    stack: Stack,
    // A state machine is kept to make it possible to interrupt and resume parsing.
    state: ParserState,
}

impl<T: Iterator<Item=char>> Iterator for Parser<T> {
    type Item = JsonEvent;

    fn next(&mut self) -> Option<JsonEvent> {
        if self.state == ParseFinished {
            return None;
        }

        if self.state == ParseBeforeFinish {
            self.parse_whitespace();
            // Make sure there is no trailing characters.
            if self.eof() {
                self.state = ParseFinished;
                return None;
            } else {
                return Some(self.error_event(TrailingCharacters));
            }
        }

        Some(self.parse())
    }
}

impl<T: Iterator<Item=char>> Parser<T> {
    /// Creates the JSON parser.
    pub fn new(rdr: T) -> Parser<T> {
        let mut p = Parser {
            rdr,
            // Placeholder; the bump() below loads the real first character.
            ch: Some('\x00'),
            line: 1,
            col: 0,
            stack: Stack::new(),
            state: ParseStart,
        };
        p.bump();
        p
    }

    /// Provides access to the current position in the logical structure of the
    /// JSON stream.
pub fn stack(&self) -> &Stack { &self.stack } fn eof(&self) -> bool { self.ch.is_none() } fn ch_or_null(&self) -> char { self.ch.unwrap_or('\x00') } fn bump(&mut self) { self.ch = self.rdr.next(); if self.ch_is('\n') { self.line += 1; self.col = 1; } else { self.col += 1; } } fn next_char(&mut self) -> Option<char> { self.bump(); self.ch } fn ch_is(&self, c: char) -> bool { self.ch == Some(c) } fn error<U>(&self, reason: ErrorCode) -> Result<U, ParserError> { Err(SyntaxError(reason, self.line, self.col)) } fn parse_whitespace(&mut self) { while self.ch_is(' ') || self.ch_is('\n') || self.ch_is('\t') || self.ch_is('\r') { self.bump(); } } fn parse_number(&mut self) -> JsonEvent { let mut neg = false; if self.ch_is('-') { self.bump(); neg = true; } let res = match self.parse_u64() { Ok(res) => res, Err(e) => { return Error(e); } }; if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') { let mut res = res as f64; if self.ch_is('.') { res = match self.parse_decimal(res) { Ok(res) => res, Err(e) => { return Error(e); } }; } if self.ch_is('e') || self.ch_is('E') { res = match self.parse_exponent(res) { Ok(res) => res, Err(e) => { return Error(e); } }; } if neg { res *= -1.0; } F64Value(res) } else if neg { let res = (res as i64).wrapping_neg(); // Make sure we didn't underflow. if res > 0 { Error(SyntaxError(InvalidNumber, self.line, self.col)) } else { I64Value(res) } } else { U64Value(res) } } fn parse_u64(&mut self) -> Result<u64, ParserError> { let mut accum = 0u64; let last_accum = 0; // necessary to detect overflow. match self.ch_or_null() { '0' => { self.bump(); // A leading '0' must be the only digit before the decimal point. if let '0' ..= '9' = self.ch_or_null() { return self.error(InvalidNumber) } }, '1' ..= '9' => { while !self.eof() { match self.ch_or_null() { c @ '0' ..= '9' => { accum = accum.wrapping_mul(10); accum = accum.wrapping_add((c as u64) - ('0' as u64)); // Detect overflow by comparing to the last value. 
if accum <= last_accum { return self.error(InvalidNumber); } self.bump(); } _ => break, } } } _ => return self.error(InvalidNumber), } Ok(accum) } fn parse_decimal(&mut self, mut res: f64) -> Result<f64, ParserError> { self.bump(); // Make sure a digit follows the decimal place. match self.ch_or_null() { '0' ..= '9' => (), _ => return self.error(InvalidNumber) } let mut dec = 1.0; while !self.eof() { match self.ch_or_null() { c @ '0' ..= '9' => { dec /= 10.0; res += (((c as isize) - ('0' as isize)) as f64) * dec; self.bump(); } _ => break, } } Ok(res) } fn parse_exponent(&mut self, mut res: f64) -> Result<f64, ParserError> { self.bump(); let mut exp = 0; let mut neg_exp = false; if self.ch_is('+') { self.bump(); } else if self.ch_is('-') { self.bump(); neg_exp = true; } // Make sure a digit follows the exponent place. match self.ch_or_null() { '0' ..= '9' => (), _ => return self.error(InvalidNumber) } while !self.eof() { match self.ch_or_null() { c @ '0' ..= '9' => { exp *= 10; exp += (c as usize) - ('0' as usize); self.bump(); } _ => break } } let exp = 10_f64.powi(exp as i32); if neg_exp { res /= exp; } else { res *= exp; } Ok(res) } fn decode_hex_escape(&mut self) -> Result<u16, ParserError> { let mut i = 0; let mut n = 0; while i < 4 && !self.eof() { self.bump(); n = match self.ch_or_null() { c @ '0' ..= '9' => n * 16 + ((c as u16) - ('0' as u16)), 'a' | 'A' => n * 16 + 10, 'b' | 'B' => n * 16 + 11, 'c' | 'C' => n * 16 + 12, 'd' | 'D' => n * 16 + 13, 'e' | 'E' => n * 16 + 14, 'f' | 'F' => n * 16 + 15, _ => return self.error(InvalidEscape) }; i += 1; } // Error out if we didn't parse 4 digits. 
if i != 4 { return self.error(InvalidEscape); } Ok(n) } fn parse_str(&mut self) -> Result<string::String, ParserError> { let mut escape = false; let mut res = string::String::new(); loop { self.bump(); if self.eof() { return self.error(EOFWhileParsingString); } if escape { match self.ch_or_null() { '"' => res.push('"'), '\\' => res.push('\\'), '/' => res.push('/'), 'b' => res.push('\x08'), 'f' => res.push('\x0c'), 'n' => res.push('\n'), 'r' => res.push('\r'), 't' => res.push('\t'), 'u' => match self.decode_hex_escape()? { 0xDC00 ..= 0xDFFF => { return self.error(LoneLeadingSurrogateInHexEscape) } // Non-BMP characters are encoded as a sequence of // two hex escapes, representing UTF-16 surrogates. n1 @ 0xD800 ..= 0xDBFF => { match (self.next_char(), self.next_char()) { (Some('\\'), Some('u')) => (), _ => return self.error(UnexpectedEndOfHexEscape), } let n2 = self.decode_hex_escape()?; if n2 < 0xDC00 || n2 > 0xDFFF { return self.error(LoneLeadingSurrogateInHexEscape) } let c = (((n1 - 0xD800) as u32) << 10 | (n2 - 0xDC00) as u32) + 0x1_0000; res.push(char::from_u32(c).unwrap()); } n => match char::from_u32(n as u32) { Some(c) => res.push(c), None => return self.error(InvalidUnicodeCodePoint), }, }, _ => return self.error(InvalidEscape), } escape = false; } else if self.ch_is('\\') { escape = true; } else { match self.ch { Some('"') => { self.bump(); return Ok(res); }, Some(c) => res.push(c), None => unreachable!() } } } } // Invoked at each iteration, consumes the stream until it has enough // information to return a JsonEvent. // Manages an internal state so that parsing can be interrupted and resumed. // Also keeps track of the position in the logical structure of the json // stream isize the form of a stack that can be queried by the user using the // stack() method. fn parse(&mut self) -> JsonEvent { loop { // The only paths where the loop can spin a new iteration // are in the cases ParseArrayComma and ParseObjectComma if ',' // is parsed. 
In these cases the state is set to (respectively) // ParseArray(false) and ParseObject(false), which always return, // so there is no risk of getting stuck in an infinite loop. // All other paths return before the end of the loop's iteration. self.parse_whitespace(); match self.state { ParseStart => { return self.parse_start(); } ParseArray(first) => { return self.parse_array(first); } ParseArrayComma => { if let Some(evt) = self.parse_array_comma_or_end() { return evt; } } ParseObject(first) => { return self.parse_object(first); } ParseObjectComma => { self.stack.pop(); if self.ch_is(',') { self.state = ParseObject(false); self.bump(); } else { return self.parse_object_end(); } } _ => { return self.error_event(InvalidSyntax); } } } } fn parse_start(&mut self) -> JsonEvent { let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseBeforeFinish, }; val } fn parse_array(&mut self, first: bool) -> JsonEvent { if self.ch_is(']') { if !first { self.error_event(InvalidSyntax) } else { self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); ArrayEnd } } else { if first { self.stack.push_index(0); } let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseArrayComma, }; val } } fn parse_array_comma_or_end(&mut self) -> Option<JsonEvent> { if self.ch_is(',') { self.stack.bump_index(); self.state = ParseArray(false); self.bump(); None } else if self.ch_is(']') { self.stack.pop(); self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); Some(ArrayEnd) } else if self.eof() { Some(self.error_event(EOFWhileParsingArray)) } else { Some(self.error_event(InvalidSyntax)) } } fn 
parse_object(&mut self, first: bool) -> JsonEvent { if self.ch_is('}') { if !first { if self.stack.is_empty() { return self.error_event(TrailingComma); } else { self.stack.pop(); } } self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); return ObjectEnd; } if self.eof() { return self.error_event(EOFWhileParsingObject); } if !self.ch_is('"') { return self.error_event(KeyMustBeAString); } let s = match self.parse_str() { Ok(s) => s, Err(e) => { self.state = ParseFinished; return Error(e); } }; self.parse_whitespace(); if self.eof() { return self.error_event(EOFWhileParsingObject); } else if self.ch_or_null() != ':' { return self.error_event(ExpectedColon); } self.stack.push_key(s); self.bump(); self.parse_whitespace(); let val = self.parse_value(); self.state = match val { Error(_) => ParseFinished, ArrayStart => ParseArray(true), ObjectStart => ParseObject(true), _ => ParseObjectComma, }; val } fn parse_object_end(&mut self) -> JsonEvent { if self.ch_is('}') { self.state = if self.stack.is_empty() { ParseBeforeFinish } else if self.stack.last_is_index() { ParseArrayComma } else { ParseObjectComma }; self.bump(); ObjectEnd } else if self.eof() { self.error_event(EOFWhileParsingObject) } else { self.error_event(InvalidSyntax) } } fn parse_value(&mut self) -> JsonEvent { if self.eof() { return self.error_event(EOFWhileParsingValue); } match self.ch_or_null() { 'n' => { self.parse_ident("ull", NullValue) } 't' => { self.parse_ident("rue", BooleanValue(true)) } 'f' => { self.parse_ident("alse", BooleanValue(false)) } '0' ..= '9' | '-' => self.parse_number(), '"' => match self.parse_str() { Ok(s) => StringValue(s), Err(e) => Error(e), }, '[' => { self.bump(); ArrayStart } '{' => { self.bump(); ObjectStart } _ => { self.error_event(InvalidSyntax) } } } fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent { if ident.chars().all(|c| Some(c) == 
self.next_char()) { self.bump(); value } else { Error(SyntaxError(InvalidSyntax, self.line, self.col)) } } fn error_event(&mut self, reason: ErrorCode) -> JsonEvent { self.state = ParseFinished; Error(SyntaxError(reason, self.line, self.col)) } } /// A Builder consumes a json::Parser to create a generic Json structure. pub struct Builder<T> { parser: Parser<T>, token: Option<JsonEvent>, } impl<T: Iterator<Item=char>> Builder<T> { /// Create a JSON Builder. pub fn new(src: T) -> Builder<T> { Builder { parser: Parser::new(src), token: None, } } // Decode a Json value from a Parser. pub fn build(&mut self) -> Result<Json, BuilderError> { self.bump(); let result = self.build_value(); self.bump(); match self.token { None => {} Some(Error(ref e)) => { return Err(e.clone()); } ref tok => { panic!("unexpected token {:?}", tok.clone()); } } result } fn bump(&mut self) { self.token = self.parser.next(); } fn build_value(&mut self) -> Result<Json, BuilderError> { match self.token { Some(NullValue) => Ok(Json::Null), Some(I64Value(n)) => Ok(Json::I64(n)), Some(U64Value(n)) => Ok(Json::U64(n)), Some(F64Value(n)) => Ok(Json::F64(n)), Some(BooleanValue(b)) => Ok(Json::Boolean(b)), Some(StringValue(ref mut s)) => { let mut temp = string::String::new(); swap(s, &mut temp); Ok(Json::String(temp)) } Some(Error(ref e)) => Err(e.clone()), Some(ArrayStart) => self.build_array(), Some(ObjectStart) => self.build_object(), Some(ObjectEnd) => self.parser.error(InvalidSyntax), Some(ArrayEnd) => self.parser.error(InvalidSyntax), None => self.parser.error(EOFWhileParsingValue), } } fn build_array(&mut self) -> Result<Json, BuilderError> { self.bump(); let mut values = Vec::new(); loop { if self.token == Some(ArrayEnd) { return Ok(Json::Array(values.into_iter().collect())); } match self.build_value() { Ok(v) => values.push(v), Err(e) => { return Err(e) } } self.bump(); } } fn build_object(&mut self) -> Result<Json, BuilderError> { self.bump(); let mut values = BTreeMap::new(); loop { match 
self.token { Some(ObjectEnd) => { return Ok(Json::Object(values)); } Some(Error(ref e)) => { return Err(e.clone()); } None => { break; } _ => {} } let key = match self.parser.stack().top() { Some(StackElement::Key(k)) => { k.to_owned() } _ => { panic!("invalid state"); } }; match self.build_value() { Ok(value) => { values.insert(key, value); } Err(e) => { return Err(e); } } self.bump(); } self.parser.error(EOFWhileParsingObject) } } /// Decodes a json value from an `&mut io::Read` pub fn from_reader(rdr: &mut dyn Read) -> Result<Json, BuilderError> { let mut contents = Vec::new(); match rdr.read_to_end(&mut contents) { Ok(c) => c, Err(e) => return Err(io_error_to_error(e)) }; let s = match str::from_utf8(&contents).ok() { Some(s) => s, _ => return Err(SyntaxError(NotUtf8, 0, 0)) }; let mut builder = Builder::new(s.chars()); builder.build() } /// Decodes a json value from a string pub fn from_str(s: &str) -> Result<Json, BuilderError> { let mut builder = Builder::new(s.chars()); builder.build() } /// A structure to decode JSON to values in rust. pub struct Decoder { stack: Vec<Json>, } impl Decoder { /// Creates a new decoder instance for decoding the specified JSON value. pub fn new(json: Json) -> Decoder { Decoder { stack: vec![json] } } fn pop(&mut self) -> Json { self.stack.pop().unwrap() } } macro_rules! expect { ($e:expr, Null) => ({ match $e { Json::Null => Ok(()), other => Err(ExpectedError("Null".to_owned(), other.to_string())) } }); ($e:expr, $t:ident) => ({ match $e { Json::$t(v) => Ok(v), other => { Err(ExpectedError(stringify!($t).to_owned(), other.to_string())) } } }) } macro_rules! read_primitive { ($name:ident, $ty:ty) => { fn $name(&mut self) -> DecodeResult<$ty> { match self.pop() { Json::I64(f) => Ok(f as $ty), Json::U64(f) => Ok(f as $ty), Json::F64(f) => Err(ExpectedError("Integer".to_owned(), f.to_string())), // re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc) // is going to have a string here, as per JSON spec. 
Json::String(s) => match s.parse().ok() { Some(f) => Ok(f), None => Err(ExpectedError("Number".to_owned(), s)), }, value => Err(ExpectedError("Number".to_owned(), value.to_string())), } } } } impl ::Decoder for Decoder { type Error = DecoderError; fn read_nil(&mut self) -> DecodeResult<()> { expect!(self.pop(), Null) } read_primitive! { read_usize, usize } read_primitive! { read_u8, u8 } read_primitive! { read_u16, u16 } read_primitive! { read_u32, u32 } read_primitive! { read_u64, u64 } read_primitive! { read_u128, u128 } read_primitive! { read_isize, isize } read_primitive! { read_i8, i8 } read_primitive! { read_i16, i16 } read_primitive! { read_i32, i32 } read_primitive! { read_i64, i64 } read_primitive! { read_i128, i128 } fn read_f32(&mut self) -> DecodeResult<f32> { self.read_f64().map(|x| x as f32) } fn read_f64(&mut self) -> DecodeResult<f64> { match self.pop() { Json::I64(f) => Ok(f as f64), Json::U64(f) => Ok(f as f64), Json::F64(f) => Ok(f), Json::String(s) => { // re: #12967.. a type w/ numeric keys (ie HashMap<usize, V> etc) // is going to have a string here, as per JSON spec. 
match s.parse().ok() { Some(f) => Ok(f), None => Err(ExpectedError("Number".to_owned(), s)), } }, Json::Null => Ok(f64::NAN), value => Err(ExpectedError("Number".to_owned(), value.to_string())) } } fn read_bool(&mut self) -> DecodeResult<bool> { expect!(self.pop(), Boolean) } fn read_char(&mut self) -> DecodeResult<char> { let s = self.read_str()?; { let mut it = s.chars(); match (it.next(), it.next()) { // exactly one character (Some(c), None) => return Ok(c), _ => () } } Err(ExpectedError("single character string".to_owned(), s.to_string())) } fn read_str(&mut self) -> DecodeResult<Cow<str>> { expect!(self.pop(), String).map(Cow::Owned) } fn read_enum<T, F>(&mut self, _name: &str, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { f(self) } fn read_enum_variant<T, F>(&mut self, names: &[&str], mut f: F) -> DecodeResult<T> where F: FnMut(&mut Decoder, usize) -> DecodeResult<T>, { let name = match self.pop() { Json::String(s) => s, Json::Object(mut o) => { let n = match o.remove(&"variant".to_owned()) { Some(Json::String(s)) => s, Some(val) => { return Err(ExpectedError("String".to_owned(), val.to_string())) } None => { return Err(MissingFieldError("variant".to_owned())) } }; match o.remove(&"fields".to_string()) { Some(Json::Array(l)) => { self.stack.extend(l.into_iter().rev()); }, Some(val) => { return Err(ExpectedError("Array".to_owned(), val.to_string())) } None => { return Err(MissingFieldError("fields".to_owned())) } } n } json => { return Err(ExpectedError("String or Object".to_owned(), json.to_string())) } }; let idx = match names.iter().position(|n| *n == &name[..]) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; f(self, idx) } fn read_enum_variant_arg<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { f(self) } fn read_enum_struct_variant<T, F>(&mut self, names: &[&str], f: F) -> DecodeResult<T> where F: FnMut(&mut Decoder, usize) -> DecodeResult<T>, { 
self.read_enum_variant(names, f) } fn read_enum_struct_variant_field<T, F>(&mut self, _name: &str, idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { self.read_enum_variant_arg(idx, f) } fn read_struct<T, F>(&mut self, _name: &str, _len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { let value = f(self)?; self.pop(); Ok(value) } fn read_struct_field<T, F>(&mut self, name: &str, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { let mut obj = expect!(self.pop(), Object)?; let value = match obj.remove(&name.to_string()) { None => { // Add a Null and try to parse it as an Option<_> // to get None as a default value. self.stack.push(Json::Null); match f(self) { Ok(x) => x, Err(_) => return Err(MissingFieldError(name.to_string())), } }, Some(json) => { self.stack.push(json); f(self)? } }; self.stack.push(Json::Object(obj)); Ok(value) } fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { self.read_seq(move |d, len| { if len == tuple_len { f(d) } else { Err(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len))) } }) } fn read_tuple_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { self.read_seq_elt(idx, f) } fn read_tuple_struct<T, F>(&mut self, _name: &str, len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { self.read_tuple(len, f) } fn read_tuple_struct_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { self.read_tuple_arg(idx, f) } fn read_option<T, F>(&mut self, mut f: F) -> DecodeResult<T> where F: FnMut(&mut Decoder, bool) -> DecodeResult<T>, { match self.pop() { Json::Null => f(self, false), value => { self.stack.push(value); f(self, true) } } } fn read_seq<T, F>(&mut self, f: F) -> DecodeResult<T> where F: 
FnOnce(&mut Decoder, usize) -> DecodeResult<T>, { let array = expect!(self.pop(), Array)?; let len = array.len(); self.stack.extend(array.into_iter().rev()); f(self, len) } fn read_seq_elt<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { f(self) } fn read_map<T, F>(&mut self, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder, usize) -> DecodeResult<T>, { let obj = expect!(self.pop(), Object)?; let len = obj.len(); for (key, value) in obj { self.stack.push(value); self.stack.push(Json::String(key)); } f(self, len) } fn read_map_elt_key<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { f(self) } fn read_map_elt_val<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Decoder) -> DecodeResult<T>, { f(self) } fn error(&mut self, err: &str) -> DecoderError { ApplicationError(err.to_string()) } } /// A trait for converting values to JSON pub trait ToJson { /// Converts the value of `self` to an instance of JSON fn to_json(&self) -> Json; } macro_rules! to_json_impl_i64 { ($($t:ty), +) => ( $(impl ToJson for $t { fn to_json(&self) -> Json { Json::I64(*self as i64) } })+ ) } to_json_impl_i64! { isize, i8, i16, i32, i64 } macro_rules! to_json_impl_u64 { ($($t:ty), +) => ( $(impl ToJson for $t { fn to_json(&self) -> Json { Json::U64(*self as u64) } })+ ) } to_json_impl_u64! 
{ usize, u8, u16, u32, u64 } impl ToJson for Json { fn to_json(&self) -> Json { self.clone() } } impl ToJson for f32 { fn to_json(&self) -> Json { (*self as f64).to_json() } } impl ToJson for f64 { fn to_json(&self) -> Json { match self.classify() { Fp::Nan | Fp::Infinite => Json::Null, _ => Json::F64(*self) } } } impl ToJson for () { fn to_json(&self) -> Json { Json::Null } } impl ToJson for bool { fn to_json(&self) -> Json { Json::Boolean(*self) } } impl ToJson for str { fn to_json(&self) -> Json { Json::String(self.to_string()) } } impl ToJson for string::String { fn to_json(&self) -> Json { Json::String((*self).clone()) } } macro_rules! tuple_impl { // use variables to indicate the arity of the tuple ($($tyvar:ident),* ) => { // the trailing commas are for the 1 tuple impl< $( $tyvar : ToJson ),* > ToJson for ( $( $tyvar ),* , ) { #[inline] #[allow(non_snake_case)] fn to_json(&self) -> Json { match *self { ($(ref $tyvar),*,) => Json::Array(vec![$($tyvar.to_json()),*]) } } } } } tuple_impl!{A} tuple_impl!{A, B} tuple_impl!{A, B, C} tuple_impl!{A, B, C, D} tuple_impl!{A, B, C, D, E} tuple_impl!{A, B, C, D, E, F} tuple_impl!{A, B, C, D, E, F, G} tuple_impl!{A, B, C, D, E, F, G, H} tuple_impl!{A, B, C, D, E, F, G, H, I} tuple_impl!{A, B, C, D, E, F, G, H, I, J} tuple_impl!{A, B, C, D, E, F, G, H, I, J, K} tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L} impl<A: ToJson> ToJson for [A] { fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } } impl<A: ToJson> ToJson for Vec<A> { fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } } impl<A: ToJson> ToJson for BTreeMap<string::String, A> { fn to_json(&self) -> Json { let mut d = BTreeMap::new(); for (key, value) in self { d.insert((*key).clone(), value.to_json()); } Json::Object(d) } } impl<A: ToJson> ToJson for HashMap<string::String, A> { fn to_json(&self) -> Json { let mut d = BTreeMap::new(); for (key, value) in self { d.insert((*key).clone(), 
value.to_json()); } Json::Object(d) } } impl<A:ToJson> ToJson for Option<A> { fn to_json(&self) -> Json { match *self { None => Json::Null, Some(ref value) => value.to_json() } } } struct FormatShim<'a, 'b: 'a> { inner: &'a mut fmt::Formatter<'b>, } impl<'a, 'b> fmt::Write for FormatShim<'a, 'b> { fn write_str(&mut self, s: &str) -> fmt::Result { match self.inner.write_str(s) { Ok(_) => Ok(()), Err(_) => Err(fmt::Error) } } } impl fmt::Display for Json { /// Encodes a json value into a string fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut shim = FormatShim { inner: f }; let mut encoder = Encoder::new(&mut shim); match self.encode(&mut encoder) { Ok(_) => Ok(()), Err(_) => Err(fmt::Error) } } } impl<'a> fmt::Display for PrettyJson<'a> { /// Encodes a json value into a string fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut shim = FormatShim { inner: f }; let mut encoder = PrettyEncoder::new(&mut shim); match self.inner.encode(&mut encoder) { Ok(_) => Ok(()), Err(_) => Err(fmt::Error) } } } impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> { /// Encodes a json value into a string fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut shim = FormatShim { inner: f }; let mut encoder = Encoder::new(&mut shim); match self.inner.encode(&mut encoder) { Ok(_) => Ok(()), Err(_) => Err(fmt::Error) } } } impl<'a, T> AsPrettyJson<'a, T> { /// Set the indentation level for the emitted JSON pub fn indent(mut self, indent: usize) -> AsPrettyJson<'a, T> { self.indent = Some(indent); self } } impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> { /// Encodes a json value into a string fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut shim = FormatShim { inner: f }; let mut encoder = PrettyEncoder::new(&mut shim); if let Some(n) = self.indent { encoder.set_indent(n); } match self.inner.encode(&mut encoder) { Ok(_) => Ok(()), Err(_) => Err(fmt::Error) } } } impl FromStr for Json { type Err = BuilderError; fn from_str(s: 
&str) -> Result<Json, BuilderError> { from_str(s) } } #[cfg(test)] mod tests { extern crate test; use self::Animal::*; use self::test::Bencher; use {Encodable, Decodable}; use super::Json::*; use super::ErrorCode::*; use super::ParserError::*; use super::DecoderError::*; use super::JsonEvent::*; use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement, Stack, Decoder, Encoder, EncoderError}; use std::{i64, u64, f32, f64}; use std::io::prelude::*; use std::collections::BTreeMap; use std::string; #[derive(RustcDecodable, Eq, PartialEq, Debug)] struct OptionData { opt: Option<usize>, } #[test] fn test_decode_option_none() { let s ="{}"; let obj: OptionData = super::decode(s).unwrap(); assert_eq!(obj, OptionData { opt: None }); } #[test] fn test_decode_option_some() { let s = "{ \"opt\": 10 }"; let obj: OptionData = super::decode(s).unwrap(); assert_eq!(obj, OptionData { opt: Some(10) }); } #[test] fn test_decode_option_malformed() { check_err::<OptionData>("{ \"opt\": [] }", ExpectedError("Number".to_string(), "[]".to_string())); check_err::<OptionData>("{ \"opt\": false }", ExpectedError("Number".to_string(), "false".to_string())); } #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] enum Animal { Dog, Frog(string::String, isize) } #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] struct Inner { a: (), b: usize, c: Vec<string::String>, } #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] struct Outer { inner: Vec<Inner>, } fn mk_object(items: &[(string::String, Json)]) -> Json { let mut d = BTreeMap::new(); for item in items { match *item { (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, } }; Object(d) } #[test] fn test_from_str_trait() { let s = "null"; assert!(s.parse::<Json>().unwrap() == s.parse().unwrap()); } #[test] fn test_write_null() { assert_eq!(Null.to_string(), "null"); assert_eq!(Null.pretty().to_string(), "null"); } #[test] fn test_write_i64() { 
assert_eq!(U64(0).to_string(), "0"); assert_eq!(U64(0).pretty().to_string(), "0"); assert_eq!(U64(1234).to_string(), "1234"); assert_eq!(U64(1234).pretty().to_string(), "1234"); assert_eq!(I64(-5678).to_string(), "-5678"); assert_eq!(I64(-5678).pretty().to_string(), "-5678"); assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000"); assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000"); } /* Floats: whole numbers keep a trailing .0; NaN and both infinities serialize as null. */ #[test] fn test_write_f64() { assert_eq!(F64(3.0).to_string(), "3.0"); assert_eq!(F64(3.0).pretty().to_string(), "3.0"); assert_eq!(F64(3.1).to_string(), "3.1"); assert_eq!(F64(3.1).pretty().to_string(), "3.1"); assert_eq!(F64(-1.5).to_string(), "-1.5"); assert_eq!(F64(-1.5).pretty().to_string(), "-1.5"); assert_eq!(F64(0.5).to_string(), "0.5"); assert_eq!(F64(0.5).pretty().to_string(), "0.5"); assert_eq!(F64(f64::NAN).to_string(), "null"); assert_eq!(F64(f64::NAN).pretty().to_string(), "null"); assert_eq!(F64(f64::INFINITY).to_string(), "null"); assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null"); assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null"); assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null"); } /* Strings are emitted double-quoted. */ #[test] fn test_write_str() { assert_eq!(String("".to_string()).to_string(), "\"\""); assert_eq!(String("".to_string()).pretty().to_string(), "\"\""); assert_eq!(String("homura".to_string()).to_string(), "\"homura\""); assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\""); } #[test] fn test_write_bool() { assert_eq!(Boolean(true).to_string(), "true"); assert_eq!(Boolean(true).pretty().to_string(), "true"); assert_eq!(Boolean(false).to_string(), "false"); assert_eq!(Boolean(false).pretty().to_string(), "false"); } /* Arrays: empty arrays stay on one line; pretty output indents one element per line. */ #[test] fn test_write_array() { assert_eq!(Array(vec![]).to_string(), "[]"); assert_eq!(Array(vec![]).pretty().to_string(), "[]"); assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]"); assert_eq!( Array(vec![Boolean(true)]).pretty().to_string(), "\ [\n \ true\n\ ]" ); let 
long_test_array = Array(vec![ Boolean(false), Null, Array(vec![String("foo\nbar".to_string()), F64(3.5)])]); assert_eq!(long_test_array.to_string(), "[false,null,[\"foo\\nbar\",3.5]]"); assert_eq!( long_test_array.pretty().to_string(), "\ [\n \ false,\n \ null,\n \ [\n \ \"foo\\nbar\",\n \ 3.5\n \ ]\n\ ]" ); } /* Objects: compact form has no spaces; pretty form indents "key": value pairs. */ #[test] fn test_write_object() { assert_eq!(mk_object(&[]).to_string(), "{}"); assert_eq!(mk_object(&[]).pretty().to_string(), "{}"); assert_eq!( mk_object(&[ ("a".to_string(), Boolean(true)) ]).to_string(), "{\"a\":true}" ); assert_eq!( mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(), "\ {\n \ \"a\": true\n\ }" ); let complex_obj = mk_object(&[ ("b".to_string(), Array(vec![ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), mk_object(&[("d".to_string(), String("".to_string()))]) ])) ]); assert_eq!( complex_obj.to_string(), "{\ \"b\":[\ {\"c\":\"\\f\\r\"},\ {\"d\":\"\"}\ ]\ }" ); assert_eq!( complex_obj.pretty().to_string(), "\ {\n \ \"b\": [\n \ {\n \ \"c\": \"\\f\\r\"\n \ },\n \ {\n \ \"d\": \"\"\n \ }\n \ ]\n\ }" ); let a = mk_object(&[ ("a".to_string(), Boolean(true)), ("b".to_string(), Array(vec![ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), mk_object(&[("d".to_string(), String("".to_string()))]) ])) ]); // We can't compare the strings directly because the object fields be // printed in a different order. assert_eq!(a.clone(), a.to_string().parse().unwrap()); assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap()); } /* Enums: unit variants encode as a bare string; tuple variants as {"variant", "fields"}. */ #[test] fn test_write_enum() { let animal = Dog; assert_eq!( super::as_json(&animal).to_string(), "\"Dog\"" ); assert_eq!( super::as_pretty_json(&animal).to_string(), "\"Dog\"" ); let animal = Frog("Henry".to_string(), 349); assert_eq!( super::as_json(&animal).to_string(), "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}" ); assert_eq!( super::as_pretty_json(&animal).to_string(), "{\n \ \"variant\": \"Frog\",\n \ \"fields\": [\n \ \"Henry\",\n \ 349\n \ ]\n\ }" ); } macro_rules! 
check_encoder_for_simple { ($value:expr, $expected:expr) => ({ /* Macro helper: a "simple" value must render identically in compact and pretty form. */ let s = super::as_json(&$value).to_string(); assert_eq!(s, $expected); let s = super::as_pretty_json(&$value).to_string(); assert_eq!(s, $expected); }) } #[test] fn test_write_some() { check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\""); } #[test] fn test_write_none() { check_encoder_for_simple!(None::<string::String>, "null"); } /* Chars: control chars get \u escapes; printable non-ASCII is emitted literally. */ #[test] fn test_write_char() { check_encoder_for_simple!('a', "\"a\""); check_encoder_for_simple!('\t', "\"\\t\""); check_encoder_for_simple!('\u{0000}', "\"\\u0000\""); check_encoder_for_simple!('\u{001b}', "\"\\u001b\""); check_encoder_for_simple!('\u{007f}', "\"\\u007f\""); check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\""); check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\""); check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\""); } /* Any bytes after a complete value are a TrailingCharacters error (with 1-based column). */ #[test] fn test_trailing_characters() { assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6))); assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2))); assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3))); assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3))); } /* Truncated or misspelled keywords are InvalidSyntax; whole keywords parse, with whitespace allowed. */ #[test] fn test_read_identifiers() { assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3))); assert_eq!(from_str("null"), Ok(Null)); assert_eq!(from_str("true"), Ok(Boolean(true))); assert_eq!(from_str("false"), Ok(Boolean(false))); assert_eq!(from_str(" null "), Ok(Null)); assert_eq!(from_str(" true 
"), Ok(Boolean(true))); assert_eq!(from_str(" false "), Ok(Boolean(false))); } #[test] fn test_decode_identifiers() { let v: () = super::decode("null").unwrap(); assert_eq!(v, ()); let v: bool = super::decode("true").unwrap(); assert_eq!(v, true); let v: bool = super::decode("false").unwrap(); assert_eq!(v, false); } /* Numbers: malformed forms fail; positives outside i64 go to U64; overflow past u64/i64 bounds is InvalidNumber. */ #[test] fn test_read_number() { assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1))); assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1))); assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1))); assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2))); assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2))); assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3))); assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3))); assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4))); assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20))); assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21))); assert_eq!(from_str("3"), Ok(U64(3))); assert_eq!(from_str("3.1"), Ok(F64(3.1))); assert_eq!(from_str("-1.2"), Ok(F64(-1.2))); assert_eq!(from_str("0.4"), Ok(F64(0.4))); assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5))); assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15))); assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01))); assert_eq!(from_str(" 3 "), Ok(U64(3))); assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN))); assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64))); assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX))); } /* Decoding into concrete numeric types, including the full i64/u64 ranges. */ #[test] fn test_decode_numbers() { let v: f64 = super::decode("3").unwrap(); assert_eq!(v, 3.0); let v: f64 = super::decode("3.1").unwrap(); assert_eq!(v, 3.1); let v: f64 = super::decode("-1.2").unwrap(); assert_eq!(v, -1.2); let v: f64 = super::decode("0.4").unwrap(); assert_eq!(v, 0.4); let v: f64 = super::decode("0.4e5").unwrap(); assert_eq!(v, 0.4e5); let v: f64 = 
super::decode("0.4e15").unwrap(); assert_eq!(v, 0.4e15); let v: f64 = super::decode("0.4e-01").unwrap(); assert_eq!(v, 0.4e-01); let v: u64 = super::decode("0").unwrap(); assert_eq!(v, 0); let v: u64 = super::decode("18446744073709551615").unwrap(); assert_eq!(v, u64::MAX); let v: i64 = super::decode("-9223372036854775808").unwrap(); assert_eq!(v, i64::MIN); let v: i64 = super::decode("9223372036854775807").unwrap(); assert_eq!(v, i64::MAX); /* A fractional literal must not silently truncate when an integer is requested. */ let res: DecodeResult<i64> = super::decode("765.25"); assert_eq!(res, Err(ExpectedError("Integer".to_string(), "765.25".to_string()))); } /* Strings: unterminated input is EOFWhileParsingString; standard and \uXXXX escapes unescape. */ #[test] fn test_read_str() { assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2))); assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5))); assert_eq!(from_str("\"\""), Ok(String("".to_string()))); assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string()))); assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string()))); assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string()))); assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string()))); assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string()))); assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string()))); assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string()))); assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string()))); assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string()))); } /* Table-driven: each escaped JSON string decodes to the expected Rust string. */ #[test] fn test_decode_str() { let s = [("\"\"", ""), ("\"foo\"", "foo"), ("\"\\\"\"", "\""), ("\"\\b\"", "\x08"), ("\"\\n\"", "\n"), ("\"\\r\"", "\r"), ("\"\\t\"", "\t"), ("\"\\u12ab\"", "\u{12ab}"), ("\"\\uAB12\"", "\u{AB12}")]; for &(i, o) in &s { let v: string::String = super::decode(i).unwrap(); assert_eq!(v, o); } } /* Arrays: truncated inputs give position-accurate EOF errors. */ #[test] fn test_read_array() { assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); 
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("[]"), Ok(Array(vec![]))); assert_eq!(from_str("[ ]"), Ok(Array(vec![]))); assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)]))); assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)]))); assert_eq!(from_str("[null]"), Ok(Array(vec![Null]))); assert_eq!(from_str("[3, 1]"), Ok(Array(vec![U64(3), U64(1)]))); assert_eq!(from_str("\n[3, 2]\n"), Ok(Array(vec![U64(3), U64(2)]))); assert_eq!(from_str("[2, [4, 1]]"), Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])]))); } /* Decoding JSON arrays into Vec of unit, bool, int and nested Vec. */ #[test] fn test_decode_array() { let v: Vec<()> = super::decode("[]").unwrap(); assert_eq!(v, []); let v: Vec<()> = super::decode("[null]").unwrap(); assert_eq!(v, [()]); let v: Vec<bool> = super::decode("[true]").unwrap(); assert_eq!(v, [true]); let v: Vec<isize> = super::decode("[3, 1]").unwrap(); assert_eq!(v, [3, 1]); let v: Vec<Vec<usize>> = super::decode("[[3], [1, 2]]").unwrap(); assert_eq!(v, [vec![3], vec![1, 2]]); } /* Tuples decode from fixed-length arrays; wrong element types or arity must error. */ #[test] fn test_decode_tuple() { let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap(); assert_eq!(t, (1, 2, 3)); let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap(); assert_eq!(t, (1, "two".to_string())); } #[test] fn test_decode_tuple_malformed_types() { assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err()); } #[test] fn test_decode_tuple_malformed_length() { assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err()); } /* Objects: each truncated/malformed prefix maps to a specific ParserError and position. */ #[test] fn test_read_object() { assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2))); assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3))); assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2))); assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5))); assert_eq!(from_str("{\"a\" "), 
Err(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6))); assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6))); assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7))); assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8))); assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8))); assert_eq!(from_str("{}").unwrap(), mk_object(&[])); assert_eq!(from_str("{\"a\": 3}").unwrap(), mk_object(&[("a".to_string(), U64(3))])); assert_eq!(from_str( "{ \"a\": null, \"b\" : true }").unwrap(), mk_object(&[ ("a".to_string(), Null), ("b".to_string(), Boolean(true))])); assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(), mk_object(&[ ("a".to_string(), Null), ("b".to_string(), Boolean(true))])); assert_eq!(from_str( "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(), mk_object(&[ ("a".to_string(), F64(1.0)), ("b".to_string(), Array(vec![Boolean(true)])) ])); assert_eq!(from_str( "{\ \"a\": 1.0, \ \"b\": [\ true,\ \"foo\\nbar\", \ { \"c\": {\"d\": null} } \ ]\ }").unwrap(), mk_object(&[ ("a".to_string(), F64(1.0)), ("b".to_string(), Array(vec![ Boolean(true), String("foo\nbar".to_string()), mk_object(&[ ("c".to_string(), mk_object(&[("d".to_string(), Null)])) ]) ])) ])); } /* Nested derived structs round-trip from a JSON object. */ #[test] fn test_decode_struct() { let s = "{ \"inner\": [ { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] } ] }"; let v: Outer = super::decode(s).unwrap(); assert_eq!( v, Outer { inner: vec![ Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] } ] } ); } /* JSON null decodes into f64 fields as NaN (mirrors NaN encoding as null). */ #[derive(RustcDecodable)] struct FloatStruct { f: f64, a: Vec<f64> } #[test] fn test_decode_struct_with_nan() { let s = "{\"f\":null,\"a\":[null,123]}"; let obj: FloatStruct = super::decode(s).unwrap(); assert!(obj.f.is_nan()); assert!(obj.a[0].is_nan()); assert_eq!(obj.a[1], 123f64); } #[test] fn test_decode_option() { let value: Option<string::String> = super::decode("null").unwrap(); 
assert_eq!(value, None); let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap(); assert_eq!(value, Some("jodhpurs".to_string())); } #[test] fn test_decode_enum() { let value: Animal = super::decode("\"Dog\"").unwrap(); assert_eq!(value, Dog); let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; let value: Animal = super::decode(s).unwrap(); assert_eq!(value, Frog("Henry".to_string(), 349)); } /* Maps with enum values decode through BTreeMap<String, Animal>. */ #[test] fn test_decode_map() { let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\ \"fields\":[\"Henry\", 349]}}"; let mut map: BTreeMap<string::String, Animal> = super::decode(s).unwrap(); assert_eq!(map.remove(&"a".to_string()), Some(Dog)); assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349))); } /* Errors report line/column across embedded newlines. */ #[test] fn test_multiline_errors() { assert_eq!(from_str("{\n \"foo\":\n \"bar\""), Err(SyntaxError(EOFWhileParsingObject, 3, 8))); } /* Fixtures for decode-error tests below. */ #[derive(RustcDecodable)] #[allow(dead_code)] struct DecodeStruct { x: f64, y: bool, z: string::String, w: Vec<DecodeStruct> } #[derive(RustcDecodable)] enum DecodeEnum { A(f64), B(string::String) } /* Helper: parse then decode, asserting the decode fails with exactly `expected`; panics if the input fails to parse or decodes successfully. */ fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) { let res: DecodeResult<T> = match from_str(to_parse) { Err(e) => Err(ParseError(e)), Ok(json) => Decodable::decode(&mut Decoder::new(json)) }; match res { Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`", to_parse, expected), Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}", to_parse, e), Err(e) => { assert_eq!(e, expected); } } } /* Each wrongly-typed field yields an ExpectedError naming the wanted type. */ #[test] fn test_decode_errors_struct() { check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string())); check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}", ExpectedError("Number".to_string(), "true".to_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}", ExpectedError("Boolean".to_string(), "[]".to_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": 
[]}", ExpectedError("String".to_string(), "{}".to_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}", ExpectedError("Array".to_string(), "null".to_string())); check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}", MissingFieldError("w".to_string())); } /* Enum decoding: missing "variant"/"fields" keys and unknown variants produce distinct errors. */ #[test] fn test_decode_errors_enum() { check_err::<DecodeEnum>("{}", MissingFieldError("variant".to_string())); check_err::<DecodeEnum>("{\"variant\": 1}", ExpectedError("String".to_string(), "1".to_string())); check_err::<DecodeEnum>("{\"variant\": \"A\"}", MissingFieldError("fields".to_string())); check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}", ExpectedError("Array".to_string(), "null".to_string())); check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}", UnknownVariantError("C".to_string())); } /* Navigation helpers on Json: find (one key), find_path (key chain), search (deep), Index. */ #[test] fn test_find(){ let json_value = from_str("{\"dog\" : \"cat\"}").unwrap(); let found_str = json_value.find("dog"); assert!(found_str.unwrap().as_string().unwrap() == "cat"); } #[test] fn test_find_path(){ let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); let found_str = json_value.find_path(&["dog", "cat", "mouse"]); assert!(found_str.unwrap().as_string().unwrap() == "cheese"); } #[test] fn test_search(){ let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); let found_str = json_value.search("mouse").and_then(|j| j.as_string()); assert!(found_str.unwrap() == "cheese"); } #[test] fn test_index(){ let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap(); let ref array = json_value["animals"]; assert_eq!(array[0].as_string().unwrap(), "dog"); assert_eq!(array[1].as_string().unwrap(), "cat"); assert_eq!(array[2].as_string().unwrap(), "mouse"); } /* Type-query accessors: is_* predicates and as_* Option-returning getters. */ #[test] fn test_is_object(){ let json_value = from_str("{}").unwrap(); assert!(json_value.is_object()); } #[test] fn test_as_object(){ let json_value = from_str("{}").unwrap(); let json_object = 
json_value.as_object(); assert!(json_object.is_some()); } #[test] fn test_is_array(){ let json_value = from_str("[1, 2, 3]").unwrap(); assert!(json_value.is_array()); } #[test] fn test_as_array(){ let json_value = from_str("[1, 2, 3]").unwrap(); let json_array = json_value.as_array(); let expected_length = 3; assert!(json_array.is_some() && json_array.unwrap().len() == expected_length); } #[test] fn test_is_string(){ let json_value = from_str("\"dog\"").unwrap(); assert!(json_value.is_string()); } #[test] fn test_as_string(){ let json_value = from_str("\"dog\"").unwrap(); let json_str = json_value.as_string(); let expected_str = "dog"; assert_eq!(json_str, Some(expected_str)); } /* Numeric predicates: is_i64 is true only for I64 (negatives); positives parse as U64; fractions as F64. */ #[test] fn test_is_number(){ let json_value = from_str("12").unwrap(); assert!(json_value.is_number()); } #[test] fn test_is_i64(){ let json_value = from_str("-12").unwrap(); assert!(json_value.is_i64()); let json_value = from_str("12").unwrap(); assert!(!json_value.is_i64()); let json_value = from_str("12.0").unwrap(); assert!(!json_value.is_i64()); } #[test] fn test_is_u64(){ let json_value = from_str("12").unwrap(); assert!(json_value.is_u64()); let json_value = from_str("-12").unwrap(); assert!(!json_value.is_u64()); let json_value = from_str("12.0").unwrap(); assert!(!json_value.is_u64()); } #[test] fn test_is_f64(){ let json_value = from_str("12").unwrap(); assert!(!json_value.is_f64()); let json_value = from_str("-12").unwrap(); assert!(!json_value.is_f64()); let json_value = from_str("12.0").unwrap(); assert!(json_value.is_f64()); let json_value = from_str("-12.0").unwrap(); assert!(json_value.is_f64()); } /* as_i64/as_u64/as_f64 extract the native numeric value. */ #[test] fn test_as_i64(){ let json_value = from_str("-12").unwrap(); let json_num = json_value.as_i64(); assert_eq!(json_num, Some(-12)); } #[test] fn test_as_u64(){ let json_value = from_str("12").unwrap(); let json_num = json_value.as_u64(); assert_eq!(json_num, Some(12)); } #[test] fn test_as_f64(){ let json_value = from_str("12.0").unwrap(); let json_num = json_value.as_f64(); 
assert_eq!(json_num, Some(12f64)); } #[test] fn test_is_boolean(){ let json_value = from_str("false").unwrap(); assert!(json_value.is_boolean()); } #[test] fn test_as_boolean(){ let json_value = from_str("false").unwrap(); let json_bool = json_value.as_boolean(); let expected_bool = false; assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool); } #[test] fn test_is_null(){ let json_value = from_str("null").unwrap(); assert!(json_value.is_null()); } #[test] fn test_as_null(){ let json_value = from_str("null").unwrap(); let json_null = json_value.as_null(); let expected_null = (); assert!(json_null.is_some() && json_null.unwrap() == expected_null); } /* Numeric map keys are stringified on encode; the output must re-parse as valid JSON. */ #[test] fn test_encode_hashmap_with_numeric_key() { use std::str::from_utf8; use std::collections::HashMap; let mut hm: HashMap<usize, bool> = HashMap::new(); hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); let json_str = from_utf8(&mem_buf[..]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go } } #[test] fn test_prettyencode_hashmap_with_numeric_key() { use std::str::from_utf8; use std::collections::HashMap; let mut hm: HashMap<usize, bool> = HashMap::new(); hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); let json_str = from_utf8(&mem_buf[..]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go } } #[test] fn test_prettyencoder_indent_level_param() { use std::str::from_utf8; use std::collections::BTreeMap; let mut tree = BTreeMap::new(); tree.insert("hello".to_string(), String("guten tag".to_string())); tree.insert("goodbye".to_string(), String("sayonara".to_string())); let json = Array( // The following layout below should look a lot like // the pretty-printed JSON (indent * x) vec! 
( // 0x String("greetings".to_string()), // 1x Object(tree), // 1x + 2x + 2x + 1x ) // 0x // End JSON array (7 lines) ); /* Counts leading spaces per output line to verify indent() scales every nesting level. */ // Helper function for counting indents fn indents(source: &str) -> usize { let trimmed = source.trim_left_matches(' '); source.len() - trimmed.len() } // Test up to 4 spaces of indents (more?) for i in 0..4 { let mut writer = Vec::new(); write!(&mut writer, "{}", super::as_pretty_json(&json).indent(i)).unwrap(); let printed = from_utf8(&writer[..]).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); assert_eq!(lines.len(), 7); // JSON should be 7 lines assert_eq!(indents(lines[0]), 0 * i); // [ assert_eq!(indents(lines[1]), 1 * i); // "greetings", assert_eq!(indents(lines[2]), 1 * i); // { assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag", assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara" assert_eq!(indents(lines[5]), 1 * i); // }, assert_eq!(indents(lines[6]), 0 * i); // ] // Finally, test that the pretty-printed JSON is valid from_str(printed).ok().expect("Pretty-printed JSON is invalid!") ; } } /* Hashable enum keys round-trip through encode/decode as object keys. */ #[test] fn test_hashmap_with_enum_key() { use std::collections::HashMap; use json; #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)] enum Enum { Foo, #[allow(dead_code)] Bar, } let mut map = HashMap::new(); map.insert(Enum::Foo, 0); let result = json::encode(&map).unwrap(); assert_eq!(&result[..], r#"{"Foo":0}"#); let decoded: HashMap<Enum, _> = json::decode(&result).unwrap(); assert_eq!(map, decoded); } /* String object keys that parse as numbers decode into numeric-keyed maps. */ #[test] fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() { use std::collections::HashMap; use Decodable; let json_str = "{\"1\":true}"; let json_obj = match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), Ok(o) => o }; let mut decoder = Decoder::new(json_obj); let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap(); } #[test] fn test_hashmap_with_numeric_key_will_error_with_string_keys() { use 
std::collections::HashMap; use Decodable; let json_str = "{\"a\":true}"; let json_obj = match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), Ok(o) => o }; let mut decoder = Decoder::new(json_obj); let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder); assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string()))); } /* Drives the pull Parser over `src` and checks every (event, stack-state) pair against `expected`, in order. */ fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement>)>) { let mut parser = Parser::new(src.chars()); let mut i = 0; loop { let evt = match parser.next() { Some(e) => e, None => { break; } }; let (ref expected_evt, ref expected_stack) = expected[i]; if !parser.stack().is_equal_to(expected_stack) { panic!("Parser stack is not equal to {:?}", expected_stack); } assert_eq!(&evt, expected_evt); i+=1; } } /* Full streaming walkthrough of a mixed object: keys and array indices appear on the stack. */ #[test] fn test_streaming_parser() { assert_stream_equal( r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, vec![ (ObjectStart, vec![]), (StringValue("bar".to_string()), vec![StackElement::Key("foo")]), (ArrayStart, vec![StackElement::Key("array")]), (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]), (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]), (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]), (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]), (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]), (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]), (ArrayEnd, vec![StackElement::Key("array")]), (ArrayStart, vec![StackElement::Key("idents")]), (NullValue, vec![StackElement::Key("idents"), StackElement::Index(0)]), (BooleanValue(true), vec![StackElement::Key("idents"), StackElement::Index(1)]), (BooleanValue(false), vec![StackElement::Key("idents"), StackElement::Index(2)]), (ArrayEnd, vec![StackElement::Key("idents")]), (ObjectEnd, vec![]), ] ); } /* Helper: run the parser to exhaustion and return the final event (used to probe errors). */ fn last_event(src: &str) -> JsonEvent { let 
mut parser = Parser::new(src.chars()); let mut evt = NullValue; loop { evt = match parser.next() { Some(e) => e, None => return evt, } } } /* Streaming object parsing must produce the same positioned errors as the batch from_str tests. */ #[test] fn test_read_object_streaming() { assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3))); assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2))); assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5))); assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6))); assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6))); assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7))); assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8))); assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8))); assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8))); assert_stream_equal( "{}", vec![(ObjectStart, vec![]), (ObjectEnd, vec![])] ); assert_stream_equal( "{\"a\": 3}", vec![ (ObjectStart, vec![]), (U64Value(3), vec![StackElement::Key("a")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( "{ \"a\": null, \"b\" : true }", vec![ (ObjectStart, vec![]), (NullValue, vec![StackElement::Key("a")]), (BooleanValue(true), vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( "{\"a\" : 1.0 ,\"b\": [ true ]}", vec![ (ObjectStart, vec![]), (F64Value(1.0), vec![StackElement::Key("a")]), (ArrayStart, vec![StackElement::Key("b")]), (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]), (ArrayEnd, vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); assert_stream_equal( r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#, vec![ (ObjectStart, vec![]), (F64Value(1.0), vec![StackElement::Key("a")]), (ArrayStart, 
vec![StackElement::Key("b")]), (BooleanValue(true), vec![StackElement::Key("b"), StackElement::Index(0)]), (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"), StackElement::Index(1)]), (ObjectStart, vec![StackElement::Key("b"), StackElement::Index(2)]), (ObjectStart, vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")]), (NullValue, vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c"), StackElement::Key("d")]), (ObjectEnd, vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")]), (ObjectEnd, vec![StackElement::Key("b"), StackElement::Index(2)]), (ArrayEnd, vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); } /* Streaming array parsing: index elements are tracked on the parser stack, nesting included. */ #[test] fn test_read_array_streaming() { assert_stream_equal( "[]", vec![ (ArrayStart, vec![]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[ ]", vec![ (ArrayStart, vec![]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[true]", vec![ (ArrayStart, vec![]), (BooleanValue(true), vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[ false ]", vec![ (ArrayStart, vec![]), (BooleanValue(false), vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[null]", vec![ (ArrayStart, vec![]), (NullValue, vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[3, 1]", vec![ (ArrayStart, vec![]), (U64Value(3), vec![StackElement::Index(0)]), (U64Value(1), vec![StackElement::Index(1)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "\n[3, 2]\n", vec![ (ArrayStart, vec![]), (U64Value(3), vec![StackElement::Index(0)]), (U64Value(2), vec![StackElement::Index(1)]), (ArrayEnd, vec![]), ] ); assert_stream_equal( "[2, [4, 1]]", vec![ (ArrayStart, vec![]), (U64Value(2), vec![StackElement::Index(0)]), (ArrayStart, vec![StackElement::Index(1)]), (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]), (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]), (ArrayEnd, vec![StackElement::Index(1)]), 
(ArrayEnd, vec![]), ] ); assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); } /* Streaming counterpart of test_trailing_characters: the last event carries the error. */ #[test] fn test_trailing_characters_streaming() { assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5))); assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6))); assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2))); assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3))); assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3))); } /* Streaming counterpart of test_read_identifiers. */ #[test] fn test_read_identifiers_streaming() { assert_eq!(Parser::new("null".chars()).next(), Some(NullValue)); assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true))); assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false))); assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4))); assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2))); assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3))); } /* Direct unit test of the parser's Stack type: push/bump/pop plus the query methods (is_equal_to, starts_with, ends_with, last_is_index, get). */ #[test] fn test_stack() { let mut stack = Stack::new(); assert!(stack.is_empty()); assert!(stack.is_empty()); assert!(!stack.last_is_index()); stack.push_index(0); stack.bump_index(); assert!(stack.len() == 1); assert!(stack.is_equal_to(&[StackElement::Index(1)])); assert!(stack.starts_with(&[StackElement::Index(1)])); 
assert!(stack.ends_with(&[StackElement::Index(1)])); assert!(stack.last_is_index()); assert!(stack.get(0) == StackElement::Index(1)); /* Push a key on top of the index; every prefix/suffix query must still hold. */ stack.push_key("foo".to_string()); assert!(stack.len() == 2); assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.starts_with(&[StackElement::Index(1)])); assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.ends_with(&[StackElement::Key("foo")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == StackElement::Index(1)); assert!(stack.get(1) == StackElement::Key("foo")); /* Second key: checks three-element prefixes/suffixes. */ stack.push_key("bar".to_string()); assert!(stack.len() == 3); assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo"), StackElement::Key("bar")])); assert!(stack.starts_with(&[StackElement::Index(1)])); assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo"), StackElement::Key("bar")])); assert!(stack.ends_with(&[StackElement::Key("bar")])); assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")])); assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo"), StackElement::Key("bar")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == StackElement::Index(1)); assert!(stack.get(1) == StackElement::Key("foo")); assert!(stack.get(2) == StackElement::Key("bar")); /* Popping restores the previous two-element state exactly. */ stack.pop(); assert!(stack.len() == 2); assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.starts_with(&[StackElement::Index(1)])); assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); assert!(stack.ends_with(&[StackElement::Key("foo")])); assert!(!stack.last_is_index()); assert!(stack.get(0) == StackElement::Index(1)); 
assert!(stack.get(1) == StackElement::Key("foo")); } #[test] fn test_to_json() { use std::collections::{HashMap,BTreeMap}; use super::ToJson; let array2 = Array(vec![U64(1), U64(2)]); let array3 = Array(vec![U64(1), U64(2), U64(3)]); let object = { let mut tree_map = BTreeMap::new(); tree_map.insert("a".to_string(), U64(1)); tree_map.insert("b".to_string(), U64(2)); Object(tree_map) }; assert_eq!(array2.to_json(), array2); assert_eq!(object.to_json(), object); assert_eq!(3_isize.to_json(), I64(3)); assert_eq!(4_i8.to_json(), I64(4)); assert_eq!(5_i16.to_json(), I64(5)); assert_eq!(6_i32.to_json(), I64(6)); assert_eq!(7_i64.to_json(), I64(7)); assert_eq!(8_usize.to_json(), U64(8)); assert_eq!(9_u8.to_json(), U64(9)); assert_eq!(10_u16.to_json(), U64(10)); assert_eq!(11_u32.to_json(), U64(11)); assert_eq!(12_u64.to_json(), U64(12)); assert_eq!(13.0_f32.to_json(), F64(13.0_f64)); assert_eq!(14.0_f64.to_json(), F64(14.0_f64)); assert_eq!(().to_json(), Null); assert_eq!(f32::INFINITY.to_json(), Null); assert_eq!(f64::NAN.to_json(), Null); assert_eq!(true.to_json(), Boolean(true)); assert_eq!(false.to_json(), Boolean(false)); assert_eq!("abc".to_json(), String("abc".to_string())); assert_eq!("abc".to_string().to_json(), String("abc".to_string())); assert_eq!((1_usize, 2_usize).to_json(), array2); assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3); assert_eq!([1_usize, 2_usize].to_json(), array2); assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3); assert_eq!((vec![1_usize, 2_usize]).to_json(), array2); assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3); let mut tree_map = BTreeMap::new(); tree_map.insert("a".to_string(), 1 as usize); tree_map.insert("b".to_string(), 2); assert_eq!(tree_map.to_json(), object); let mut hash_map = HashMap::new(); hash_map.insert("a".to_string(), 1 as usize); hash_map.insert("b".to_string(), 2); assert_eq!(hash_map.to_json(), object); assert_eq!(Some(15).to_json(), I64(15)); assert_eq!(Some(15 as usize).to_json(), 
U64(15)); assert_eq!(None::<isize>.to_json(), Null); } #[test] fn test_encode_hashmap_with_arbitrary_key() { use std::collections::HashMap; #[derive(PartialEq, Eq, Hash, RustcEncodable)] struct ArbitraryType(usize); let mut hm: HashMap<ArbitraryType, bool> = HashMap::new(); hm.insert(ArbitraryType(1), true); let mut mem_buf = string::String::new(); let mut encoder = Encoder::new(&mut mem_buf); let result = hm.encode(&mut encoder); match result.unwrap_err() { EncoderError::BadHashmapKey => (), _ => panic!("expected bad hash map key") } } #[bench] fn bench_streaming_small(b: &mut Bencher) { b.iter( || { let mut parser = Parser::new( r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#.chars() ); loop { match parser.next() { None => return, _ => {} } } }); } #[bench] fn bench_small(b: &mut Bencher) { b.iter( || { let _ = from_str(r#"{ "a": 1.0, "b": [ true, "foo\nbar", { "c": {"d": null} } ] }"#); }); } fn big_json() -> string::String { let mut src = "[\n".to_string(); for _ in 0..500 { src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \ [1,2,3]},"#); } src.push_str("{}]"); return src; } #[bench] fn bench_streaming_large(b: &mut Bencher) { let src = big_json(); b.iter( || { let mut parser = Parser::new(src.chars()); loop { match parser.next() { None => return, _ => {} } } }); } #[bench] fn bench_large(b: &mut Bencher) { let src = big_json(); b.iter( || { let _ = from_str(&src); }); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libserialize/hex.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Hex binary-to-text encoding

pub use self::FromHexError::*;

use std::fmt;
use std::error;

/// A trait for converting a value to hexadecimal encoding
pub trait ToHex {
    /// Converts the value of `self` to a hex value, returning the owned
    /// string.
    fn to_hex(&self) -> String;
}

// Lookup table mapping a nibble value (0..=15) to its lowercase ASCII hex digit.
const CHARS: &[u8] = b"0123456789abcdef";

impl ToHex for [u8] {
    /// Turn a vector of `u8` bytes into a hexadecimal string.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(rustc_private)]
    ///
    /// extern crate serialize;
    /// use serialize::hex::ToHex;
    ///
    /// fn main () {
    ///     let str = [52,32].to_hex();
    ///     println!("{}", str);
    /// }
    /// ```
    fn to_hex(&self) -> String {
        // Every input byte expands to exactly two output characters.
        let mut v = Vec::with_capacity(self.len() * 2);
        for &byte in self {
            v.push(CHARS[(byte >> 4) as usize]);  // high nibble first
            v.push(CHARS[(byte & 0xf) as usize]); // then low nibble
        }

        // Safe: every byte pushed above comes from CHARS, which is pure ASCII,
        // so `v` is guaranteed to be valid UTF-8.
        unsafe {
            String::from_utf8_unchecked(v)
        }
    }
}

/// A trait for converting hexadecimal encoded values
pub trait FromHex {
    /// Converts the value of `self`, interpreted as hexadecimal encoded data,
    /// into an owned vector of bytes, returning the vector.
    fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;
}

/// Errors that can occur when decoding a hex encoded string
#[derive(Copy, Clone, Debug)]
pub enum FromHexError {
    /// The input contained a character not part of the hex format
    InvalidHexCharacter(char, usize),
    /// The input had an invalid length
    InvalidHexLength,
}

impl fmt::Display for FromHexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            InvalidHexCharacter(ch, idx) =>
                write!(f, "Invalid character '{}' at position {}", ch, idx),
            InvalidHexLength => write!(f, "Invalid input length"),
        }
    }
}

impl error::Error for FromHexError {
    fn description(&self) -> &str {
        match *self {
            InvalidHexCharacter(..) => "invalid character",
            InvalidHexLength => "invalid length",
        }
    }
}

impl FromHex for str {
    /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
    /// to the byte values it encodes.
    ///
    /// You can use the `String::from_utf8` function to turn a
    /// `Vec<u8>` into a string with characters corresponding to those values.
    ///
    /// # Examples
    ///
    /// This converts a string literal to hexadecimal and back.
    ///
    /// ```
    /// #![feature(rustc_private)]
    ///
    /// extern crate serialize;
    /// use serialize::hex::{FromHex, ToHex};
    ///
    /// fn main () {
    ///     let hello_str = "Hello, World".as_bytes().to_hex();
    ///     println!("{}", hello_str);
    ///     let bytes = hello_str.from_hex().unwrap();
    ///     println!("{:?}", bytes);
    ///     let result_str = String::from_utf8(bytes).unwrap();
    ///     println!("{}", result_str);
    /// }
    /// ```
    fn from_hex(&self) -> Result<Vec<u8>, FromHexError> {
        // This may be an overestimate if there is any whitespace
        let mut b = Vec::with_capacity(self.len() / 2);
        // `modulus` counts how many hex digits of the current byte have been
        // consumed (0 or 1); `buf` accumulates their nibbles, high nibble
        // shifted in first.
        let mut modulus = 0;
        let mut buf = 0;

        for (idx, byte) in self.bytes().enumerate() {
            buf <<= 4;

            match byte {
                b'A'..=b'F' => buf |= byte - b'A' + 10,
                b'a'..=b'f' => buf |= byte - b'a' + 10,
                b'0'..=b'9' => buf |= byte - b'0',
                b' '|b'\r'|b'\n'|b'\t' => {
                    // Whitespace is ignored: undo the shift performed above
                    // and move on without advancing `modulus`.
                    buf >>= 4;
                    continue
                }
                _ => {
                    // Recover the full character for the error message. `idx`
                    // points at the first non-matching byte, which is the lead
                    // byte of its character, so the slice starts on a char
                    // boundary.
                    let ch = self[idx..].chars().next().unwrap();
                    return Err(InvalidHexCharacter(ch, idx))
                }
            }

            modulus += 1;
            if modulus == 2 {
                // Two nibbles collected: emit one decoded byte.
                modulus = 0;
                b.push(buf);
            }
        }

        match modulus {
            0 => Ok(b.into_iter().collect()),
            // A leftover nibble means the digit count (ignoring whitespace)
            // was odd.
            _ => Err(InvalidHexLength),
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use hex::{FromHex, ToHex};

    #[test]
    pub fn test_to_hex() {
        assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
    }

    #[test]
    pub fn test_from_hex_okay() {
        assert_eq!("666f6f626172".from_hex().unwrap(),
                   b"foobar");
        assert_eq!("666F6F626172".from_hex().unwrap(),
                   b"foobar");
    }

    #[test]
    pub fn test_from_hex_odd_len() {
        assert!("666".from_hex().is_err());
        assert!("66 6".from_hex().is_err());
    }

    #[test]
    pub fn test_from_hex_invalid_char() {
        assert!("66y6".from_hex().is_err());
    }

    #[test]
    pub fn test_from_hex_ignores_whitespace() {
        assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
                   b"foobar");
    }

    #[test]
    pub fn test_to_hex_all_bytes() {
        for i in 0..256 {
            assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
        }
    }

    #[test]
    pub fn test_from_hex_all_bytes() {
        for i in 0..256 {
            let ii: &[u8] = &[i as u8];
            assert_eq!(format!("{:02x}", i as usize).from_hex()
                                                    .unwrap(),
                       ii);
            assert_eq!(format!("{:02X}", i as usize).from_hex()
                                                    .unwrap(),
                       ii);
        }
    }

    #[bench]
    pub fn bench_to_hex(b: &mut Bencher) {
        let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
                 ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
        b.iter(|| {
            s.as_bytes().to_hex();
        });
        b.bytes = s.len() as u64;
    }

    #[bench]
    pub fn bench_from_hex(b: &mut Bencher) {
        let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
                 ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
        let sb = s.as_bytes().to_hex();
        b.iter(|| {
            sb.from_hex().unwrap();
        });
        b.bytes = sb.len() as u64;
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_cratesio_shim/Cargo.toml
# This crate exists to allow rustc to link certain crates from crates.io into # the distribution. This doesn't work normally because: # # - Cargo always builds dependencies as rlibs: # https://github.com/rust-lang/cargo/issues/629 # - rustc wants to avoid multiple definitions of the same symbol, so it refuses # to link multiple dylibs containing the same rlib # - multiple dylibs depend on the same crates.io crates # # This solution works by including all the conflicting rlibs in a single dylib, # which is then linked into all dylibs that depend on these crates.io crates. # The result is that each rlib only appears once, and things work! [package] authors = ["The Rust Project Developers"] name = "rustc_cratesio_shim" version = "0.0.0" [dependencies] bitflags = "1.0" log = "0.4"
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_cratesio_shim
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_cratesio_shim/src/lib.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // See Cargo.toml for a comment explaining this crate. #![allow(unused_extern_crates)] #![cfg_attr(not(stage0), feature(nll))] extern crate bitflags; extern crate log;
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/Cargo.toml
[package] authors = ["The Rust Project Developers"] name = "rustc_errors" version = "0.0.0" [lib] name = "rustc_errors" path = "lib.rs" [dependencies] serialize = { path = "../libserialize" } syntax_pos = { path = "../libsyntax_pos" } rustc_data_structures = { path = "../librustc_data_structures" } unicode-width = "0.1.4" atty = "0.2" termcolor = "0.3"
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/snippet.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Code for annotating snippets.

use Level;

/// A single source line together with all of the annotations attached to it.
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Line {
    pub line_index: usize,
    pub annotations: Vec<Annotation>,
}

/// An annotation that spans several source lines, identified by its start and
/// end line/column and a `depth` used to nest multiple multiline annotations.
#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct MultilineAnnotation {
    pub depth: usize,
    pub line_start: usize,
    pub line_end: usize,
    pub start_col: usize,
    pub end_col: usize,
    pub is_primary: bool,
    pub label: Option<String>,
}

impl MultilineAnnotation {
    /// Nests this annotation one level deeper.
    pub fn increase_depth(&mut self) {
        self.depth += 1;
    }

    /// Single-column `MultilineStart` annotation marking the first character
    /// of the span (`start_col .. start_col + 1`); the label, if any, is only
    /// emitted on the end marker.
    pub fn as_start(&self) -> Annotation {
        Annotation {
            start_col: self.start_col,
            end_col: self.start_col + 1,
            is_primary: self.is_primary,
            label: None,
            annotation_type: AnnotationType::MultilineStart(self.depth)
        }
    }

    /// Single-column `MultilineEnd` annotation marking the last character of
    /// the span (`end_col - 1 .. end_col`, saturating so an empty span stays
    /// valid); carries the label.
    pub fn as_end(&self) -> Annotation {
        Annotation {
            start_col: self.end_col.saturating_sub(1),
            end_col: self.end_col,
            is_primary: self.is_primary,
            label: self.label.clone(),
            annotation_type: AnnotationType::MultilineEnd(self.depth)
        }
    }

    /// Zero-width `MultilineLine` placeholder used for the intermediate lines
    /// of the span (see the note on `AnnotationType::MultilineLine`).
    pub fn as_line(&self) -> Annotation {
        Annotation {
            start_col: 0,
            end_col: 0,
            is_primary: self.is_primary,
            label: None,
            annotation_type: AnnotationType::MultilineLine(self.depth)
        }
    }
}

#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub enum AnnotationType {
    /// Annotation under a single line of code
    Singleline,

    /// Annotation enclosing the first and last character of a multiline span
    Multiline(MultilineAnnotation),

    // The Multiline type above is replaced with the following three in order
    // to reuse the current label drawing code.
    //
    // Each of these corresponds to one part of the following diagram:
    //
    //     x |       foo(1 + bar(x,
    //       |  _________^              < MultilineStart
    //     x | |         y),            < MultilineLine
    //       | |______________^ label   < MultilineEnd
    //     x |       z);
    /// Annotation marking the first character of a fully shown multiline span
    MultilineStart(usize),
    /// Annotation marking the last character of a fully shown multiline span
    MultilineEnd(usize),
    /// Line at the left enclosing the lines of a fully shown multiline span
    // Just a placeholder for the drawing algorithm, to know that it shouldn't skip the first 4
    // and last 2 lines of code. The actual line is drawn in `emit_message_default` and not in
    // `draw_multiline_line`.
    MultilineLine(usize),
}

#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct Annotation {
    /// Start column, 0-based indexing -- counting *characters*, not
    /// utf-8 bytes. Note that it is important that this field goes
    /// first, so that when we sort, we sort orderings by start
    /// column.
    pub start_col: usize,

    /// End column within the line (exclusive)
    pub end_col: usize,

    /// Is this annotation derived from primary span
    pub is_primary: bool,

    /// Optional label to display adjacent to the annotation.
    pub label: Option<String>,

    /// Is this a single line, multiline or multiline span minimized down to a
    /// smaller span.
    pub annotation_type: AnnotationType,
}

impl Annotation {
    /// Whether this annotation is a vertical line placeholder.
    pub fn is_line(&self) -> bool {
        if let AnnotationType::MultilineLine(_) = self.annotation_type {
            true
        } else {
            false
        }
    }

    /// Whether this annotation is any of the multiline variants
    /// (`Multiline`, `MultilineStart`, `MultilineLine` or `MultilineEnd`).
    pub fn is_multiline(&self) -> bool {
        match self.annotation_type {
            AnnotationType::Multiline(_) |
            AnnotationType::MultilineStart(_) |
            AnnotationType::MultilineLine(_) |
            AnnotationType::MultilineEnd(_) => true,
            _ => false,
        }
    }

    /// Width of the annotation in columns, as the absolute difference of
    /// `start_col` and `end_col`.
    pub fn len(&self) -> usize {
        // Account for usize underflows
        if self.end_col > self.start_col {
            self.end_col - self.start_col
        } else {
            self.start_col - self.end_col
        }
    }

    /// Whether this annotation carries a non-empty label.
    pub fn has_label(&self) -> bool {
        if let Some(ref label) = self.label {
            // Consider labels with no text as effectively not being there
            // to avoid weird output with unnecessary vertical lines, like:
            //
            //     X | fn foo(x: u32) {
            //       | -------^------
            //       | |      |
            //       | |
            //       |
            //
            // Note that this would be the complete output users would see.
            label.len() > 0
        } else {
            false
        }
    }

    /// Whether this annotation forces a vertical gap to be kept in the output
    /// (true only for `MultilineStart` and `MultilineEnd` markers).
    pub fn takes_space(&self) -> bool {
        // Multiline annotations always have to keep vertical space.
        match self.annotation_type {
            AnnotationType::MultilineStart(_) |
            AnnotationType::MultilineEnd(_) => true,
            _ => false,
        }
    }
}

/// A run of output text rendered with a single style.
#[derive(Debug)]
pub struct StyledString {
    pub text: String,
    pub style: Style,
}

/// The set of styles the emitter can apply to a piece of diagnostic output.
#[derive(Copy, Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)]
pub enum Style {
    MainHeaderMsg,
    HeaderMsg,
    LineAndColumn,
    LineNumber,
    Quotation,
    UnderlinePrimary,
    UnderlineSecondary,
    LabelPrimary,
    LabelSecondary,
    OldSchoolNoteText,
    NoStyle,
    Level(Level),
    Highlight,
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/styled_buffer.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Code for creating styled buffers use snippet::{Style, StyledString}; #[derive(Debug)] pub struct StyledBuffer { text: Vec<Vec<char>>, styles: Vec<Vec<Style>>, } impl StyledBuffer { pub fn new() -> StyledBuffer { StyledBuffer { text: vec![], styles: vec![], } } fn replace_tabs(&mut self) { for (line_pos, line) in self.text.iter_mut().enumerate() { let mut tab_pos = vec![]; for (pos, c) in line.iter().enumerate() { if *c == '\t' { tab_pos.push(pos); } } // start with the tabs at the end of the line to replace them with 4 space chars for pos in tab_pos.iter().rev() { assert_eq!(line.remove(*pos), '\t'); // fix the position of the style to match up after replacing the tabs let s = self.styles[line_pos].remove(*pos); for _ in 0..4 { line.insert(*pos, ' '); self.styles[line_pos].insert(*pos, s); } } } } pub fn render(&mut self) -> Vec<Vec<StyledString>> { let mut output: Vec<Vec<StyledString>> = vec![]; let mut styled_vec: Vec<StyledString> = vec![]; // before we render, replace tabs with spaces self.replace_tabs(); for (row, row_style) in self.text.iter().zip(&self.styles) { let mut current_style = Style::NoStyle; let mut current_text = String::new(); for (&c, &s) in row.iter().zip(row_style) { if s != current_style { if !current_text.is_empty() { styled_vec.push(StyledString { text: current_text, style: current_style, }); } current_style = s; current_text = String::new(); } current_text.push(c); } if !current_text.is_empty() { styled_vec.push(StyledString { text: current_text, style: current_style, }); } 
// We're done with the row, push and keep going output.push(styled_vec); styled_vec = vec![]; } output } fn ensure_lines(&mut self, line: usize) { while line >= self.text.len() { self.text.push(vec![]); self.styles.push(vec![]); } } pub fn putc(&mut self, line: usize, col: usize, chr: char, style: Style) { self.ensure_lines(line); if col < self.text[line].len() { self.text[line][col] = chr; self.styles[line][col] = style; } else { let mut i = self.text[line].len(); while i < col { self.text[line].push(' '); self.styles[line].push(Style::NoStyle); i += 1; } self.text[line].push(chr); self.styles[line].push(style); } } pub fn puts(&mut self, line: usize, col: usize, string: &str, style: Style) { let mut n = col; for c in string.chars() { self.putc(line, n, c, style); n += 1; } } pub fn prepend(&mut self, line: usize, string: &str, style: Style) { self.ensure_lines(line); let string_len = string.len(); // Push the old content over to make room for new content for _ in 0..string_len { self.styles[line].insert(0, Style::NoStyle); self.text[line].insert(0, ' '); } self.puts(line, 0, string, style); } pub fn append(&mut self, line: usize, string: &str, style: Style) { if line >= self.text.len() { self.puts(line, 0, string, style); } else { let col = self.text[line].len(); self.puts(line, col, string, style); } } pub fn num_lines(&self) -> usize { self.text.len() } pub fn set_style_range(&mut self, line: usize, col_start: usize, col_end: usize, style: Style, overwrite: bool) { for col in col_start..col_end { self.set_style(line, col, style, overwrite); } } pub fn set_style(&mut self, line: usize, col: usize, style: Style, overwrite: bool) { if let Some(ref mut line) = self.styles.get_mut(line) { if let Some(s) = line.get_mut(col) { if *s == Style::NoStyle || *s == Style::Quotation || overwrite { *s = style; } } } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/registry.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::collections::HashMap; #[derive(Clone)] pub struct Registry { descriptions: HashMap<&'static str, &'static str>, } impl Registry { pub fn new(descriptions: &[(&'static str, &'static str)]) -> Registry { Registry { descriptions: descriptions.iter().cloned().collect() } } pub fn find_description(&self, code: &str) -> Option<&'static str> { self.descriptions.get(code).cloned() } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/lib.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(custom_attribute)] #![allow(unused_attributes)] #![feature(range_contains)] #![cfg_attr(unix, feature(libc))] #![cfg_attr(not(stage0), feature(nll))] #![feature(optin_builtin_traits)] extern crate atty; extern crate termcolor; #[cfg(unix)] extern crate libc; extern crate rustc_data_structures; extern crate serialize as rustc_serialize; extern crate syntax_pos; extern crate unicode_width; pub use emitter::ColorConfig; use self::Level::*; use emitter::{Emitter, EmitterWriter}; use rustc_data_structures::sync::{self, Lrc, Lock, LockCell}; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::stable_hasher::StableHasher; use std::borrow::Cow; use std::cell::Cell; use std::{error, fmt}; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::SeqCst; use std::panic; use termcolor::{ColorSpec, Color}; mod diagnostic; mod diagnostic_builder; pub mod emitter; mod snippet; pub mod registry; mod styled_buffer; mod lock; use syntax_pos::{BytePos, Loc, FileLinesResult, SourceFile, FileName, MultiSpan, Span, NO_EXPANSION}; #[derive(Copy, Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub enum Applicability { MachineApplicable, HasPlaceholders, MaybeIncorrect, Unspecified } #[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub struct 
CodeSuggestion { /// Each substitute can have multiple variants due to multiple /// applicable suggestions /// /// `foo.bar` might be replaced with `a.b` or `x.y` by replacing /// `foo` and `bar` on their own: /// /// ``` /// vec![ /// Substitution { parts: vec![(0..3, "a"), (4..7, "b")] }, /// Substitution { parts: vec![(0..3, "x"), (4..7, "y")] }, /// ] /// ``` /// /// or by replacing the entire span: /// /// ``` /// vec![ /// Substitution { parts: vec![(0..7, "a.b")] }, /// Substitution { parts: vec![(0..7, "x.y")] }, /// ] /// ``` pub substitutions: Vec<Substitution>, pub msg: String, pub show_code_when_inline: bool, /// Whether or not the suggestion is approximate /// /// Sometimes we may show suggestions with placeholders, /// which are useful for users but not useful for /// tools like rustfix pub applicability: Applicability, } #[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] /// See the docs on `CodeSuggestion::substitutions` pub struct Substitution { pub parts: Vec<SubstitutionPart>, } #[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub struct SubstitutionPart { pub span: Span, pub snippet: String, } pub type SourceMapperDyn = dyn SourceMapper + sync::Send + sync::Sync; pub trait SourceMapper { fn lookup_char_pos(&self, pos: BytePos) -> Loc; fn span_to_lines(&self, sp: Span) -> FileLinesResult; fn span_to_string(&self, sp: Span) -> String; fn span_to_filename(&self, sp: Span) -> FileName; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>; fn call_span_if_macro(&self, sp: Span) -> Span; fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool; fn doctest_offset_line(&self, line: usize) -> usize; } impl CodeSuggestion { /// Returns the assembled code suggestions and whether they should be shown with an underline. 
pub fn splice_lines(&self, cm: &SourceMapperDyn) -> Vec<(String, Vec<SubstitutionPart>)> { use syntax_pos::{CharPos, Loc, Pos}; fn push_trailing(buf: &mut String, line_opt: Option<&Cow<str>>, lo: &Loc, hi_opt: Option<&Loc>) { let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi| hi.col.to_usize())); if let Some(line) = line_opt { if let Some(lo) = line.char_indices().map(|(i, _)| i).nth(lo) { let hi_opt = hi_opt.and_then(|hi| line.char_indices().map(|(i, _)| i).nth(hi)); buf.push_str(match hi_opt { Some(hi) => &line[lo..hi], None => &line[lo..], }); } if let None = hi_opt { buf.push('\n'); } } } assert!(!self.substitutions.is_empty()); self.substitutions.iter().cloned().map(|mut substitution| { // Assumption: all spans are in the same file, and all spans // are disjoint. Sort in ascending order. substitution.parts.sort_by_key(|part| part.span.lo()); // Find the bounding span. let lo = substitution.parts.iter().map(|part| part.span.lo()).min().unwrap(); let hi = substitution.parts.iter().map(|part| part.span.hi()).min().unwrap(); let bounding_span = Span::new(lo, hi, NO_EXPANSION); let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); // To build up the result, we do this for each span: // - push the line segment trailing the previous span // (at the beginning a "phantom" span pointing at the start of the line) // - push lines between the previous and current span (if any) // - if the previous and current span are not on the same line // push the line segment leading up to the current span // - splice in the span substitution // // Finally push the trailing line segment of the last span let fm = &lines.file; let mut prev_hi = cm.lookup_char_pos(bounding_span.lo()); prev_hi.col = CharPos::from_usize(0); let mut prev_line = fm.get_line(lines.lines[0].line_index); let mut buf = String::new(); for part in &substitution.parts { let cur_lo = cm.lookup_char_pos(part.span.lo()); if prev_hi.line == cur_lo.line { push_trailing(&mut buf, 
prev_line.as_ref(), &prev_hi, Some(&cur_lo)); } else { push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, None); // push lines between the previous and current span (if any) for idx in prev_hi.line..(cur_lo.line - 1) { if let Some(line) = fm.get_line(idx) { buf.push_str(line.as_ref()); buf.push('\n'); } } if let Some(cur_line) = fm.get_line(cur_lo.line - 1) { buf.push_str(&cur_line[..cur_lo.col.to_usize()]); } } buf.push_str(&part.snippet); prev_hi = cm.lookup_char_pos(part.span.hi()); prev_line = fm.get_line(prev_hi.line - 1); } // if the replacement already ends with a newline, don't print the next line if !buf.ends_with('\n') { push_trailing(&mut buf, prev_line.as_ref(), &prev_hi, None); } // remove trailing newlines while buf.ends_with('\n') { buf.pop(); } (buf, substitution.parts) }).collect() } } /// Used as a return value to signify a fatal error occurred. (It is also /// used as the argument to panic at the moment, but that will eventually /// not be true.) #[derive(Copy, Clone, Debug)] #[must_use] pub struct FatalError; pub struct FatalErrorMarker; // Don't implement Send on FatalError. This makes it impossible to panic!(FatalError). // We don't want to invoke the panic handler and print a backtrace for fatal errors. impl !Send for FatalError {} impl FatalError { pub fn raise(self) -> ! { panic::resume_unwind(Box::new(FatalErrorMarker)) } } impl fmt::Display for FatalError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "parser fatal error") } } impl error::Error for FatalError { fn description(&self) -> &str { "The parser has encountered a fatal error" } } /// Signifies that the compiler died with an explicit call to `.bug` /// or `.span_bug` rather than a failed assertion, etc. 
#[derive(Copy, Clone, Debug)] pub struct ExplicitBug; impl fmt::Display for ExplicitBug { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "parser internal bug") } } impl error::Error for ExplicitBug { fn description(&self) -> &str { "The parser has encountered an internal bug" } } pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString, DiagnosticId}; pub use diagnostic_builder::DiagnosticBuilder; /// A handler deals with errors; certain errors /// (fatal, bug, unimpl) may cause immediate exit, /// others log errors for later reporting. pub struct Handler { pub flags: HandlerFlags, err_count: AtomicUsize, emitter: Lock<Box<dyn Emitter + sync::Send>>, continue_after_error: LockCell<bool>, delayed_span_bugs: Lock<Vec<Diagnostic>>, // This set contains the `DiagnosticId` of all emitted diagnostics to avoid // emitting the same diagnostic with extended help (`--teach`) twice, which // would be uneccessary repetition. taught_diagnostics: Lock<FxHashSet<DiagnosticId>>, /// Used to suggest rustc --explain <error code> emitted_diagnostic_codes: Lock<FxHashSet<DiagnosticId>>, // This set contains a hash of every diagnostic that has been emitted by // this handler. These hashes is used to avoid emitting the same error // twice. emitted_diagnostics: Lock<FxHashSet<u128>>, } fn default_track_diagnostic(_: &Diagnostic) {} thread_local!(pub static TRACK_DIAGNOSTICS: Cell<fn(&Diagnostic)> = Cell::new(default_track_diagnostic)); #[derive(Default)] pub struct HandlerFlags { pub can_emit_warnings: bool, pub treat_err_as_bug: bool, pub report_delayed_bugs: bool, pub external_macro_backtrace: bool, } impl Drop for Handler { fn drop(&mut self) { if self.err_count() == 0 { let mut bugs = self.delayed_span_bugs.borrow_mut(); let has_bugs = !bugs.is_empty(); for bug in bugs.drain(..) 
{ DiagnosticBuilder::new_diagnostic(self, bug).emit(); } if has_bugs { panic!("no errors encountered even though `delay_span_bug` issued"); } } } } impl Handler { pub fn with_tty_emitter(color_config: ColorConfig, can_emit_warnings: bool, treat_err_as_bug: bool, cm: Option<Lrc<SourceMapperDyn>>) -> Handler { Handler::with_tty_emitter_and_flags( color_config, cm, HandlerFlags { can_emit_warnings, treat_err_as_bug, .. Default::default() }) } pub fn with_tty_emitter_and_flags(color_config: ColorConfig, cm: Option<Lrc<SourceMapperDyn>>, flags: HandlerFlags) -> Handler { let emitter = Box::new(EmitterWriter::stderr(color_config, cm, false, false)); Handler::with_emitter_and_flags(emitter, flags) } pub fn with_emitter(can_emit_warnings: bool, treat_err_as_bug: bool, e: Box<dyn Emitter + sync::Send>) -> Handler { Handler::with_emitter_and_flags( e, HandlerFlags { can_emit_warnings, treat_err_as_bug, .. Default::default() }) } pub fn with_emitter_and_flags(e: Box<dyn Emitter + sync::Send>, flags: HandlerFlags) -> Handler { Handler { flags, err_count: AtomicUsize::new(0), emitter: Lock::new(e), continue_after_error: LockCell::new(true), delayed_span_bugs: Lock::new(Vec::new()), taught_diagnostics: Lock::new(FxHashSet()), emitted_diagnostic_codes: Lock::new(FxHashSet()), emitted_diagnostics: Lock::new(FxHashSet()), } } pub fn set_continue_after_error(&self, continue_after_error: bool) { self.continue_after_error.set(continue_after_error); } /// Resets the diagnostic error count as well as the cached emitted diagnostics. /// /// NOTE: DO NOT call this function from rustc. It is only meant to be called from external /// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as /// the overall count of emitted error diagnostics. 
pub fn reset_err_count(&self) { *self.emitted_diagnostics.borrow_mut() = FxHashSet(); self.err_count.store(0, SeqCst); } pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new(self, Level::Cancelled, "") } pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); result.set_span(sp); if !self.flags.can_emit_warnings { result.cancel(); } result } pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str, code: DiagnosticId) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); result.set_span(sp); result.code(code); if !self.flags.can_emit_warnings { result.cancel(); } result } pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); if !self.flags.can_emit_warnings { result.cancel(); } result } pub fn struct_span_err<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Error, msg); result.set_span(sp); result } pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str, code: DiagnosticId) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Error, msg); result.set_span(sp); result.code(code); result } // FIXME: This method should be removed (every error should have an associated error code). 
pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new(self, Level::Error, msg) } pub fn struct_err_with_code<'a>( &'a self, msg: &str, code: DiagnosticId, ) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Error, msg); result.code(code); result } pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg); result.set_span(sp); result } pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str, code: DiagnosticId) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg); result.set_span(sp); result.code(code); result } pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new(self, Level::Fatal, msg) } pub fn cancel(&self, err: &mut DiagnosticBuilder) { err.cancel(); } fn panic_if_treat_err_as_bug(&self) { if self.flags.treat_err_as_bug { panic!("encountered error with `-Z treat_err_as_bug"); } } pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> FatalError { self.emit(&sp.into(), msg, Fatal); FatalError } pub fn span_fatal_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) -> FatalError { self.emit_with_code(&sp.into(), msg, code, Fatal); FatalError } pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { self.emit(&sp.into(), msg, Error); } pub fn mut_span_err<'a, S: Into<MultiSpan>>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { let mut result = DiagnosticBuilder::new(self, Level::Error, msg); result.set_span(sp); result } pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) { self.emit_with_code(&sp.into(), msg, code, Error); } pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { self.emit(&sp.into(), msg, Warning); } pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: 
&str, code: DiagnosticId) { self.emit_with_code(&sp.into(), msg, code, Warning); } pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! { self.emit(&sp.into(), msg, Bug); panic!(ExplicitBug); } pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { if self.flags.treat_err_as_bug { // FIXME: don't abort here if report_delayed_bugs is off self.span_bug(sp, msg); } let mut diagnostic = Diagnostic::new(Level::Bug, msg); diagnostic.set_span(sp.into()); self.delay_as_bug(diagnostic); } fn delay_as_bug(&self, diagnostic: Diagnostic) { if self.flags.report_delayed_bugs { DiagnosticBuilder::new_diagnostic(self, diagnostic.clone()).emit(); } self.delayed_span_bugs.borrow_mut().push(diagnostic); } pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { self.emit(&sp.into(), msg, Bug); } pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { self.emit(&sp.into(), msg, Note); } pub fn span_note_diag<'a>(&'a self, sp: Span, msg: &str) -> DiagnosticBuilder<'a> { let mut db = DiagnosticBuilder::new(self, Note, msg); db.set_span(sp); db } pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! { self.span_bug(sp, &format!("unimplemented {}", msg)); } pub fn failure(&self, msg: &str) { DiagnosticBuilder::new(self, FailureNote, msg).emit() } pub fn fatal(&self, msg: &str) -> FatalError { if self.flags.treat_err_as_bug { self.bug(msg); } DiagnosticBuilder::new(self, Fatal, msg).emit(); FatalError } pub fn err(&self, msg: &str) { if self.flags.treat_err_as_bug { self.bug(msg); } let mut db = DiagnosticBuilder::new(self, Error, msg); db.emit(); } pub fn warn(&self, msg: &str) { let mut db = DiagnosticBuilder::new(self, Warning, msg); db.emit(); } pub fn note_without_error(&self, msg: &str) { let mut db = DiagnosticBuilder::new(self, Note, msg); db.emit(); } pub fn bug(&self, msg: &str) -> ! 
{ let mut db = DiagnosticBuilder::new(self, Bug, msg); db.emit(); panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { self.bug(&format!("unimplemented {}", msg)); } fn bump_err_count(&self) { self.panic_if_treat_err_as_bug(); self.err_count.fetch_add(1, SeqCst); } pub fn err_count(&self) -> usize { self.err_count.load(SeqCst) } pub fn has_errors(&self) -> bool { self.err_count() > 0 } pub fn print_error_count(&self) { let s = match self.err_count() { 0 => return, 1 => "aborting due to previous error".to_string(), _ => format!("aborting due to {} previous errors", self.err_count()) }; let _ = self.fatal(&s); let can_show_explain = self.emitter.borrow().should_show_explain(); let are_there_diagnostics = !self.emitted_diagnostic_codes.borrow().is_empty(); if can_show_explain && are_there_diagnostics { let mut error_codes = self.emitted_diagnostic_codes.borrow() .iter() .filter_map(|x| match *x { DiagnosticId::Error(ref s) => Some(s.clone()), _ => None, }) .collect::<Vec<_>>(); if !error_codes.is_empty() { error_codes.sort(); if error_codes.len() > 1 { let limit = if error_codes.len() > 9 { 9 } else { error_codes.len() }; self.failure(&format!("Some errors occurred: {}{}", error_codes[..limit].join(", "), if error_codes.len() > 9 { "..." } else { "." 
})); self.failure(&format!("For more information about an error, try \ `rustc --explain {}`.", &error_codes[0])); } else { self.failure(&format!("For more information about this error, try \ `rustc --explain {}`.", &error_codes[0])); } } } } pub fn abort_if_errors(&self) { if self.err_count() == 0 { return; } FatalError.raise(); } pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) { if lvl == Warning && !self.flags.can_emit_warnings { return; } let mut db = DiagnosticBuilder::new(self, lvl, msg); db.set_span(msp.clone()); db.emit(); if !self.continue_after_error.get() { self.abort_if_errors(); } } pub fn emit_with_code(&self, msp: &MultiSpan, msg: &str, code: DiagnosticId, lvl: Level) { if lvl == Warning && !self.flags.can_emit_warnings { return; } let mut db = DiagnosticBuilder::new_with_code(self, lvl, Some(code), msg); db.set_span(msp.clone()); db.emit(); if !self.continue_after_error.get() { self.abort_if_errors(); } } /// `true` if we haven't taught a diagnostic with this code already. /// The caller must then teach the user about such a diagnostic. /// /// Used to suppress emitting the same error multiple times with extended explanation when /// calling `-Zteach`. 
pub fn must_teach(&self, code: &DiagnosticId) -> bool { self.taught_diagnostics.borrow_mut().insert(code.clone()) } pub fn force_print_db(&self, mut db: DiagnosticBuilder) { self.emitter.borrow_mut().emit(&db); db.cancel(); } fn emit_db(&self, db: &DiagnosticBuilder) { let diagnostic = &**db; TRACK_DIAGNOSTICS.with(|track_diagnostics| { track_diagnostics.get()(diagnostic); }); if let Some(ref code) = diagnostic.code { self.emitted_diagnostic_codes.borrow_mut().insert(code.clone()); } let diagnostic_hash = { use std::hash::Hash; let mut hasher = StableHasher::new(); diagnostic.hash(&mut hasher); hasher.finish() }; // Only emit the diagnostic if we haven't already emitted an equivalent // one: if self.emitted_diagnostics.borrow_mut().insert(diagnostic_hash) { self.emitter.borrow_mut().emit(db); if db.is_error() { self.bump_err_count(); } } } } #[derive(Copy, PartialEq, Clone, Hash, Debug, RustcEncodable, RustcDecodable)] pub enum Level { Bug, Fatal, // An error which while not immediately fatal, should stop the compiler // progressing beyond the current phase. 
PhaseFatal, Error, Warning, Note, Help, Cancelled, FailureNote, } impl fmt::Display for Level { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.to_str().fmt(f) } } impl Level { fn color(self) -> ColorSpec { let mut spec = ColorSpec::new(); match self { Bug | Fatal | PhaseFatal | Error => { spec.set_fg(Some(Color::Red)) .set_intense(true); } Warning => { spec.set_fg(Some(Color::Yellow)) .set_intense(cfg!(windows)); } Note => { spec.set_fg(Some(Color::Green)) .set_intense(true); } Help => { spec.set_fg(Some(Color::Cyan)) .set_intense(true); } FailureNote => {} Cancelled => unreachable!(), } spec } pub fn to_str(self) -> &'static str { match self { Bug => "error: internal compiler error", Fatal | PhaseFatal | Error => "error", Warning => "warning", Note => "note", Help => "help", FailureNote => "", Cancelled => panic!("Shouldn't call on cancelled error"), } } pub fn is_failure_note(&self) -> bool { match *self { FailureNote => true, _ => false, } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/diagnostic_builder.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use Diagnostic; use DiagnosticId; use DiagnosticStyledString; use Applicability; use Level; use Handler; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::thread::panicking; use syntax_pos::{MultiSpan, Span}; /// Used for emitting structured error messages and other diagnostic information. #[must_use] #[derive(Clone)] pub struct DiagnosticBuilder<'a> { pub handler: &'a Handler, diagnostic: Diagnostic, allow_suggestions: bool, } /// In general, the `DiagnosticBuilder` uses deref to allow access to /// the fields and methods of the embedded `diagnostic` in a /// transparent way. *However,* many of the methods are intended to /// be used in a chained way, and hence ought to return `self`. In /// that case, we can't just naively forward to the method on the /// `diagnostic`, because the return type would be a `&Diagnostic` /// instead of a `&DiagnosticBuilder<'a>`. This `forward!` macro makes /// it easy to declare such methods on the builder. macro_rules! forward { // Forward pattern for &self -> &Self (pub fn $n:ident(&self, $($name:ident: $ty:ty),*) -> &Self) => { pub fn $n(&self, $($name: $ty),*) -> &Self { self.diagnostic.$n($($name),*); self } }; // Forward pattern for &mut self -> &mut Self (pub fn $n:ident(&mut self, $($name:ident: $ty:ty),*) -> &mut Self) => { pub fn $n(&mut self, $($name: $ty),*) -> &mut Self { self.diagnostic.$n($($name),*); self } }; // Forward pattern for &mut self -> &mut Self, with S: Into<MultiSpan> // type parameter. 
No obvious way to make this more generic. (pub fn $n:ident<S: Into<MultiSpan>>(&mut self, $($name:ident: $ty:ty),*) -> &mut Self) => { pub fn $n<S: Into<MultiSpan>>(&mut self, $($name: $ty),*) -> &mut Self { self.diagnostic.$n($($name),*); self } }; } impl<'a> Deref for DiagnosticBuilder<'a> { type Target = Diagnostic; fn deref(&self) -> &Diagnostic { &self.diagnostic } } impl<'a> DerefMut for DiagnosticBuilder<'a> { fn deref_mut(&mut self) -> &mut Diagnostic { &mut self.diagnostic } } impl<'a> DiagnosticBuilder<'a> { /// Emit the diagnostic. pub fn emit(&mut self) { if self.cancelled() { return; } self.handler.emit_db(&self); self.cancel(); } /// Buffers the diagnostic for later emission. pub fn buffer(self, buffered_diagnostics: &mut Vec<Diagnostic>) { // We need to use `ptr::read` because `DiagnosticBuilder` // implements `Drop`. let diagnostic; unsafe { diagnostic = ::std::ptr::read(&self.diagnostic); ::std::mem::forget(self); }; buffered_diagnostics.push(diagnostic); } /// Convenience function for internal use, clients should use one of the /// span_* methods instead. pub fn sub<S: Into<MultiSpan>>( &mut self, level: Level, message: &str, span: Option<S>, ) -> &mut Self { let span = span.map(|s| s.into()).unwrap_or(MultiSpan::new()); self.diagnostic.sub(level, message, span, None); self } /// Delay emission of this diagnostic as a bug. /// /// This can be useful in contexts where an error indicates a bug but /// typically this only happens when other compilation errors have already /// happened. In those cases this can be used to defer emission of this /// diagnostic as a bug in the compiler only if no other errors have been /// emitted. /// /// In the meantime, though, callsites are required to deal with the "bug" /// locally in whichever way makes the most sense. pub fn delay_as_bug(&mut self) { self.level = Level::Bug; self.handler.delay_as_bug(self.diagnostic.clone()); self.cancel(); } /// Add a span/label to be included in the resulting snippet. 
/// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, /// then the snippet will just include that `Span`, which is /// called the primary span. pub fn span_label<T: Into<String>>(&mut self, span: Span, label: T) -> &mut Self { self.diagnostic.span_label(span, label); self } forward!(pub fn note_expected_found(&mut self, label: &dyn fmt::Display, expected: DiagnosticStyledString, found: DiagnosticStyledString) -> &mut Self); forward!(pub fn note_expected_found_extra(&mut self, label: &dyn fmt::Display, expected: DiagnosticStyledString, found: DiagnosticStyledString, expected_extra: &dyn fmt::Display, found_extra: &dyn fmt::Display) -> &mut Self); forward!(pub fn note(&mut self, msg: &str) -> &mut Self); forward!(pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self); forward!(pub fn warn(&mut self, msg: &str) -> &mut Self); forward!(pub fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self); forward!(pub fn help(&mut self , msg: &str) -> &mut Self); forward!(pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self); forward!(pub fn span_suggestion_short(&mut self, sp: Span, msg: &str, suggestion: String) -> &mut Self); forward!(pub fn multipart_suggestion( &mut self, msg: &str, suggestion: Vec<(Span, String)> ) -> &mut Self); forward!(pub fn span_suggestion(&mut self, sp: Span, msg: &str, suggestion: String) -> &mut Self); forward!(pub fn span_suggestions(&mut self, sp: Span, msg: &str, suggestions: Vec<String>) -> &mut Self); pub fn span_suggestion_with_applicability(&mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability) -> &mut Self { if !self.allow_suggestions { return self } self.diagnostic.span_suggestion_with_applicability( sp, msg, suggestion, applicability, ); self } pub fn span_suggestions_with_applicability(&mut self, 
sp: Span, msg: &str, suggestions: Vec<String>, applicability: Applicability) -> &mut Self { if !self.allow_suggestions { return self } self.diagnostic.span_suggestions_with_applicability( sp, msg, suggestions, applicability, ); self } pub fn span_suggestion_short_with_applicability(&mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability) -> &mut Self { if !self.allow_suggestions { return self } self.diagnostic.span_suggestion_short_with_applicability( sp, msg, suggestion, applicability, ); self } forward!(pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self); forward!(pub fn code(&mut self, s: DiagnosticId) -> &mut Self); pub fn allow_suggestions(&mut self, allow: bool) -> &mut Self { self.allow_suggestions = allow; self } /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. pub fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new_with_code(handler, level, None, message) } /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. pub fn new_with_code(handler: &'a Handler, level: Level, code: Option<DiagnosticId>, message: &str) -> DiagnosticBuilder<'a> { let diagnostic = Diagnostic::new_with_code(level, code, message); DiagnosticBuilder::new_diagnostic(handler, diagnostic) } /// Creates a new `DiagnosticBuilder` with an already constructed /// diagnostic. pub fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) -> DiagnosticBuilder<'a> { DiagnosticBuilder { handler, diagnostic, allow_suggestions: true, } } } impl<'a> Debug for DiagnosticBuilder<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.diagnostic.fmt(f) } } /// Destructor bomb - a `DiagnosticBuilder` must be either emitted or canceled /// or we emit a bug. 
impl<'a> Drop for DiagnosticBuilder<'a> { fn drop(&mut self) { if !panicking() && !self.cancelled() { let mut db = DiagnosticBuilder::new(self.handler, Level::Bug, "Error constructed but not emitted"); db.emit(); panic!(); } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/emitter.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use self::Destination::*; use syntax_pos::{SourceFile, Span, MultiSpan}; use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId}; use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style}; use styled_buffer::StyledBuffer; use rustc_data_structures::sync::Lrc; use atty; use std::borrow::Cow; use std::io::prelude::*; use std::io; use std::collections::HashMap; use std::cmp::{min, Reverse}; use termcolor::{StandardStream, ColorChoice, ColorSpec, BufferWriter}; use termcolor::{WriteColor, Color, Buffer}; use unicode_width; const ANONYMIZED_LINE_NUM: &str = "LL"; /// Emitter trait for emitting errors. pub trait Emitter { /// Emit a structured diagnostic. 
fn emit(&mut self, db: &DiagnosticBuilder); /// Check if should show explanations about "rustc --explain" fn should_show_explain(&self) -> bool { true } } impl Emitter for EmitterWriter { fn emit(&mut self, db: &DiagnosticBuilder) { let mut primary_span = db.span.clone(); let mut children = db.children.clone(); let mut suggestions: &[_] = &[]; if let Some((sugg, rest)) = db.suggestions.split_first() { if rest.is_empty() && // don't display multi-suggestions as labels sugg.substitutions.len() == 1 && // don't display multipart suggestions as labels sugg.substitutions[0].parts.len() == 1 && // don't display long messages as labels sugg.msg.split_whitespace().count() < 10 && // don't display multiline suggestions as labels !sugg.substitutions[0].parts[0].snippet.contains('\n') { let substitution = &sugg.substitutions[0].parts[0].snippet.trim(); let msg = if substitution.len() == 0 || !sugg.show_code_when_inline { // This substitution is only removal or we explicitly don't want to show the // code inline, don't show it format!("help: {}", sugg.msg) } else { format!("help: {}: `{}`", sugg.msg, substitution) }; primary_span.push_span_label(sugg.substitutions[0].parts[0].span, msg); } else { // if there are multiple suggestions, print them all in full // to be consistent. We could try to figure out if we can // make one (or the first one) inline, but that would give // undue importance to a semi-random suggestion suggestions = &db.suggestions; } } self.fix_multispans_in_std_macros(&mut primary_span, &mut children, db.handler.flags.external_macro_backtrace); self.emit_messages_default(&db.level, &db.styled_message(), &db.code, &primary_span, &children, &suggestions); } fn should_show_explain(&self) -> bool { !self.short_message } } /// maximum number of lines we will print for each error; arbitrary. 
pub const MAX_HIGHLIGHT_LINES: usize = 6; /// maximum number of suggestions to be shown /// /// Arbitrary, but taken from trait import suggestion limit pub const MAX_SUGGESTIONS: usize = 4; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ColorConfig { Auto, Always, Never, } impl ColorConfig { fn to_color_choice(&self) -> ColorChoice { match *self { ColorConfig::Always => ColorChoice::Always, ColorConfig::Never => ColorChoice::Never, ColorConfig::Auto if atty::is(atty::Stream::Stderr) => { ColorChoice::Auto } ColorConfig::Auto => ColorChoice::Never, } } } pub struct EmitterWriter { dst: Destination, cm: Option<Lrc<SourceMapperDyn>>, short_message: bool, teach: bool, ui_testing: bool, } struct FileWithAnnotatedLines { file: Lrc<SourceFile>, lines: Vec<Line>, multiline_depth: usize, } impl EmitterWriter { pub fn stderr(color_config: ColorConfig, code_map: Option<Lrc<SourceMapperDyn>>, short_message: bool, teach: bool) -> EmitterWriter { let dst = Destination::from_stderr(color_config); EmitterWriter { dst, cm: code_map, short_message, teach, ui_testing: false, } } pub fn new(dst: Box<dyn Write + Send>, code_map: Option<Lrc<SourceMapperDyn>>, short_message: bool, teach: bool) -> EmitterWriter { EmitterWriter { dst: Raw(dst), cm: code_map, short_message, teach, ui_testing: false, } } pub fn ui_testing(mut self, ui_testing: bool) -> Self { self.ui_testing = ui_testing; self } fn maybe_anonymized(&self, line_num: usize) -> String { if self.ui_testing { ANONYMIZED_LINE_NUM.to_string() } else { line_num.to_string() } } fn preprocess_annotations(&mut self, msp: &MultiSpan) -> Vec<FileWithAnnotatedLines> { fn add_annotation_to_file(file_vec: &mut Vec<FileWithAnnotatedLines>, file: Lrc<SourceFile>, line_index: usize, ann: Annotation) { for slot in file_vec.iter_mut() { // Look through each of our files for the one we're adding to if slot.file.name == file.name { // See if we already have a line for it for line_slot in &mut slot.lines { if line_slot.line_index == 
line_index { line_slot.annotations.push(ann); return; } } // We don't have a line yet, create one slot.lines.push(Line { line_index, annotations: vec![ann], }); slot.lines.sort(); return; } } // This is the first time we're seeing the file file_vec.push(FileWithAnnotatedLines { file, lines: vec![Line { line_index, annotations: vec![ann], }], multiline_depth: 0, }); } let mut output = vec![]; let mut multiline_annotations = vec![]; if let Some(ref cm) = self.cm { for span_label in msp.span_labels() { if span_label.span.is_dummy() { continue; } let lo = cm.lookup_char_pos(span_label.span.lo()); let mut hi = cm.lookup_char_pos(span_label.span.hi()); // Watch out for "empty spans". If we get a span like 6..6, we // want to just display a `^` at 6, so convert that to // 6..7. This is degenerate input, but it's best to degrade // gracefully -- and the parser likes to supply a span like // that for EOF, in particular. if lo.col_display == hi.col_display && lo.line == hi.line { hi.col_display += 1; } let ann_type = if lo.line != hi.line { let ml = MultilineAnnotation { depth: 1, line_start: lo.line, line_end: hi.line, start_col: lo.col_display, end_col: hi.col_display, is_primary: span_label.is_primary, label: span_label.label.clone(), }; multiline_annotations.push((lo.file.clone(), ml.clone())); AnnotationType::Multiline(ml) } else { AnnotationType::Singleline }; let ann = Annotation { start_col: lo.col_display, end_col: hi.col_display, is_primary: span_label.is_primary, label: span_label.label.clone(), annotation_type: ann_type, }; if !ann.is_multiline() { add_annotation_to_file(&mut output, lo.file, lo.line, ann); } } } // Find overlapping multiline annotations, put them at different depths multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, ml.line_end)); for item in multiline_annotations.clone() { let ann = item.1; for item in multiline_annotations.iter_mut() { let ref mut a = item.1; // Move all other multiline annotations overlapping with this one // 
one level to the right. if &ann != a && num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true) { a.increase_depth(); } else { break; } } } let mut max_depth = 0; // max overlapping multiline spans for (file, ann) in multiline_annotations { if ann.depth > max_depth { max_depth = ann.depth; } add_annotation_to_file(&mut output, file.clone(), ann.line_start, ann.as_start()); let middle = min(ann.line_start + 4, ann.line_end); for line in ann.line_start + 1..middle { add_annotation_to_file(&mut output, file.clone(), line, ann.as_line()); } if middle < ann.line_end - 1 { for line in ann.line_end - 1..ann.line_end { add_annotation_to_file(&mut output, file.clone(), line, ann.as_line()); } } add_annotation_to_file(&mut output, file, ann.line_end, ann.as_end()); } for file_vec in output.iter_mut() { file_vec.multiline_depth = max_depth; } output } fn render_source_line(&self, buffer: &mut StyledBuffer, file: Lrc<SourceFile>, line: &Line, width_offset: usize, code_offset: usize) -> Vec<(usize, Style)> { if line.line_index == 0 { return Vec::new(); } let source_string = match file.get_line(line.line_index - 1) { Some(s) => s, None => return Vec::new(), }; let line_offset = buffer.num_lines(); // First create the source line we will highlight. buffer.puts(line_offset, code_offset, &source_string, Style::Quotation); buffer.puts(line_offset, 0, &self.maybe_anonymized(line.line_index), Style::LineNumber); draw_col_separator(buffer, line_offset, width_offset - 2); // Special case when there's only one annotation involved, it is the start of a multiline // span and there's no text at the beginning of the code line. 
Instead of doing the whole // graph: // // 2 | fn foo() { // | _^ // 3 | | // 4 | | } // | |_^ test // // we simplify the output to: // // 2 | / fn foo() { // 3 | | // 4 | | } // | |_^ test if line.annotations.len() == 1 { if let Some(ref ann) = line.annotations.get(0) { if let AnnotationType::MultilineStart(depth) = ann.annotation_type { if source_string.chars() .take(ann.start_col) .all(|c| c.is_whitespace()) { let style = if ann.is_primary { Style::UnderlinePrimary } else { Style::UnderlineSecondary }; buffer.putc(line_offset, width_offset + depth - 1, '/', style); return vec![(depth, style)]; } } } } // We want to display like this: // // vec.push(vec.pop().unwrap()); // --- ^^^ - previous borrow ends here // | | // | error occurs here // previous borrow of `vec` occurs here // // But there are some weird edge cases to be aware of: // // vec.push(vec.pop().unwrap()); // -------- - previous borrow ends here // || // |this makes no sense // previous borrow of `vec` occurs here // // For this reason, we group the lines into "highlight lines" // and "annotations lines", where the highlight lines have the `^`. // Sort the annotations by (start, end col) // The labels are reversed, sort and then reversed again. // Consider a list of annotations (A1, A2, C1, C2, B1, B2) where // the letter signifies the span. Here we are only sorting by the // span and hence, the order of the elements with the same span will // not change. On reversing the ordering (|a, b| but b.cmp(a)), you get // (C1, C2, B1, B2, A1, A2). All the elements with the same span are // still ordered first to last, but all the elements with different // spans are ordered by their spans in last to first order. Last to // first order is important, because the jiggly lines and | are on // the left, so the rightmost span needs to be rendered first, // otherwise the lines would end up needing to go over a message. 
let mut annotations = line.annotations.clone(); annotations.sort_by_key(|a| Reverse(a.start_col)); // First, figure out where each label will be positioned. // // In the case where you have the following annotations: // // vec.push(vec.pop().unwrap()); // -------- - previous borrow ends here [C] // || // |this makes no sense [B] // previous borrow of `vec` occurs here [A] // // `annotations_position` will hold [(2, A), (1, B), (0, C)]. // // We try, when possible, to stick the rightmost annotation at the end // of the highlight line: // // vec.push(vec.pop().unwrap()); // --- --- - previous borrow ends here // // But sometimes that's not possible because one of the other // annotations overlaps it. For example, from the test // `span_overlap_label`, we have the following annotations // (written on distinct lines for clarity): // // fn foo(x: u32) { // -------------- // - // // In this case, we can't stick the rightmost-most label on // the highlight line, or we would get: // // fn foo(x: u32) { // -------- x_span // | // fn_span // // which is totally weird. Instead we want: // // fn foo(x: u32) { // -------------- // | | // | x_span // fn_span // // which is...less weird, at least. In fact, in general, if // the rightmost span overlaps with any other span, we should // use the "hang below" version, so we can at least make it // clear where the span *starts*. There's an exception for this // logic, when the labels do not have a message: // // fn foo(x: u32) { // -------------- // | // x_span // // instead of: // // fn foo(x: u32) { // -------------- // | | // | x_span // <EMPTY LINE> // let mut annotations_position = vec![]; let mut line_len = 0; let mut p = 0; for (i, annotation) in annotations.iter().enumerate() { for (j, next) in annotations.iter().enumerate() { if overlaps(next, annotation, 0) // This label overlaps with another one and both && annotation.has_label() // take space (they have text and are not && j > i // multiline lines). 
&& p == 0 // We're currently on the first line, move the label one line down { // This annotation needs a new line in the output. p += 1; break; } } annotations_position.push((p, annotation)); for (j, next) in annotations.iter().enumerate() { if j > i { let l = if let Some(ref label) = next.label { label.len() + 2 } else { 0 }; if (overlaps(next, annotation, l) // Do not allow two labels to be in the same // line if they overlap including padding, to // avoid situations like: // // fn foo(x: u32) { // -------^------ // | | // fn_spanx_span // && annotation.has_label() // Both labels must have some text, otherwise && next.has_label()) // they are not overlapping. // Do not add a new line if this annotation // or the next are vertical line placeholders. || (annotation.takes_space() // If either this or the next annotation is && next.has_label()) // multiline start/end, move it to a new line || (annotation.has_label() // so as not to overlap the orizontal lines. && next.takes_space()) || (annotation.takes_space() && next.takes_space()) || (overlaps(next, annotation, l) && next.end_col <= annotation.end_col && next.has_label() && p == 0) // Avoid #42595. { // This annotation needs a new line in the output. p += 1; break; } } } if line_len < p { line_len = p; } } if line_len != 0 { line_len += 1; } // If there are no annotations or the only annotations on this line are // MultilineLine, then there's only code being shown, stop processing. if line.annotations.iter().all(|a| a.is_line()) { return vec![]; } // Write the colunmn separator. // // After this we will have: // // 2 | fn foo() { // | // | // | // 3 | // 4 | } // | for pos in 0..line_len + 1 { draw_col_separator(buffer, line_offset + pos + 1, width_offset - 2); buffer.putc(line_offset + pos + 1, width_offset - 2, '|', Style::LineNumber); } // Write the horizontal lines for multiline annotations // (only the first and last lines need this). 
// // After this we will have: // // 2 | fn foo() { // | __________ // | // | // 3 | // 4 | } // | _ for &(pos, annotation) in &annotations_position { let style = if annotation.is_primary { Style::UnderlinePrimary } else { Style::UnderlineSecondary }; let pos = pos + 1; match annotation.annotation_type { AnnotationType::MultilineStart(depth) | AnnotationType::MultilineEnd(depth) => { draw_range(buffer, '_', line_offset + pos, width_offset + depth, code_offset + annotation.start_col, style); } _ if self.teach => { buffer.set_style_range(line_offset, code_offset + annotation.start_col, code_offset + annotation.end_col, style, annotation.is_primary); } _ => {} } } // Write the vertical lines for labels that are on a different line as the underline. // // After this we will have: // // 2 | fn foo() { // | __________ // | | | // | | // 3 | // 4 | | } // | |_ for &(pos, annotation) in &annotations_position { let style = if annotation.is_primary { Style::UnderlinePrimary } else { Style::UnderlineSecondary }; let pos = pos + 1; if pos > 1 && (annotation.has_label() || annotation.takes_space()) { for p in line_offset + 1..line_offset + pos + 1 { buffer.putc(p, code_offset + annotation.start_col, '|', style); } } match annotation.annotation_type { AnnotationType::MultilineStart(depth) => { for p in line_offset + pos + 1..line_offset + line_len + 2 { buffer.putc(p, width_offset + depth - 1, '|', style); } } AnnotationType::MultilineEnd(depth) => { for p in line_offset..line_offset + pos + 1 { buffer.putc(p, width_offset + depth - 1, '|', style); } } _ => (), } } // Write the labels on the annotations that actually have a label. 
// // After this we will have: // // 2 | fn foo() { // | __________ // | | // | something about `foo` // 3 | // 4 | } // | _ test for &(pos, annotation) in &annotations_position { let style = if annotation.is_primary { Style::LabelPrimary } else { Style::LabelSecondary }; let (pos, col) = if pos == 0 { (pos + 1, annotation.end_col + 1) } else { (pos + 2, annotation.start_col) }; if let Some(ref label) = annotation.label { buffer.puts(line_offset + pos, code_offset + col, &label, style); } } // Sort from biggest span to smallest span so that smaller spans are // represented in the output: // // x | fn foo() // | ^^^---^^ // | | | // | | something about `foo` // | something about `fn foo()` annotations_position.sort_by(|a, b| { // Decreasing order a.1.len().cmp(&b.1.len()).reverse() }); // Write the underlines. // // After this we will have: // // 2 | fn foo() { // | ____-_____^ // | | // | something about `foo` // 3 | // 4 | } // | _^ test for &(_, annotation) in &annotations_position { let (underline, style) = if annotation.is_primary { ('^', Style::UnderlinePrimary) } else { ('-', Style::UnderlineSecondary) }; for p in annotation.start_col..annotation.end_col { buffer.putc(line_offset + 1, code_offset + p, underline, style); } } annotations_position.iter().filter_map(|&(_, annotation)| { match annotation.annotation_type { AnnotationType::MultilineStart(p) | AnnotationType::MultilineEnd(p) => { let style = if annotation.is_primary { Style::LabelPrimary } else { Style::LabelSecondary }; Some((p, style)) } _ => None } }).collect::<Vec<_>>() } fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize { let mut max = 0; if let Some(ref cm) = self.cm { for primary_span in msp.primary_spans() { if !primary_span.is_dummy() { let hi = cm.lookup_char_pos(primary_span.hi()); if hi.line > max { max = hi.line; } } } if !self.short_message { for span_label in msp.span_labels() { if !span_label.span.is_dummy() { let hi = cm.lookup_char_pos(span_label.span.hi()); if 
hi.line > max { max = hi.line; } } } } } max } fn get_max_line_num(&mut self, span: &MultiSpan, children: &[SubDiagnostic]) -> usize { let mut max = 0; let primary = self.get_multispan_max_line_num(span); max = if primary > max { primary } else { max }; for sub in children { let sub_result = self.get_multispan_max_line_num(&sub.span); max = if sub_result > max { primary } else { max }; } max } // This "fixes" MultiSpans that contain Spans that are pointing to locations inside of // <*macros>. Since these locations are often difficult to read, we move these Spans from // <*macros> to their corresponding use site. fn fix_multispan_in_std_macros(&mut self, span: &mut MultiSpan, always_backtrace: bool) -> bool { let mut spans_updated = false; if let Some(ref cm) = self.cm { let mut before_after: Vec<(Span, Span)> = vec![]; let mut new_labels: Vec<(Span, String)> = vec![]; // First, find all the spans in <*macros> and point instead at their use site for sp in span.primary_spans() { if sp.is_dummy() { continue; } let call_sp = cm.call_span_if_macro(*sp); if call_sp != *sp && !always_backtrace { before_after.push((*sp, call_sp)); } let backtrace_len = sp.macro_backtrace().len(); for (i, trace) in sp.macro_backtrace().iter().rev().enumerate() { // Only show macro locations that are local // and display them like a span_note if let Some(def_site) = trace.def_site_span { if def_site.is_dummy() { continue; } if always_backtrace { new_labels.push((def_site, format!("in this expansion of `{}`{}", trace.macro_decl_name, if backtrace_len > 2 { // if backtrace_len == 1 it'll be pointed // at by "in this macro invocation" format!(" (#{})", i + 1) } else { "".to_string() }))); } // Check to make sure we're not in any <*macros> if !cm.span_to_filename(def_site).is_macros() && !trace.macro_decl_name.starts_with("desugaring of ") && !trace.macro_decl_name.starts_with("#[") || always_backtrace { new_labels.push((trace.call_site, format!("in this macro invocation{}", if backtrace_len > 2 
&& always_backtrace { // only specify order when the macro // backtrace is multiple levels deep format!(" (#{})", i + 1) } else { "".to_string() }))); if !always_backtrace { break; } } } } } for (label_span, label_text) in new_labels { span.push_span_label(label_span, label_text); } for sp_label in span.span_labels() { if sp_label.span.is_dummy() { continue; } if cm.span_to_filename(sp_label.span.clone()).is_macros() && !always_backtrace { let v = sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); } } } // After we have them, make sure we replace these 'bad' def sites with their use sites for (before, after) in before_after { span.replace(before, after); spans_updated = true; } } spans_updated } // This does a small "fix" for multispans by looking to see if it can find any that // point directly at <*macros>. Since these are often difficult to read, this // will change the span to point at the use site. fn fix_multispans_in_std_macros(&mut self, span: &mut MultiSpan, children: &mut Vec<SubDiagnostic>, backtrace: bool) { let mut spans_updated = self.fix_multispan_in_std_macros(span, backtrace); for child in children.iter_mut() { spans_updated |= self.fix_multispan_in_std_macros(&mut child.span, backtrace); } if spans_updated { children.push(SubDiagnostic { level: Level::Note, message: vec![ ("this error originates in a macro outside of the current crate \ (in Nightly builds, run with -Z external-macro-backtrace \ for more info)".to_string(), Style::NoStyle), ], span: MultiSpan::new(), render_span: None, }); } } /// Add a left margin to every line but the first, given a padding length and the label being /// displayed, keeping the provided highlighting. 
fn msg_to_buffer(&self, buffer: &mut StyledBuffer, msg: &[(String, Style)], padding: usize, label: &str, override_style: Option<Style>) { // The extra 5 ` ` is padding that's always needed to align to the `note: `: // // error: message // --> file.rs:13:20 // | // 13 | <CODE> // | ^^^^ // | // = note: multiline // message // ++^^^----xx // | | | | // | | | magic `2` // | | length of label // | magic `3` // `max_line_num_len` let padding = " ".repeat(padding + label.len() + 5); /// Return whether `style`, or the override if present and the style is `NoStyle`. fn style_or_override(style: Style, override_style: Option<Style>) -> Style { if let Some(o) = override_style { if style == Style::NoStyle { return o; } } style } let mut line_number = 0; // Provided the following diagnostic message: // // let msg = vec![ // (" // ("highlighted multiline\nstring to\nsee how it ", Style::NoStyle), // ("looks", Style::Highlight), // ("with\nvery ", Style::NoStyle), // ("weird", Style::Highlight), // (" formats\n", Style::NoStyle), // ("see?", Style::Highlight), // ]; // // the expected output on a note is (* surround the highlighted text) // // = note: highlighted multiline // string to // see how it *looks* with // very *weird* formats // see? 
for &(ref text, ref style) in msg.iter() { let lines = text.split('\n').collect::<Vec<_>>(); if lines.len() > 1 { for (i, line) in lines.iter().enumerate() { if i != 0 { line_number += 1; buffer.append(line_number, &padding, Style::NoStyle); } buffer.append(line_number, line, style_or_override(*style, override_style)); } } else { buffer.append(line_number, text, style_or_override(*style, override_style)); } } } fn emit_message_default(&mut self, msp: &MultiSpan, msg: &[(String, Style)], code: &Option<DiagnosticId>, level: &Level, max_line_num_len: usize, is_secondary: bool) -> io::Result<()> { let mut buffer = StyledBuffer::new(); let header_style = if is_secondary { Style::HeaderMsg } else { Style::MainHeaderMsg }; if msp.primary_spans().is_empty() && msp.span_labels().is_empty() && is_secondary && !self.short_message { // This is a secondary message with no span info for _ in 0..max_line_num_len { buffer.prepend(0, " ", Style::NoStyle); } draw_note_separator(&mut buffer, 0, max_line_num_len + 1); let level_str = level.to_string(); if !level_str.is_empty() { buffer.append(0, &level_str, Style::MainHeaderMsg); buffer.append(0, ": ", Style::NoStyle); } self.msg_to_buffer(&mut buffer, msg, max_line_num_len, "note", None); } else { let level_str = level.to_string(); if !level_str.is_empty() { buffer.append(0, &level_str, Style::Level(level.clone())); } // only render error codes, not lint codes if let Some(DiagnosticId::Error(ref code)) = *code { buffer.append(0, "[", Style::Level(level.clone())); buffer.append(0, &code, Style::Level(level.clone())); buffer.append(0, "]", Style::Level(level.clone())); } if !level_str.is_empty() { buffer.append(0, ": ", header_style); } for &(ref text, _) in msg.iter() { buffer.append(0, text, header_style); } } // Preprocess all the annotations so that they are grouped by file and by line number // This helps us quickly iterate over the whole message (including secondary file spans) let mut annotated_files = 
self.preprocess_annotations(msp); // Make sure our primary file comes first let (primary_lo, cm) = if let (Some(cm), Some(ref primary_span)) = (self.cm.as_ref(), msp.primary_span().as_ref()) { if !primary_span.is_dummy() { (cm.lookup_char_pos(primary_span.lo()), cm) } else { emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; return Ok(()); } } else { // If we don't have span information, emit and exit emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; return Ok(()); }; if let Ok(pos) = annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name)) { annotated_files.swap(0, pos); } // Print out the annotate source lines that correspond with the error for annotated_file in annotated_files { // we can't annotate anything if the source is unavailable. if !cm.ensure_source_file_source_present(annotated_file.file.clone()) { continue; } // print out the span location and spacer before we print the annotated source // to do this, we need to know if this span will be primary let is_primary = primary_lo.file.name == annotated_file.file.name; if is_primary { let loc = primary_lo.clone(); if !self.short_message { // remember where we are in the output buffer for easy reference let buffer_msg_line_offset = buffer.num_lines(); buffer.prepend(buffer_msg_line_offset, "--> ", Style::LineNumber); buffer.append(buffer_msg_line_offset, &format!("{}:{}:{}", loc.file.name, cm.doctest_offset_line(loc.line), loc.col.0 + 1), Style::LineAndColumn); for _ in 0..max_line_num_len { buffer.prepend(buffer_msg_line_offset, " ", Style::NoStyle); } } else { buffer.prepend(0, &format!("{}:{}:{}: ", loc.file.name, cm.doctest_offset_line(loc.line), loc.col.0 + 1), Style::LineAndColumn); } } else if !self.short_message { // remember where we are in the output buffer for easy reference let buffer_msg_line_offset = buffer.num_lines(); // Add spacing line draw_col_separator(&mut buffer, buffer_msg_line_offset, max_line_num_len + 
1); // Then, the secondary file indicator buffer.prepend(buffer_msg_line_offset + 1, "::: ", Style::LineNumber); let loc = if let Some(first_line) = annotated_file.lines.first() { let col = if let Some(first_annotation) = first_line.annotations.first() { format!(":{}", first_annotation.start_col + 1) } else { "".to_string() }; format!("{}:{}{}", annotated_file.file.name, cm.doctest_offset_line(first_line.line_index), col) } else { annotated_file.file.name.to_string() }; buffer.append(buffer_msg_line_offset + 1, &loc, Style::LineAndColumn); for _ in 0..max_line_num_len { buffer.prepend(buffer_msg_line_offset + 1, " ", Style::NoStyle); } } if !self.short_message { // Put in the spacer between the location and annotated source let buffer_msg_line_offset = buffer.num_lines(); draw_col_separator_no_space(&mut buffer, buffer_msg_line_offset, max_line_num_len + 1); // Contains the vertical lines' positions for active multiline annotations let mut multilines = HashMap::new(); // Next, output the annotate source for this file for line_idx in 0..annotated_file.lines.len() { let previous_buffer_line = buffer.num_lines(); let width_offset = 3 + max_line_num_len; let code_offset = if annotated_file.multiline_depth == 0 { width_offset } else { width_offset + annotated_file.multiline_depth + 1 }; let depths = self.render_source_line(&mut buffer, annotated_file.file.clone(), &annotated_file.lines[line_idx], width_offset, code_offset); let mut to_add = HashMap::new(); for (depth, style) in depths { if multilines.get(&depth).is_some() { multilines.remove(&depth); } else { to_add.insert(depth, style); } } // Set the multiline annotation vertical lines to the left of // the code in this line. for (depth, style) in &multilines { for line in previous_buffer_line..buffer.num_lines() { draw_multiline_line(&mut buffer, line, width_offset, *depth, *style); } } // check to see if we need to print out or elide lines that come between // this annotated line and the next one. 
if line_idx < (annotated_file.lines.len() - 1) { let line_idx_delta = annotated_file.lines[line_idx + 1].line_index - annotated_file.lines[line_idx].line_index; if line_idx_delta > 2 { let last_buffer_line_num = buffer.num_lines(); buffer.puts(last_buffer_line_num, 0, "...", Style::LineNumber); // Set the multiline annotation vertical lines on `...` bridging line. for (depth, style) in &multilines { draw_multiline_line(&mut buffer, last_buffer_line_num, width_offset, *depth, *style); } } else if line_idx_delta == 2 { let unannotated_line = annotated_file.file .get_line(annotated_file.lines[line_idx].line_index) .unwrap_or_else(|| Cow::from("")); let last_buffer_line_num = buffer.num_lines(); buffer.puts(last_buffer_line_num, 0, &self.maybe_anonymized(annotated_file.lines[line_idx + 1] .line_index - 1), Style::LineNumber); draw_col_separator(&mut buffer, last_buffer_line_num, 1 + max_line_num_len); buffer.puts(last_buffer_line_num, code_offset, &unannotated_line, Style::Quotation); for (depth, style) in &multilines { draw_multiline_line(&mut buffer, last_buffer_line_num, width_offset, *depth, *style); } } } multilines.extend(&to_add); } } } // final step: take our styled buffer, render it, then output it emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; Ok(()) } fn emit_suggestion_default(&mut self, suggestion: &CodeSuggestion, level: &Level, max_line_num_len: usize) -> io::Result<()> { if let Some(ref cm) = self.cm { let mut buffer = StyledBuffer::new(); // Render the suggestion message let level_str = level.to_string(); if !level_str.is_empty() { buffer.append(0, &level_str, Style::Level(level.clone())); buffer.append(0, ": ", Style::HeaderMsg); } self.msg_to_buffer(&mut buffer, &[(suggestion.msg.to_owned(), Style::NoStyle)], max_line_num_len, "suggestion", Some(Style::HeaderMsg)); // Render the replacements for each suggestion let suggestions = suggestion.splice_lines(&**cm); let mut row_num = 2; for &(ref complete, ref parts) in 
suggestions.iter().take(MAX_SUGGESTIONS) { // Only show underline if the suggestion spans a single line and doesn't cover the // entirety of the code output. If you have multiple replacements in the same line // of code, show the underline. let show_underline = !(parts.len() == 1 && parts[0].snippet.trim() == complete.trim()) && complete.lines().count() == 1; let lines = cm.span_to_lines(parts[0].span).unwrap(); assert!(!lines.lines.is_empty()); let line_start = cm.lookup_char_pos(parts[0].span.lo()).line; draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1); let mut line_pos = 0; let mut lines = complete.lines(); for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) { // Print the span column to avoid confusion buffer.puts(row_num, 0, &self.maybe_anonymized(line_start + line_pos), Style::LineNumber); // print the suggestion draw_col_separator(&mut buffer, row_num, max_line_num_len + 1); buffer.append(row_num, line, Style::NoStyle); line_pos += 1; row_num += 1; } // This offset and the ones below need to be signed to account for replacement code // that is shorter than the original code. let mut offset: isize = 0; // Only show an underline in the suggestions if the suggestion is not the // entirety of the code being shown and the displayed code is not multiline. if show_underline { draw_col_separator(&mut buffer, row_num, max_line_num_len + 1); for part in parts { let span_start_pos = cm.lookup_char_pos(part.span.lo()).col_display; let span_end_pos = cm.lookup_char_pos(part.span.hi()).col_display; // Do not underline the leading... let start = part.snippet.len() .saturating_sub(part.snippet.trim_left().len()); // ...or trailing spaces. Account for substitutions containing unicode // characters. 
let sub_len = part.snippet.trim().chars().fold(0, |acc, ch| { acc + unicode_width::UnicodeWidthChar::width(ch).unwrap_or(0) }); let underline_start = (span_start_pos + start) as isize + offset; let underline_end = (span_start_pos + start + sub_len) as isize + offset; for p in underline_start..underline_end { buffer.putc(row_num, max_line_num_len + 3 + p as usize, '^', Style::UnderlinePrimary); } // underline removals too if underline_start == underline_end { for p in underline_start-1..underline_start+1 { buffer.putc(row_num, max_line_num_len + 3 + p as usize, '-', Style::UnderlineSecondary); } } // length of the code after substitution let full_sub_len = part.snippet.chars().fold(0, |acc, ch| { acc + unicode_width::UnicodeWidthChar::width(ch).unwrap_or(0) as isize }); // length of the code to be substituted let snippet_len = span_end_pos as isize - span_start_pos as isize; // For multiple substitutions, use the position *after* the previous // substitutions have happened. offset += full_sub_len - snippet_len; } row_num += 1; } // if we elided some lines, add an ellipsis if lines.next().is_some() { buffer.puts(row_num, max_line_num_len - 1, "...", Style::LineNumber); } else if !show_underline { draw_col_separator_no_space(&mut buffer, row_num, max_line_num_len + 1); row_num += 1; } } if suggestions.len() > MAX_SUGGESTIONS { let msg = format!("and {} other candidates", suggestions.len() - MAX_SUGGESTIONS); buffer.puts(row_num, 0, &msg, Style::NoStyle); } emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?; } Ok(()) } fn emit_messages_default(&mut self, level: &Level, message: &[(String, Style)], code: &Option<DiagnosticId>, span: &MultiSpan, children: &[SubDiagnostic], suggestions: &[CodeSuggestion]) { let max_line_num_len = if self.ui_testing { ANONYMIZED_LINE_NUM.len() } else { self.get_max_line_num(span, children).to_string().len() }; match self.emit_message_default(span, message, code, level, max_line_num_len, false) { Ok(()) => { if 
!children.is_empty() { let mut buffer = StyledBuffer::new(); if !self.short_message { draw_col_separator_no_space(&mut buffer, 0, max_line_num_len + 1); } match emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message) { Ok(()) => (), Err(e) => panic!("failed to emit error: {}", e) } } if !self.short_message { for child in children { let span = child.render_span.as_ref().unwrap_or(&child.span); match self.emit_message_default(&span, &child.styled_message(), &None, &child.level, max_line_num_len, true) { Err(e) => panic!("failed to emit error: {}", e), _ => () } } for sugg in suggestions { match self.emit_suggestion_default(sugg, &Level::Help, max_line_num_len) { Err(e) => panic!("failed to emit error: {}", e), _ => () } } } } Err(e) => panic!("failed to emit error: {}", e), } let mut dst = self.dst.writable(); match write!(dst, "\n") { Err(e) => panic!("failed to emit error: {}", e), _ => { match dst.flush() { Err(e) => panic!("failed to emit error: {}", e), _ => (), } } } } } fn draw_col_separator(buffer: &mut StyledBuffer, line: usize, col: usize) { buffer.puts(line, col, "| ", Style::LineNumber); } fn draw_col_separator_no_space(buffer: &mut StyledBuffer, line: usize, col: usize) { draw_col_separator_no_space_with_style(buffer, line, col, Style::LineNumber); } fn draw_col_separator_no_space_with_style(buffer: &mut StyledBuffer, line: usize, col: usize, style: Style) { buffer.putc(line, col, '|', style); } fn draw_range(buffer: &mut StyledBuffer, symbol: char, line: usize, col_from: usize, col_to: usize, style: Style) { for col in col_from..col_to { buffer.putc(line, col, symbol, style); } } fn draw_note_separator(buffer: &mut StyledBuffer, line: usize, col: usize) { buffer.puts(line, col, "= ", Style::LineNumber); } fn draw_multiline_line(buffer: &mut StyledBuffer, line: usize, offset: usize, depth: usize, style: Style) { buffer.putc(line, offset + depth - 1, '|', style); } fn num_overlap(a_start: usize, a_end: usize, b_start: usize, 
b_end:usize, inclusive: bool) -> bool { let extra = if inclusive { 1 } else { 0 }; (b_start..b_end + extra).contains(&a_start) || (a_start..a_end + extra).contains(&b_start) } fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool { num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false) } fn emit_to_destination(rendered_buffer: &[Vec<StyledString>], lvl: &Level, dst: &mut Destination, short_message: bool) -> io::Result<()> { use lock; let mut dst = dst.writable(); // In order to prevent error message interleaving, where multiple error lines get intermixed // when multiple compiler processes error simultaneously, we emit errors with additional // steps. // // On Unix systems, we write into a buffered terminal rather than directly to a terminal. When // the .flush() is called we take the buffer created from the buffered writes and write it at // one shot. Because the Unix systems use ANSI for the colors, which is a text-based styling // scheme, this buffered approach works and maintains the styling. // // On Windows, styling happens through calls to a terminal API. This prevents us from using the // same buffering approach. Instead, we use a global Windows mutex, which we acquire long // enough to output the full error message, then we release. 
let _buffer_lock = lock::acquire_global_lock("rustc_errors"); for (pos, line) in rendered_buffer.iter().enumerate() { for part in line { dst.apply_style(lvl.clone(), part.style)?; write!(dst, "{}", part.text)?; dst.reset()?; } if !short_message && (!lvl.is_failure_note() || pos != rendered_buffer.len() - 1) { write!(dst, "\n")?; } } dst.flush()?; Ok(()) } pub enum Destination { Terminal(StandardStream), Buffered(BufferWriter), Raw(Box<dyn Write + Send>), } pub enum WritableDst<'a> { Terminal(&'a mut StandardStream), Buffered(&'a mut BufferWriter, Buffer), Raw(&'a mut Box<dyn Write + Send>), } impl Destination { fn from_stderr(color: ColorConfig) -> Destination { let choice = color.to_color_choice(); // On Windows we'll be performing global synchronization on the entire // system for emitting rustc errors, so there's no need to buffer // anything. // // On non-Windows we rely on the atomicity of `write` to ensure errors // don't get all jumbled up. if cfg!(windows) { Terminal(StandardStream::stderr(choice)) } else { Buffered(BufferWriter::stderr(choice)) } } fn writable<'a>(&'a mut self) -> WritableDst<'a> { match *self { Destination::Terminal(ref mut t) => WritableDst::Terminal(t), Destination::Buffered(ref mut t) => { let buf = t.buffer(); WritableDst::Buffered(t, buf) } Destination::Raw(ref mut t) => WritableDst::Raw(t), } } } impl<'a> WritableDst<'a> { fn apply_style(&mut self, lvl: Level, style: Style) -> io::Result<()> { let mut spec = ColorSpec::new(); match style { Style::LineAndColumn => {} Style::LineNumber => { spec.set_bold(true); spec.set_intense(true); if cfg!(windows) { spec.set_fg(Some(Color::Cyan)); } else { spec.set_fg(Some(Color::Blue)); } } Style::Quotation => {} Style::OldSchoolNoteText | Style::MainHeaderMsg => { spec.set_bold(true); if cfg!(windows) { spec.set_intense(true) .set_fg(Some(Color::White)); } } Style::UnderlinePrimary | Style::LabelPrimary => { spec = lvl.color(); spec.set_bold(true); } Style::UnderlineSecondary | 
Style::LabelSecondary => { spec.set_bold(true) .set_intense(true); if cfg!(windows) { spec.set_fg(Some(Color::Cyan)); } else { spec.set_fg(Some(Color::Blue)); } } Style::HeaderMsg | Style::NoStyle => {} Style::Level(lvl) => { spec = lvl.color(); spec.set_bold(true); } Style::Highlight => { spec.set_bold(true); } } self.set_color(&spec) } fn set_color(&mut self, color: &ColorSpec) -> io::Result<()> { match *self { WritableDst::Terminal(ref mut t) => t.set_color(color), WritableDst::Buffered(_, ref mut t) => t.set_color(color), WritableDst::Raw(_) => Ok(()) } } fn reset(&mut self) -> io::Result<()> { match *self { WritableDst::Terminal(ref mut t) => t.reset(), WritableDst::Buffered(_, ref mut t) => t.reset(), WritableDst::Raw(_) => Ok(()), } } } impl<'a> Write for WritableDst<'a> { fn write(&mut self, bytes: &[u8]) -> io::Result<usize> { match *self { WritableDst::Terminal(ref mut t) => t.write(bytes), WritableDst::Buffered(_, ref mut buf) => buf.write(bytes), WritableDst::Raw(ref mut w) => w.write(bytes), } } fn flush(&mut self) -> io::Result<()> { match *self { WritableDst::Terminal(ref mut t) => t.flush(), WritableDst::Buffered(_, ref mut buf) => buf.flush(), WritableDst::Raw(ref mut w) => w.flush(), } } } impl<'a> Drop for WritableDst<'a> { fn drop(&mut self) { match *self { WritableDst::Buffered(ref mut dst, ref mut buf) => { drop(dst.print(buf)); } _ => {} } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/lock.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Bindings to acquire a global named lock. //! //! This is intended to be used to synchronize multiple compiler processes to //! ensure that we can output complete errors without interleaving on Windows. //! Note that this is currently only needed for allowing only one 32-bit MSVC //! linker to execute at once on MSVC hosts, so this is only implemented for //! `cfg(windows)`. Also note that this may not always be used on Windows, //! only when targeting 32-bit MSVC. //! //! For more information about why this is necessary, see where this is called. 
use std::any::Any; #[cfg(windows)] #[allow(bad_style)] pub fn acquire_global_lock(name: &str) -> Box<dyn Any> { use std::ffi::CString; use std::io; type LPSECURITY_ATTRIBUTES = *mut u8; type BOOL = i32; type LPCSTR = *const u8; type HANDLE = *mut u8; type DWORD = u32; const INFINITE: DWORD = !0; const WAIT_OBJECT_0: DWORD = 0; const WAIT_ABANDONED: DWORD = 0x00000080; extern "system" { fn CreateMutexA( lpMutexAttributes: LPSECURITY_ATTRIBUTES, bInitialOwner: BOOL, lpName: LPCSTR, ) -> HANDLE; fn WaitForSingleObject(hHandle: HANDLE, dwMilliseconds: DWORD) -> DWORD; fn ReleaseMutex(hMutex: HANDLE) -> BOOL; fn CloseHandle(hObject: HANDLE) -> BOOL; } struct Handle(HANDLE); impl Drop for Handle { fn drop(&mut self) { unsafe { CloseHandle(self.0); } } } struct Guard(Handle); impl Drop for Guard { fn drop(&mut self) { unsafe { ReleaseMutex((self.0).0); } } } let cname = CString::new(name).unwrap(); unsafe { // Create a named mutex, with no security attributes and also not // acquired when we create it. // // This will silently create one if it doesn't already exist, or it'll // open up a handle to one if it already exists. let mutex = CreateMutexA(0 as *mut _, 0, cname.as_ptr() as *const u8); if mutex.is_null() { panic!( "failed to create global mutex named `{}`: {}", name, io::Error::last_os_error() ); } let mutex = Handle(mutex); // Acquire the lock through `WaitForSingleObject`. // // A return value of `WAIT_OBJECT_0` means we successfully acquired it. // // A return value of `WAIT_ABANDONED` means that the previous holder of // the thread exited without calling `ReleaseMutex`. This can happen, // for example, when the compiler crashes or is interrupted via ctrl-c // or the like. In this case, however, we are still transferred // ownership of the lock so we continue. // // If an error happens.. well... that's surprising! 
match WaitForSingleObject(mutex.0, INFINITE) { WAIT_OBJECT_0 | WAIT_ABANDONED => {} code => { panic!( "WaitForSingleObject failed on global mutex named \ `{}`: {} (ret={:x})", name, io::Error::last_os_error(), code ); } } // Return a guard which will call `ReleaseMutex` when dropped. Box::new(Guard(mutex)) } } #[cfg(not(windows))] pub fn acquire_global_lock(_name: &str) -> Box<dyn Any> { Box::new(()) }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_errors/diagnostic.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use CodeSuggestion; use SubstitutionPart; use Substitution; use Applicability; use Level; use std::fmt; use syntax_pos::{MultiSpan, Span}; use snippet::Style; #[must_use] #[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub struct Diagnostic { pub level: Level, pub message: Vec<(String, Style)>, pub code: Option<DiagnosticId>, pub span: MultiSpan, pub children: Vec<SubDiagnostic>, pub suggestions: Vec<CodeSuggestion>, } #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum DiagnosticId { Error(String), Lint(String), } /// For example a note attached to an error. 
#[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] pub struct SubDiagnostic { pub level: Level, pub message: Vec<(String, Style)>, pub span: MultiSpan, pub render_span: Option<MultiSpan>, } #[derive(PartialEq, Eq)] pub struct DiagnosticStyledString(pub Vec<StringPart>); impl DiagnosticStyledString { pub fn new() -> DiagnosticStyledString { DiagnosticStyledString(vec![]) } pub fn push_normal<S: Into<String>>(&mut self, t: S) { self.0.push(StringPart::Normal(t.into())); } pub fn push_highlighted<S: Into<String>>(&mut self, t: S) { self.0.push(StringPart::Highlighted(t.into())); } pub fn normal<S: Into<String>>(t: S) -> DiagnosticStyledString { DiagnosticStyledString(vec![StringPart::Normal(t.into())]) } pub fn highlighted<S: Into<String>>(t: S) -> DiagnosticStyledString { DiagnosticStyledString(vec![StringPart::Highlighted(t.into())]) } pub fn content(&self) -> String { self.0.iter().map(|x| x.content()).collect::<String>() } } #[derive(PartialEq, Eq)] pub enum StringPart { Normal(String), Highlighted(String), } impl StringPart { pub fn content(&self) -> String { match self { &StringPart::Normal(ref s) | & StringPart::Highlighted(ref s) => s.to_owned() } } } impl Diagnostic { pub fn new(level: Level, message: &str) -> Self { Diagnostic::new_with_code(level, None, message) } pub fn new_with_code(level: Level, code: Option<DiagnosticId>, message: &str) -> Self { Diagnostic { level, message: vec![(message.to_owned(), Style::NoStyle)], code, span: MultiSpan::new(), children: vec![], suggestions: vec![], } } pub fn is_error(&self) -> bool { match self.level { Level::Bug | Level::Fatal | Level::PhaseFatal | Level::Error | Level::FailureNote => { true } Level::Warning | Level::Note | Level::Help | Level::Cancelled => { false } } } /// Cancel the diagnostic (a structured diagnostic must either be emitted or /// canceled or it will panic when dropped). 
pub fn cancel(&mut self) { self.level = Level::Cancelled; } pub fn cancelled(&self) -> bool { self.level == Level::Cancelled } /// Add a span/label to be included in the resulting snippet. /// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, /// then the snippet will just include that `Span`, which is /// called the primary span. pub fn span_label<T: Into<String>>(&mut self, span: Span, label: T) -> &mut Self { self.span.push_span_label(span, label.into()); self } pub fn note_expected_found(&mut self, label: &dyn fmt::Display, expected: DiagnosticStyledString, found: DiagnosticStyledString) -> &mut Self { self.note_expected_found_extra(label, expected, found, &"", &"") } pub fn note_expected_found_extra(&mut self, label: &dyn fmt::Display, expected: DiagnosticStyledString, found: DiagnosticStyledString, expected_extra: &dyn fmt::Display, found_extra: &dyn fmt::Display) -> &mut Self { let mut msg: Vec<_> = vec![(format!("expected {} `", label), Style::NoStyle)]; msg.extend(expected.0.iter() .map(|x| match *x { StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), })); msg.push((format!("`{}\n", expected_extra), Style::NoStyle)); msg.push((format!(" found {} `", label), Style::NoStyle)); msg.extend(found.0.iter() .map(|x| match *x { StringPart::Normal(ref s) => (s.to_owned(), Style::NoStyle), StringPart::Highlighted(ref s) => (s.to_owned(), Style::Highlight), })); msg.push((format!("`{}", found_extra), Style::NoStyle)); // For now, just attach these as notes self.highlighted_note(msg); self } pub fn note_trait_signature(&mut self, name: String, signature: String) -> &mut Self { self.highlighted_note(vec![ (format!("`{}` from trait: `", name), Style::NoStyle), (signature, Style::Highlight), ("`".to_string(), Style::NoStyle)]); self } pub fn note(&mut 
self, msg: &str) -> &mut Self { self.sub(Level::Note, msg, MultiSpan::new(), None); self } pub fn highlighted_note(&mut self, msg: Vec<(String, Style)>) -> &mut Self { self.sub_with_highlights(Level::Note, msg, MultiSpan::new(), None); self } pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Note, msg, sp.into(), None); self } pub fn warn(&mut self, msg: &str) -> &mut Self { self.sub(Level::Warning, msg, MultiSpan::new(), None); self } pub fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Warning, msg, sp.into(), None); self } pub fn help(&mut self , msg: &str) -> &mut Self { self.sub(Level::Help, msg, MultiSpan::new(), None); self } pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self { self.sub(Level::Help, msg, sp.into(), None); self } /// Prints out a message with a suggested edit of the code. If the suggestion is presented /// inline it will only show the text message and not the text. /// /// See `CodeSuggestion` for more information. pub fn span_suggestion_short(&mut self, sp: Span, msg: &str, suggestion: String) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { snippet: suggestion, span: sp, }], }], msg: msg.to_owned(), show_code_when_inline: false, applicability: Applicability::Unspecified, }); self } /// Prints out a message with a suggested edit of the code. 
/// /// In case of short messages and a simple suggestion, /// rustc displays it as a label like /// /// "try adding parentheses: `(tup.0).1`" /// /// The message /// /// * should not end in any punctuation (a `:` is added automatically) /// * should not be a question /// * should not contain any parts like "the following", "as shown" /// * may look like "to do xyz, use" or "to do xyz, use abc" /// * may contain a name of a function, variable or type, but not whole expressions /// /// See `CodeSuggestion` for more information. pub fn span_suggestion(&mut self, sp: Span, msg: &str, suggestion: String) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { snippet: suggestion, span: sp, }], }], msg: msg.to_owned(), show_code_when_inline: true, applicability: Applicability::Unspecified, }); self } pub fn multipart_suggestion( &mut self, msg: &str, suggestion: Vec<(Span, String)>, ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: suggestion .into_iter() .map(|(span, snippet)| SubstitutionPart { snippet, span }) .collect(), }], msg: msg.to_owned(), show_code_when_inline: true, applicability: Applicability::Unspecified, }); self } /// Prints out a message with multiple suggested edits of the code. pub fn span_suggestions(&mut self, sp: Span, msg: &str, suggestions: Vec<String>) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: suggestions.into_iter().map(|snippet| Substitution { parts: vec![SubstitutionPart { snippet, span: sp, }], }).collect(), msg: msg.to_owned(), show_code_when_inline: true, applicability: Applicability::Unspecified, }); self } /// This is a suggestion that may contain mistakes or fillers and should /// be read and understood by a human. 
pub fn span_suggestion_with_applicability(&mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { snippet: suggestion, span: sp, }], }], msg: msg.to_owned(), show_code_when_inline: true, applicability, }); self } pub fn span_suggestions_with_applicability(&mut self, sp: Span, msg: &str, suggestions: Vec<String>, applicability: Applicability) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: suggestions.into_iter().map(|snippet| Substitution { parts: vec![SubstitutionPart { snippet, span: sp, }], }).collect(), msg: msg.to_owned(), show_code_when_inline: true, applicability, }); self } pub fn span_suggestion_short_with_applicability( &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability ) -> &mut Self { self.suggestions.push(CodeSuggestion { substitutions: vec![Substitution { parts: vec![SubstitutionPart { snippet: suggestion, span: sp, }], }], msg: msg.to_owned(), show_code_when_inline: false, applicability: applicability, }); self } pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self { self.span = sp.into(); self } pub fn code(&mut self, s: DiagnosticId) -> &mut Self { self.code = Some(s); self } pub fn get_code(&self) -> Option<DiagnosticId> { self.code.clone() } pub fn message(&self) -> String { self.message.iter().map(|i| i.0.to_owned()).collect::<String>() } pub fn styled_message(&self) -> &Vec<(String, Style)> { &self.message } /// Used by a lint. Copies over all details *but* the "main /// message". pub fn copy_details_not_message(&mut self, from: &Diagnostic) { self.span = from.span.clone(); self.code = from.code.clone(); self.children.extend(from.children.iter().cloned()) } /// Convenience function for internal use, clients should use one of the /// public methods above. 
pub fn sub(&mut self, level: Level, message: &str, span: MultiSpan, render_span: Option<MultiSpan>) { let sub = SubDiagnostic { level, message: vec![(message.to_owned(), Style::NoStyle)], span, render_span, }; self.children.push(sub); } /// Convenience function for internal use, clients should use one of the /// public methods above. fn sub_with_highlights(&mut self, level: Level, message: Vec<(String, Style)>, span: MultiSpan, render_span: Option<MultiSpan>) { let sub = SubDiagnostic { level, message, span, render_span, }; self.children.push(sub); } } impl SubDiagnostic { pub fn message(&self) -> String { self.message.iter().map(|i| i.0.to_owned()).collect::<String>() } pub fn styled_message(&self) -> &Vec<(String, Style)> { &self.message } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/early_buffered_lints.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Allows the buffering of lints for later. //! //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat //! redundant. Later, these types can be converted to types for use by the rest of the compiler. use syntax::ast::NodeId; use syntax_pos::MultiSpan; /// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be /// passed to `rustc::lint::Lint::from_parser_lint_id` to get a `rustc::lint::Lint`. pub enum BufferedEarlyLintId { /// Usage of `?` as a macro separator is deprecated. QuestionMarkMacroSep, } /// Stores buffered lint info which can later be passed to `librustc`. pub struct BufferedEarlyLint { /// The span of code that we are linting on. pub span: MultiSpan, /// The lint message. pub msg: String, /// The `NodeId` of the AST node that generated the lint. pub id: NodeId, /// A lint Id that can be passed to `rustc::lint::Lint::from_parser_lint_id`. pub lint_id: BufferedEarlyLintId, }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/show_span.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Span debugger //! //! This module shows spans for all expressions in the crate //! to help with compiler debugging. use std::str::FromStr; use ast; use errors; use visit; use visit::Visitor; enum Mode { Expression, Pattern, Type, } impl FromStr for Mode { type Err = (); fn from_str(s: &str) -> Result<Mode, ()> { let mode = match s { "expr" => Mode::Expression, "pat" => Mode::Pattern, "ty" => Mode::Type, _ => return Err(()) }; Ok(mode) } } struct ShowSpanVisitor<'a> { span_diagnostic: &'a errors::Handler, mode: Mode, } impl<'a> Visitor<'a> for ShowSpanVisitor<'a> { fn visit_expr(&mut self, e: &'a ast::Expr) { if let Mode::Expression = self.mode { self.span_diagnostic.span_warn(e.span, "expression"); } visit::walk_expr(self, e); } fn visit_pat(&mut self, p: &'a ast::Pat) { if let Mode::Pattern = self.mode { self.span_diagnostic.span_warn(p.span, "pattern"); } visit::walk_pat(self, p); } fn visit_ty(&mut self, t: &'a ast::Ty) { if let Mode::Type = self.mode { self.span_diagnostic.span_warn(t.span, "type"); } visit::walk_ty(self, t); } fn visit_mac(&mut self, mac: &'a ast::Mac) { visit::walk_mac(self, mac); } } pub fn run(span_diagnostic: &errors::Handler, mode: &str, krate: &ast::Crate) { let mode = match mode.parse().ok() { Some(mode) => mode, None => return }; let mut v = ShowSpanVisitor { span_diagnostic, mode, }; visit::walk_crate(&mut v, krate); }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/Cargo.toml
[package] authors = ["The Rust Project Developers"] name = "syntax" version = "0.0.0" [lib] name = "syntax" path = "lib.rs" [dependencies] bitflags = "1.0" serialize = { path = "../libserialize" } log = "0.4" scoped-tls = "0.1" syntax_pos = { path = "../libsyntax_pos" } rustc_errors = { path = "../librustc_errors" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_target = { path = "../librustc_target" } smallvec = { version = "0.6.5", features = ["union"] }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/test.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Code that generates a test runner to run all the tests in a crate #![allow(dead_code)] #![allow(unused_imports)] use self::HasTestSignature::*; use std::iter; use std::slice; use std::mem; use std::vec; use attr::{self, HasAttrs}; use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos}; use source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned}; use errors; use config; use entry::{self, EntryPointType}; use ext::base::{ExtCtxt, Resolver}; use ext::build::AstBuilder; use ext::expand::ExpansionConfig; use ext::hygiene::{self, Mark, SyntaxContext}; use fold::Folder; use feature_gate::Features; use util::move_map::MoveMap; use fold; use parse::{token, ParseSess}; use print::pprust; use ast::{self, Ident}; use ptr::P; use OneVector; use symbol::{self, Symbol, keywords}; use ThinVec; use rustc_data_structures::small_vec::ExpectOne; enum ShouldPanic { No, Yes(Option<Symbol>), } struct Test { span: Span, path: Vec<Ident> , bench: bool, ignore: bool, should_panic: ShouldPanic, allow_fail: bool, } struct TestCtxt<'a> { span_diagnostic: &'a errors::Handler, path: Vec<Ident>, ext_cx: ExtCtxt<'a>, testfns: Vec<Test>, reexport_test_harness_main: Option<Symbol>, is_libtest: bool, ctxt: SyntaxContext, features: &'a Features, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option<Ident>, } // Traverse the crate, collecting all the test functions, eliding any // existing main functions, and synthesizing a main test harness pub fn 
modify_for_testing(sess: &ParseSess, resolver: &mut dyn Resolver, should_test: bool, krate: ast::Crate, span_diagnostic: &errors::Handler, features: &Features) -> ast::Crate { // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use __test::main as some_name;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. let reexport_test_harness_main = attr::first_attr_value_str_by_name(&krate.attrs, "reexport_test_harness_main"); if should_test { generate_test_harness(sess, resolver, reexport_test_harness_main, krate, span_diagnostic, features) } else { krate } } struct TestHarnessGenerator<'a> { cx: TestCtxt<'a>, tests: Vec<Ident>, // submodule name, gensym'd identifier for re-exports tested_submods: Vec<(Ident, Ident)>, } impl<'a> fold::Folder for TestHarnessGenerator<'a> { fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate { let mut folded = fold::noop_fold_crate(c, self); // Add a special __test module to the crate that will contain code // generated for the test harness let (mod_, reexport) = mk_test_module(&mut self.cx); if let Some(re) = reexport { folded.module.items.push(re) } folded.module.items.push(mod_); folded } fn fold_item(&mut self, i: P<ast::Item>) -> OneVector<P<ast::Item>> { let ident = i.ident; if ident.name != keywords::Invalid.name() { self.cx.path.push(ident); } debug!("current path: {}", path_name_i(&self.cx.path)); if is_test_fn(&self.cx, &i) || is_bench_fn(&self.cx, &i) { match i.node { ast::ItemKind::Fn(_, header, _, _) => { if header.unsafety == ast::Unsafety::Unsafe { let diag = self.cx.span_diagnostic; diag.span_fatal( i.span, "unsafe functions cannot be used for tests" ).raise(); } if header.asyncness.is_async() { let diag = self.cx.span_diagnostic; diag.span_fatal( i.span, "async functions cannot be used for tests" ).raise(); } } _ => {}, } debug!("this is a test function"); let test = Test { span: i.span, path: self.cx.path.clone(), bench: is_bench_fn(&self.cx, 
&i), ignore: is_ignored(&i), should_panic: should_panic(&i, &self.cx), allow_fail: is_allowed_fail(&i), }; self.cx.testfns.push(test); self.tests.push(i.ident); } let mut item = i.into_inner(); // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things if let ast::ItemKind::Mod(module) = item.node { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); let mut mod_folded = fold::noop_fold_mod(module, self); let tests = mem::replace(&mut self.tests, tests); let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); mod_folded.items.push(it); if !self.cx.path.is_empty() { self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); } else { debug!("pushing nothing, sym: {:?}", sym); self.cx.toplevel_reexport = Some(sym); } } item.node = ast::ItemKind::Mod(mod_folded); } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } smallvec![P(item)] } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } } struct EntryPointCleaner { // Current depth in the ast depth: usize, } impl fold::Folder for EntryPointCleaner { fn fold_item(&mut self, i: P<ast::Item>) -> OneVector<P<ast::Item>> { self.depth += 1; let folded = fold::noop_fold_item(i, self).expect_one("noop did something"); self.depth -= 1; // Remove any #[main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. 
let folded = match entry::entry_point_type(&folded, self.depth) { EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => folded.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { let allow_ident = Ident::from_str("allow"); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(DUMMY_SP, attr::mk_attr_id(), allow_dead_code_item); ast::Item { id, ident, attrs: attrs.into_iter() .filter(|attr| { !attr.check_name("main") && !attr.check_name("start") }) .chain(iter::once(allow_dead_code)) .collect(), node, vis, span, tokens, } }), EntryPointType::None | EntryPointType::OtherMain => folded, }; smallvec![folded] } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } } fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<Ident>, tested_submods: Vec<(Ident, Ident)>) -> (P<ast::Item>, Ident) { let super_ = Ident::from_str("super"); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), cx.ext_cx.path(DUMMY_SP, vec![super_, r])) }).chain(tested_submods.into_iter().map(|(r, sym)| { let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]); cx.ext_cx.item_use_simple_(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), Some(r), path) })).collect(); let reexport_mod = ast::Mod { inner: DUMMY_SP, items, }; let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports")); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item { ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Mod(reexport_mod), vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); (it, sym) } fn 
generate_test_harness(sess: &ParseSess, resolver: &mut dyn Resolver, reexport_test_harness_main: Option<Symbol>, krate: ast::Crate, sd: &errors::Handler, features: &Features) -> ast::Crate { // Remove the entry points let mut cleaner = EntryPointCleaner { depth: 0 }; let krate = cleaner.fold_crate(krate); let mark = Mark::fresh(Mark::root()); let mut econfig = ExpansionConfig::default("test".to_string()); econfig.features = Some(features); let cx = TestCtxt { span_diagnostic: sd, ext_cx: ExtCtxt::new(sess, econfig, resolver), path: Vec::new(), testfns: Vec::new(), reexport_test_harness_main, // NB: doesn't consider the value of `--crate-name` passed on the command line. is_libtest: attr::find_crate_name(&krate.attrs).map(|s| s == "test").unwrap_or(false), toplevel_reexport: None, ctxt: SyntaxContext::empty().apply_mark(mark), features, }; mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, def_site: None, format: MacroAttribute(Symbol::intern("test")), allow_internal_unstable: true, allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); TestHarnessGenerator { cx, tests: Vec::new(), tested_submods: Vec::new(), }.fold_crate(krate) } /// Craft a span that will be ignored by the stability lint's /// call to source_map's `is_internal` check. /// The expanded code calls some unstable functions in the test crate. 
fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { sp.with_ctxt(cx.ctxt) } enum HasTestSignature { Yes, No(BadTestSignature), } #[derive(PartialEq)] enum BadTestSignature { NotEvenAFunction, WrongTypeSignature, NoArgumentsAllowed, ShouldPanicOnlyWithNoArgs, } fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let has_test_attr = attr::contains_name(&i.attrs, "test"); fn has_test_signature(_cx: &TestCtxt, i: &ast::Item) -> HasTestSignature { let has_should_panic_attr = attr::contains_name(&i.attrs, "should_panic"); match i.node { ast::ItemKind::Fn(ref decl, _, ref generics, _) => { // If the termination trait is active, the compiler will check that the output // type implements the `Termination` trait as `libtest` enforces that. let has_output = match decl.output { ast::FunctionRetTy::Default(..) => false, ast::FunctionRetTy::Ty(ref t) if t.node.is_unit() => false, _ => true }; if !decl.inputs.is_empty() { return No(BadTestSignature::NoArgumentsAllowed); } match (has_output, has_should_panic_attr) { (true, true) => No(BadTestSignature::ShouldPanicOnlyWithNoArgs), (true, false) => if !generics.params.is_empty() { No(BadTestSignature::WrongTypeSignature) } else { Yes }, (false, _) => Yes } } _ => No(BadTestSignature::NotEvenAFunction), } } let has_test_signature = if has_test_attr { let diag = cx.span_diagnostic; match has_test_signature(cx, i) { Yes => true, No(cause) => { match cause { BadTestSignature::NotEvenAFunction => diag.span_err(i.span, "only functions may be used as tests"), BadTestSignature::WrongTypeSignature => diag.span_err(i.span, "functions used as tests must have signature fn() -> ()"), BadTestSignature::NoArgumentsAllowed => diag.span_err(i.span, "functions used as tests can not have any arguments"), BadTestSignature::ShouldPanicOnlyWithNoArgs => diag.span_err(i.span, "functions using `#[should_panic]` must return `()`"), } false } } } else { false }; has_test_attr && has_test_signature } fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let 
has_bench_attr = attr::contains_name(&i.attrs, "bench"); fn has_bench_signature(_cx: &TestCtxt, i: &ast::Item) -> bool { match i.node { ast::ItemKind::Fn(ref decl, _, _, _) => { // NB: inadequate check, but we're running // well before resolve, can't get too deep. decl.inputs.len() == 1 } _ => false } } let has_bench_signature = has_bench_signature(cx, i); if has_bench_attr && !has_bench_signature { let diag = cx.span_diagnostic; diag.span_err(i.span, "functions used as benches must have signature \ `fn(&mut Bencher) -> impl Termination`"); } has_bench_attr && has_bench_signature } fn is_ignored(i: &ast::Item) -> bool { attr::contains_name(&i.attrs, "ignore") } fn is_allowed_fail(i: &ast::Item) -> bool { attr::contains_name(&i.attrs, "allow_fail") } fn should_panic(i: &ast::Item, cx: &TestCtxt) -> ShouldPanic { match attr::find_by_name(&i.attrs, "should_panic") { Some(attr) => { let sd = cx.span_diagnostic; if attr.is_value_str() { sd.struct_span_warn( attr.span(), "attribute must be of the form: \ `#[should_panic]` or \ `#[should_panic(expected = \"error message\")]`" ).note("Errors in this attribute were erroneously allowed \ and will become a hard error in a future release.") .emit(); return ShouldPanic::Yes(None); } match attr.meta_item_list() { // Handle #[should_panic] None => ShouldPanic::Yes(None), // Handle #[should_panic(expected = "foo")] Some(list) => { let msg = list.iter() .find(|mi| mi.check_name("expected")) .and_then(|mi| mi.meta_item()) .and_then(|mi| mi.value_str()); if list.len() != 1 || msg.is_none() { sd.struct_span_warn( attr.span(), "argument must be of the form: \ `expected = \"error message\"`" ).note("Errors in this attribute were erroneously \ allowed and will become a hard error in a \ future release.").emit(); ShouldPanic::Yes(None) } else { ShouldPanic::Yes(msg) } }, } } None => ShouldPanic::No, } } /* We're going to be building a module that looks more or less like: mod __test { extern crate test (name = "test", vers = "..."); fn 
main() { test::test_main_static(&::os::args()[], tests, test::Options::new()) } static tests : &'static [test::TestDescAndFn] = &[ ... the list of tests in the crate ... ]; } */ fn mk_std(cx: &TestCtxt) -> P<ast::Item> { let id_test = Ident::from_str("test"); let sp = ignored_span(cx, DUMMY_SP); let (vi, vis, ident) = if cx.is_libtest { (ast::ItemKind::Use(P(ast::UseTree { span: DUMMY_SP, prefix: path_node(vec![id_test]), kind: ast::UseTreeKind::Simple(None, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID), })), ast::VisibilityKind::Public, keywords::Invalid.ident()) } else { (ast::ItemKind::ExternCrate(None), ast::VisibilityKind::Inherited, id_test) }; P(ast::Item { id: ast::DUMMY_NODE_ID, ident, node: vi, attrs: vec![], vis: dummy_spanned(vis), span: sp, tokens: None, }) } fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> { // Writing this out by hand with 'ignored_span': // pub fn main() { // #![main] // use std::slice::AsSlice; // test::test_main_static(::std::os::args().as_slice(), TESTS, test::Options::new()); // } let sp = ignored_span(cx, DUMMY_SP); let ecx = &cx.ext_cx; // test::test_main_static let test_main_path = ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]); // test::test_main_static(...) let test_main_path_expr = ecx.expr_path(test_main_path); let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS")); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![tests_ident_expr]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // pub fn main() { ... 
} let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); let main_body = ecx.block(sp, vec![call_test_main]); let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)), ast::FnHeader::default(), ast::Generics::default(), main_body); P(ast::Item { ident: Ident::from_str("main"), attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, }) } fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) { // Link to test crate let import = mk_std(cx); // A constant vector of test descriptors. let tests = mk_tests(cx); // The synthesized main function which will call the console test runner // with our list of tests let mainfn = mk_main(cx); let testmod = ast::Mod { inner: DUMMY_SP, items: vec![import, mainfn, tests], }; let item_ = ast::ItemKind::Mod(testmod); let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test")); let mut expander = cx.ext_cx.monotonic_expander(); let item = expander.fold_item(P(ast::Item { id: ast::DUMMY_NODE_ID, ident: mod_ident, attrs: vec![], node: item_, vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); let reexport = cx.reexport_test_harness_main.map(|s| { // building `use __test::main as <ident>;` let rename = Ident::with_empty_ctxt(s); let use_path = ast::UseTree { span: DUMMY_SP, prefix: path_node(vec![mod_ident, Ident::from_str("main")]), kind: ast::UseTreeKind::Simple(Some(rename), ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID), }; expander.fold_item(P(ast::Item { id: ast::DUMMY_NODE_ID, ident: keywords::Invalid.ident(), attrs: vec![], node: ast::ItemKind::Use(P(use_path)), vis: dummy_spanned(ast::VisibilityKind::Inherited), span: DUMMY_SP, tokens: None, })).pop().unwrap() }); debug!("Synthetic test module:\n{}\n", pprust::item_to_string(&item)); (item, reexport) } fn nospan<T>(t: T) -> source_map::Spanned<T> { source_map::Spanned { node: t, span: DUMMY_SP } } fn 
path_node(ids: Vec<Ident>) -> ast::Path { ast::Path { span: DUMMY_SP, segments: ids.into_iter().map(|id| ast::PathSegment::from_ident(id)).collect(), } } fn path_name_i(idents: &[Ident]) -> String { let mut path_name = "".to_string(); let mut idents_iter = idents.iter().peekable(); while let Some(ident) = idents_iter.next() { path_name.push_str(&ident.as_str()); if idents_iter.peek().is_some() { path_name.push_str("::") } } path_name } fn mk_tests(cx: &TestCtxt) -> P<ast::Item> { // The vector of test_descs for this crate let test_descs = mk_test_descs(cx); // FIXME #15962: should be using quote_item, but that stringifies // __test_reexports, causing it to be reinterned, losing the // gensym information. let sp = ignored_span(cx, DUMMY_SP); let ecx = &cx.ext_cx; let struct_type = ecx.ty_path(ecx.path(sp, vec![ecx.ident_of("self"), ecx.ident_of("test"), ecx.ident_of("TestDescAndFn")])); let static_lt = ecx.lifetime(sp, keywords::StaticLifetime.ident()); // &'static [self::test::TestDescAndFn] let static_type = ecx.ty_rptr(sp, ecx.ty(sp, ast::TyKind::Slice(struct_type)), Some(static_lt), ast::Mutability::Immutable); // static TESTS: $static_type = &[...]; ecx.item_const(sp, ecx.ident_of("TESTS"), static_type, test_descs) } fn mk_test_descs(cx: &TestCtxt) -> P<ast::Expr> { debug!("building test vector from {} tests", cx.testfns.len()); P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::AddrOf(ast::Mutability::Immutable, P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Array(cx.testfns.iter().map(|test| { mk_test_desc_and_fn_rec(cx, test) }).collect()), span: DUMMY_SP, attrs: ThinVec::new(), })), span: DUMMY_SP, attrs: ThinVec::new(), }) } fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> { // FIXME #15962: should be using quote_expr, but that stringifies // __test_reexports, causing it to be reinterned, losing the // gensym information. 
let span = ignored_span(cx, test.span); let ecx = &cx.ext_cx; let self_id = ecx.ident_of("self"); let test_id = ecx.ident_of("test"); // creates self::test::$name let test_path = |name| { ecx.path(span, vec![self_id, test_id, ecx.ident_of(name)]) }; // creates $name: $expr let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); // path to the #[test] function: "foo::bar::baz" let path_string = path_name_i(&test.path[..]); debug!("encoding {}", path_string); let name_expr = ecx.expr_str(span, Symbol::intern(&path_string)); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, ecx.expr_path(test_path("StaticTestName")), vec![name_expr]); let ignore_expr = ecx.expr_bool(span, test.ignore); let should_panic_path = |name| { ecx.path(span, vec![self_id, test_id, ecx.ident_of("ShouldPanic"), ecx.ident_of(name)]) }; let fail_expr = match test.should_panic { ShouldPanic::No => ecx.expr_path(should_panic_path("No")), ShouldPanic::Yes(msg) => { match msg { Some(msg) => { let msg = ecx.expr_str(span, msg); let path = should_panic_path("YesWithMessage"); ecx.expr_call(span, ecx.expr_path(path), vec![msg]) } None => ecx.expr_path(should_panic_path("Yes")), } } }; let allow_fail_expr = ecx.expr_bool(span, test.allow_fail); // self::test::TestDesc { ... 
} let desc_expr = ecx.expr_struct( span, test_path("TestDesc"), vec![field("name", name_expr), field("ignore", ignore_expr), field("should_panic", fail_expr), field("allow_fail", allow_fail_expr)]); let mut visible_path = vec![]; if cx.features.extern_absolute_paths { visible_path.push(keywords::Crate.ident()); } match cx.toplevel_reexport { Some(id) => visible_path.push(id), None => { let diag = cx.span_diagnostic; diag.bug("expected to find top-level re-export name, but found None"); } }; visible_path.extend_from_slice(&test.path[..]); // Rather than directly give the test function to the test // harness, we create a wrapper like one of the following: // // || test::assert_test_result(real_function()) // for test // |b| test::assert_test_result(real_function(b)) // for bench // // this will coerce into a fn pointer that is specialized to the // actual return type of `real_function` (Typically `()`, but not always). let fn_expr = { // construct `real_function()` (this will be inserted into the overall expr) let real_function_expr = ecx.expr_path(ecx.path_global(span, visible_path)); // construct path `test::assert_test_result` let assert_test_result = test_path("assert_test_result"); if test.bench { // construct `|b| {..}` let b_ident = Ident::with_empty_ctxt(Symbol::gensym("b")); let b_expr = ecx.expr_ident(span, b_ident); ecx.lambda( span, vec![b_ident], // construct `assert_test_result(..)` ecx.expr_call( span, ecx.expr_path(assert_test_result), vec![ // construct `real_function(b)` ecx.expr_call( span, real_function_expr, vec![b_expr], ) ], ), ) } else { // construct `|| {..}` ecx.lambda( span, vec![], // construct `assert_test_result(..)` ecx.expr_call( span, ecx.expr_path(assert_test_result), vec![ // construct `real_function()` ecx.expr_call( span, real_function_expr, vec![], ) ], ), ) } }; let variant_name = if test.bench { "StaticBenchFn" } else { "StaticTestFn" }; // self::test::$variant_name($fn_expr) let testfn_expr = ecx.expr_call(span, 
ecx.expr_path(test_path(variant_name)), vec![fn_expr]); // self::test::TestDescAndFn { ... } ecx.expr_struct(span, test_path("TestDescAndFn"), vec![field("desc", desc_expr), field("testfn", testfn_expr)]) }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/tokenstream.rs
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! # Token Streams //! //! `TokenStream`s represent syntactic objects before they are converted into ASTs. //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s, //! which are themselves a single `Token` or a `Delimited` subsequence of tokens. //! //! ## Ownership //! `TokenStreams` are persistent data structures constructed as ropes with reference //! counted-children. In general, this means that calling an operation on a `TokenStream` //! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to //! the original. This essentially coerces `TokenStream`s into 'views' of their subparts, //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking //! ownership of the original. use syntax_pos::{BytePos, Span, DUMMY_SP}; use ext::base; use ext::tt::{macro_parser, quoted}; use parse::Directory; use parse::token::{self, Token}; use print::pprust; use serialize::{Decoder, Decodable, Encoder, Encodable}; use util::RcVec; use std::borrow::Cow; use std::{fmt, iter, mem}; /// A delimited sequence of token trees #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub struct Delimited { /// The type of delimiter pub delim: token::DelimToken, /// The delimited sequence of token trees pub tts: ThinTokenStream, } impl Delimited { /// Returns the opening delimiter as a token. pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } /// Returns the closing delimiter as a token. 
pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } /// Returns the opening delimiter as a token tree. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span.is_dummy() { span } else { span.with_hi(span.lo() + BytePos(self.delim.len() as u32)) }; TokenTree::Token(open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span.is_dummy() { span } else { span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; TokenTree::Token(close_span, self.close_token()) } /// Returns the token trees inside the delimiters. pub fn stream(&self) -> TokenStream { self.tts.clone().into() } } /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can /// be passed to syntax extensions using a uniform type. /// /// If the syntax extension is an MBE macro, it will attempt to match its /// LHS token tree against the provided token tree, and if it finds a /// match, will transcribe the RHS token tree, splicing in any captured /// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds. /// /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. /// Nothing special happens to misnamed or misplaced `SubstNt`s. #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { /// A single token Token(Span, token::Token), /// A delimited sequence of token trees Delimited(Span, Delimited), } impl TokenTree { /// Use this token tree as a matcher to parse given tts. 
pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream) -> macro_parser::NamedParseResult { // `None` is because we're not interpolating let directory = Directory { path: Cow::from(cx.current_expansion.module.directory.as_path()), ownership: cx.current_expansion.directory_ownership, }; macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true) } /// Check if this TokenTree is equal to the other, regardless of span information. pub fn eq_unspanned(&self, other: &TokenTree) -> bool { match (self, other) { (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => { dl.delim == dl2.delim && dl.stream().eq_unspanned(&dl2.stream()) } (_, _) => false, } } // See comments in `interpolated_to_tokenstream` for why we care about // *probably* equal here rather than actual equality // // This is otherwise the same as `eq_unspanned`, only recursing with a // different method. pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool { match (self, other) { (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => { tk.probably_equal_for_proc_macro(tk2) } (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => { dl.delim == dl2.delim && dl.stream().probably_equal_for_proc_macro(&dl2.stream()) } (_, _) => false, } } /// Retrieve the TokenTree's span. pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp, } } /// Modify the `TokenTree`'s span inplace. pub fn set_span(&mut self, span: Span) { match *self { TokenTree::Token(ref mut sp, _) | TokenTree::Delimited(ref mut sp, _) => { *sp = span; } } } /// Indicates if the stream is a token that is equal to the provided token. 
pub fn eq_token(&self, t: Token) -> bool { match *self { TokenTree::Token(_, ref tk) => *tk == t, _ => false, } } pub fn joint(self) -> TokenStream { TokenStream { kind: TokenStreamKind::JointTree(self) } } } /// # Token Streams /// /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s. /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s /// instead of a representation of the abstract syntax tree. /// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat. #[derive(Clone, Debug)] pub struct TokenStream { kind: TokenStreamKind, } impl TokenStream { /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream` /// separating the two arguments with a comma for diagnostic suggestions. pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> { // Used to suggest if a user writes `foo!(a b);` if let TokenStreamKind::Stream(ref slice) = self.kind { let mut suggestion = None; let mut iter = slice.iter().enumerate().peekable(); while let Some((pos, ts)) = iter.next() { if let Some((_, next)) = iter.peek() { match (ts, next) { (TokenStream { kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)) }, _) | (_, TokenStream { kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)) }) => {} (TokenStream { kind: TokenStreamKind::Tree(TokenTree::Token(sp, _)) }, _) | (TokenStream { kind: TokenStreamKind::Tree(TokenTree::Delimited(sp, _)) }, _) => { let sp = sp.shrink_to_hi(); let comma = TokenStream { kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)), }; suggestion = Some((pos, comma, sp)); } _ => {} } } } if let Some((pos, comma, sp)) = suggestion { let mut new_slice = vec![]; let parts = slice.split_at(pos + 1); new_slice.extend_from_slice(parts.0); new_slice.push(comma); new_slice.extend_from_slice(parts.1); let slice = RcVec::new(new_slice); return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp)); 
} } None } } #[derive(Clone, Debug)] enum TokenStreamKind { Empty, Tree(TokenTree), JointTree(TokenTree), Stream(RcVec<TokenStream>), } impl From<TokenTree> for TokenStream { fn from(tt: TokenTree) -> TokenStream { TokenStream { kind: TokenStreamKind::Tree(tt) } } } impl From<Token> for TokenStream { fn from(token: Token) -> TokenStream { TokenTree::Token(DUMMY_SP, token).into() } } impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>()) } } impl Extend<TokenStream> for TokenStream { fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) { let iter = iter.into_iter(); let kind = mem::replace(&mut self.kind, TokenStreamKind::Empty); // Vector of token streams originally in self. let tts: Vec<TokenStream> = match kind { TokenStreamKind::Empty => { let mut vec = Vec::new(); vec.reserve(iter.size_hint().0); vec } TokenStreamKind::Tree(_) | TokenStreamKind::JointTree(_) => { let mut vec = Vec::new(); vec.reserve(1 + iter.size_hint().0); vec.push(TokenStream { kind }); vec } TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) { Ok(mut vec) => { // Extend in place using the existing capacity if possible. // This is the fast path for libraries like `quote` that // build a token stream. vec.reserve(iter.size_hint().0); vec } Err(rc_vec) => { // Self is shared so we need to copy and extend that. let mut vec = Vec::new(); vec.reserve(rc_vec.len() + iter.size_hint().0); vec.extend_from_slice(&rc_vec); vec } } }; // Perform the extend, joining tokens as needed along the way. let mut builder = TokenStreamBuilder(tts); for stream in iter { builder.push(stream); } // Build the resulting token stream. If it contains more than one token, // preserve capacity in the vector in anticipation of the caller // performing additional calls to extend. 
let mut tts = builder.0; *self = match tts.len() { 0 => TokenStream::empty(), 1 => tts.pop().unwrap(), _ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)), }; } } impl Eq for TokenStream {} impl PartialEq<TokenStream> for TokenStream { fn eq(&self, other: &TokenStream) -> bool { self.trees().eq(other.trees()) } } impl TokenStream { pub fn len(&self) -> usize { if let TokenStreamKind::Stream(ref slice) = self.kind { slice.len() } else { 0 } } pub fn empty() -> TokenStream { TokenStream { kind: TokenStreamKind::Empty } } pub fn is_empty(&self) -> bool { match self.kind { TokenStreamKind::Empty => true, _ => false, } } pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream { match streams.len() { 0 => TokenStream::empty(), 1 => streams.pop().unwrap(), _ => TokenStream::concat_rc_vec(RcVec::new(streams)), } } fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream { TokenStream { kind: TokenStreamKind::Stream(streams) } } pub fn trees(&self) -> Cursor { self.clone().into_trees() } pub fn into_trees(self) -> Cursor { Cursor::new(self) } /// Compares two TokenStreams, checking equality without regarding span information. pub fn eq_unspanned(&self, other: &TokenStream) -> bool { let mut t1 = self.trees(); let mut t2 = other.trees(); for (t1, t2) in t1.by_ref().zip(t2.by_ref()) { if !t1.eq_unspanned(&t2) { return false; } } t1.next().is_none() && t2.next().is_none() } // See comments in `interpolated_to_tokenstream` for why we care about // *probably* equal here rather than actual equality // // This is otherwise the same as `eq_unspanned`, only recursing with a // different method. pub fn probably_equal_for_proc_macro(&self, other: &TokenStream) -> bool { let mut t1 = self.trees(); let mut t2 = other.trees(); for (t1, t2) in t1.by_ref().zip(t2.by_ref()) { if !t1.probably_equal_for_proc_macro(&t2) { return false; } } t1.next().is_none() && t2.next().is_none() } /// Precondition: `self` consists of a single token tree. 
/// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`. pub fn as_tree(self) -> (TokenTree, bool /* joint? */) { match self.kind { TokenStreamKind::Tree(tree) => (tree, false), TokenStreamKind::JointTree(tree) => (tree, true), _ => unreachable!(), } } pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream { let mut trees = self.into_trees(); let mut result = Vec::new(); let mut i = 0; while let Some(stream) = trees.next_as_stream() { result.push(match stream.kind { TokenStreamKind::Tree(tree) => f(i, tree).into(), TokenStreamKind::JointTree(tree) => f(i, tree).joint(), _ => unreachable!() }); i += 1; } TokenStream::concat(result) } pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream { let mut trees = self.into_trees(); let mut result = Vec::new(); while let Some(stream) = trees.next_as_stream() { result.push(match stream.kind { TokenStreamKind::Tree(tree) => f(tree).into(), TokenStreamKind::JointTree(tree) => f(tree).joint(), _ => unreachable!() }); } TokenStream::concat(result) } fn first_tree_and_joint(&self) -> Option<(TokenTree, bool)> { match self.kind { TokenStreamKind::Empty => None, TokenStreamKind::Tree(ref tree) => Some((tree.clone(), false)), TokenStreamKind::JointTree(ref tree) => Some((tree.clone(), true)), TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree_and_joint(), } } fn last_tree_if_joint(&self) -> Option<TokenTree> { match self.kind { TokenStreamKind::Empty | TokenStreamKind::Tree(..) 
=> None, TokenStreamKind::JointTree(ref tree) => Some(tree.clone()), TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(), } } } #[derive(Clone)] pub struct TokenStreamBuilder(Vec<TokenStream>); impl TokenStreamBuilder { pub fn new() -> TokenStreamBuilder { TokenStreamBuilder(Vec::new()) } pub fn push<T: Into<TokenStream>>(&mut self, stream: T) { let stream = stream.into(); let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint { if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() { if let Some(glued_tok) = last_tok.glue(tok) { let last_stream = self.0.pop().unwrap(); self.push_all_but_last_tree(&last_stream); let glued_span = last_span.to(span); let glued_tt = TokenTree::Token(glued_span, glued_tok); let glued_tokenstream = if is_joint { glued_tt.joint() } else { glued_tt.into() }; self.0.push(glued_tokenstream); self.push_all_but_first_tree(&stream); return } } } self.0.push(stream); } pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self { self.push(stream); self } pub fn build(self) -> TokenStream { TokenStream::concat(self.0) } fn push_all_but_last_tree(&mut self, stream: &TokenStream) { if let TokenStreamKind::Stream(ref streams) = stream.kind { let len = streams.len(); match len { 1 => {} 2 => self.0.push(streams[0].clone().into()), _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))), } self.push_all_but_last_tree(&streams[len - 1]) } } fn push_all_but_first_tree(&mut self, stream: &TokenStream) { if let TokenStreamKind::Stream(ref streams) = stream.kind { let len = streams.len(); match len { 1 => {} 2 => self.0.push(streams[1].clone().into()), _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. 
len))), } self.push_all_but_first_tree(&streams[0]) } } } #[derive(Clone)] pub struct Cursor(CursorKind); #[derive(Clone)] enum CursorKind { Empty, Tree(TokenTree, bool /* consumed? */), JointTree(TokenTree, bool /* consumed? */), Stream(StreamCursor), } #[derive(Clone)] struct StreamCursor { stream: RcVec<TokenStream>, index: usize, stack: Vec<(RcVec<TokenStream>, usize)>, } impl StreamCursor { fn new(stream: RcVec<TokenStream>) -> Self { StreamCursor { stream: stream, index: 0, stack: Vec::new() } } fn next_as_stream(&mut self) -> Option<TokenStream> { loop { if self.index < self.stream.len() { self.index += 1; let next = self.stream[self.index - 1].clone(); match next.kind { TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next), TokenStreamKind::Stream(stream) => self.insert(stream), TokenStreamKind::Empty => {} } } else if let Some((stream, index)) = self.stack.pop() { self.stream = stream; self.index = index; } else { return None; } } } fn insert(&mut self, stream: RcVec<TokenStream>) { self.stack.push((mem::replace(&mut self.stream, stream), mem::replace(&mut self.index, 0))); } } impl Iterator for Cursor { type Item = TokenTree; fn next(&mut self) -> Option<TokenTree> { self.next_as_stream().map(|stream| match stream.kind { TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree, _ => unreachable!() }) } } impl Cursor { fn new(stream: TokenStream) -> Self { Cursor(match stream.kind { TokenStreamKind::Empty => CursorKind::Empty, TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false), TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false), TokenStreamKind::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)), }) } pub fn next_as_stream(&mut self) -> Option<TokenStream> { let (stream, consumed) = match self.0 { CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree.clone().into(), consumed), CursorKind::JointTree(ref tree, ref mut consumed @ false) => (tree.clone().joint(), 
consumed), CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(), _ => return None, }; *consumed = true; Some(stream) } pub fn insert(&mut self, stream: TokenStream) { match self.0 { _ if stream.is_empty() => return, CursorKind::Empty => *self = stream.trees(), CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => { *self = TokenStream::concat(vec![self.original_stream(), stream]).trees(); if consumed { self.next(); } } CursorKind::Stream(ref mut cursor) => { cursor.insert(ThinTokenStream::from(stream).0.unwrap()); } } } pub fn original_stream(&self) -> TokenStream { match self.0 { CursorKind::Empty => TokenStream::empty(), CursorKind::Tree(ref tree, _) => tree.clone().into(), CursorKind::JointTree(ref tree, _) => tree.clone().joint(), CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({ cursor.stack.get(0).cloned().map(|(stream, _)| stream) .unwrap_or(cursor.stream.clone()) }), } } pub fn look_ahead(&self, n: usize) -> Option<TokenTree> { fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> { for stream in streams { n = match stream.kind { TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree) if n == 0 => return Ok(tree.clone()), TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) 
=> n - 1, TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) { Ok(tree) => return Ok(tree), Err(n) => n, }, _ => n, }; } Err(n) } match self.0 { CursorKind::Empty | CursorKind::Tree(_, true) | CursorKind::JointTree(_, true) => Err(n), CursorKind::Tree(ref tree, false) | CursorKind::JointTree(ref tree, false) => look_ahead(&[tree.clone().into()], n), CursorKind::Stream(ref cursor) => { look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| { for &(ref stream, index) in cursor.stack.iter().rev() { n = match look_ahead(&stream[index..], n) { Ok(tree) => return Ok(tree), Err(n) => n, } } Err(n) }) } }.ok() } } /// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation. /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`. /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion. #[derive(Debug, Clone)] pub struct ThinTokenStream(Option<RcVec<TokenStream>>); impl From<TokenStream> for ThinTokenStream { fn from(stream: TokenStream) -> ThinTokenStream { ThinTokenStream(match stream.kind { TokenStreamKind::Empty => None, TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])), TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])), TokenStreamKind::Stream(stream) => Some(stream), }) } } impl From<ThinTokenStream> for TokenStream { fn from(stream: ThinTokenStream) -> TokenStream { stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty) } } impl Eq for ThinTokenStream {} impl PartialEq<ThinTokenStream> for ThinTokenStream { fn eq(&self, other: &ThinTokenStream) -> bool { TokenStream::from(self.clone()) == TokenStream::from(other.clone()) } } impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&pprust::tokens_to_string(self.clone())) } } impl Encodable for TokenStream { fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> 
{ self.trees().collect::<Vec<_>>().encode(encoder) } } impl Decodable for TokenStream { fn decode<D: Decoder>(decoder: &mut D) -> Result<TokenStream, D::Error> { Vec::<TokenTree>::decode(decoder).map(|vec| vec.into_iter().collect()) } } impl Encodable for ThinTokenStream { fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> { TokenStream::from(self.clone()).encode(encoder) } } impl Decodable for ThinTokenStream { fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> { TokenStream::decode(decoder).map(Into::into) } } #[cfg(test)] mod tests { use super::*; use syntax::ast::Ident; use with_globals; use syntax_pos::{Span, BytePos, NO_EXPANSION}; use parse::token::Token; use util::parser_testing::string_to_stream; fn string_to_ts(string: &str) -> TokenStream { string_to_stream(string.to_owned()) } fn sp(a: u32, b: u32) -> Span { Span::new(BytePos(a), BytePos(b), NO_EXPANSION) } #[test] fn test_concat() { with_globals(|| { let test_res = string_to_ts("foo::bar::baz"); let test_fst = string_to_ts("foo::bar"); let test_snd = string_to_ts("::baz"); let eq_res = TokenStream::concat(vec![test_fst, test_snd]); assert_eq!(test_res.trees().count(), 5); assert_eq!(eq_res.trees().count(), 5); assert_eq!(test_res.eq_unspanned(&eq_res), true); }) } #[test] fn test_to_from_bijection() { with_globals(|| { let test_start = string_to_ts("foo::bar(baz)"); let test_end = test_start.trees().collect(); assert_eq!(test_start, test_end) }) } #[test] fn test_eq_0() { with_globals(|| { let test_res = string_to_ts("foo"); let test_eqs = string_to_ts("foo"); assert_eq!(test_res, test_eqs) }) } #[test] fn test_eq_1() { with_globals(|| { let test_res = string_to_ts("::bar::baz"); let test_eqs = string_to_ts("::bar::baz"); assert_eq!(test_res, test_eqs) }) } #[test] fn test_eq_3() { with_globals(|| { let test_res = string_to_ts(""); let test_eqs = string_to_ts(""); assert_eq!(test_res, test_eqs) }) } #[test] fn test_diseq_0() { with_globals(|| { let test_res = 
string_to_ts("::bar::baz"); let test_eqs = string_to_ts("bar::baz"); assert_eq!(test_res == test_eqs, false) }) } #[test] fn test_diseq_1() { with_globals(|| { let test_res = string_to_ts("(bar,baz)"); let test_eqs = string_to_ts("bar,baz"); assert_eq!(test_res == test_eqs, false) }) } #[test] fn test_is_empty() { with_globals(|| { let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect(); let test1: TokenStream = TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into(); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); assert_eq!(test1.is_empty(), false); assert_eq!(test2.is_empty(), false); }) } #[test] fn test_dotdotdot() { let mut builder = TokenStreamBuilder::new(); builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint()); builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint()); builder.push(TokenTree::Token(sp(2, 3), Token::Dot)); let stream = builder.build(); assert!(stream.eq_unspanned(&string_to_ts("..."))); assert_eq!(stream.trees().count(), 1); } #[test] fn test_extend_empty() { with_globals(|| { // Append a token onto an empty token stream. let mut stream = TokenStream::empty(); stream.extend(vec![string_to_ts("t")]); let expected = string_to_ts("t"); assert!(stream.eq_unspanned(&expected)); }); } #[test] fn test_extend_nothing() { with_globals(|| { // Append nothing onto a token stream containing one token. let mut stream = string_to_ts("t"); stream.extend(vec![]); let expected = string_to_ts("t"); assert!(stream.eq_unspanned(&expected)); }); } #[test] fn test_extend_single() { with_globals(|| { // Append a token onto token stream containing a single token. let mut stream = string_to_ts("t1"); stream.extend(vec![string_to_ts("t2")]); let expected = string_to_ts("t1 t2"); assert!(stream.eq_unspanned(&expected)); }); } #[test] fn test_extend_in_place() { with_globals(|| { // Append a token onto token stream containing a reference counted // vec of tokens. 
The token stream has a reference count of 1 so // this can happen in place. let mut stream = string_to_ts("t1 t2"); stream.extend(vec![string_to_ts("t3")]); let expected = string_to_ts("t1 t2 t3"); assert!(stream.eq_unspanned(&expected)); }); } #[test] fn test_extend_copy() { with_globals(|| { // Append a token onto token stream containing a reference counted // vec of tokens. The token stream is shared so the extend takes // place on a copy. let mut stream = string_to_ts("t1 t2"); let _incref = stream.clone(); stream.extend(vec![string_to_ts("t3")]); let expected = string_to_ts("t1 t2 t3"); assert!(stream.eq_unspanned(&expected)); }); } #[test] fn test_extend_no_join() { with_globals(|| { let first = TokenTree::Token(DUMMY_SP, Token::Dot); let second = TokenTree::Token(DUMMY_SP, Token::Dot); // Append a dot onto a token stream containing a dot, but do not // join them. let mut stream = TokenStream::from(first); stream.extend(vec![TokenStream::from(second)]); let expected = string_to_ts(". ."); assert!(stream.eq_unspanned(&expected)); let unexpected = string_to_ts(".."); assert!(!stream.eq_unspanned(&unexpected)); }); } #[test] fn test_extend_join() { with_globals(|| { let first = TokenTree::Token(DUMMY_SP, Token::Dot).joint(); let second = TokenTree::Token(DUMMY_SP, Token::Dot); // Append a dot onto a token stream containing a dot, forming a // dotdot. let mut stream = first; stream.extend(vec![TokenStream::from(second)]); let expected = string_to_ts(".."); assert!(stream.eq_unspanned(&expected)); let unexpected = string_to_ts(". ."); assert!(!stream.eq_unspanned(&unexpected)); }); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/source_map.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The SourceMap tracks all the source code used within a single crate, mapping //! from integer byte positions to the original source code location. Each bit //! of source parsed during crate parsing (typically files, in-memory strings, //! or various bits of macro expansion) cover a continuous range of bytes in the //! SourceMap and are represented by SourceFiles. Byte positions are stored in //! `spans` and used pervasively in the compiler. They are absolute positions //! within the SourceMap, which upon request can be converted to line and column //! information, source code snippets, etc. pub use syntax_pos::*; pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo}; pub use self::ExpnFormat::*; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::{Lrc, Lock, LockGuard}; use std::cmp; use std::hash::Hash; use std::path::{Path, PathBuf}; use std::env; use std::fs; use std::io::{self, Read}; use errors::SourceMapper; /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. 
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site); let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Spanned<T> { pub node: T, pub span: Span, } pub fn respan<T>(sp: Span, t: T) -> Spanned<T> { Spanned {node: t, span: sp} } pub fn dummy_spanned<T>(t: T) -> Spanned<T> { respan(DUMMY_SP, t) } // _____________________________________________________________________________ // SourceFile, MultiByteChar, FileName, FileLines // /// An abstraction over the fs operations used by the Parser. pub trait FileLoader { /// Query the existence of a file. fn file_exists(&self, path: &Path) -> bool; /// Return an absolute path to a file, if possible. fn abs_path(&self, path: &Path) -> Option<PathBuf>; /// Read the contents of an UTF-8 file into memory. fn read_file(&self, path: &Path) -> io::Result<String>; } /// A FileLoader that uses std::fs to load real files. pub struct RealFileLoader; impl FileLoader for RealFileLoader { fn file_exists(&self, path: &Path) -> bool { fs::metadata(path).is_ok() } fn abs_path(&self, path: &Path) -> Option<PathBuf> { if path.is_absolute() { Some(path.to_path_buf()) } else { env::current_dir() .ok() .map(|cwd| cwd.join(path)) } } fn read_file(&self, path: &Path) -> io::Result<String> { let mut src = String::new(); fs::File::open(path)?.read_to_string(&mut src)?; Ok(src) } } // This is a SourceFile identifier that is used to correlate SourceFiles between // subsequent compilation sessions (which is something we need to do during // incremental compilation). 
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] pub struct StableFilemapId(u128); impl StableFilemapId { pub fn new(source_file: &SourceFile) -> StableFilemapId { let mut hasher = StableHasher::new(); source_file.name.hash(&mut hasher); source_file.name_was_remapped.hash(&mut hasher); source_file.unmapped_path.hash(&mut hasher); StableFilemapId(hasher.finish()) } } // _____________________________________________________________________________ // SourceMap // pub(super) struct SourceMapFiles { pub(super) file_maps: Vec<Lrc<SourceFile>>, stable_id_to_source_file: FxHashMap<StableFilemapId, Lrc<SourceFile>> } pub struct SourceMap { pub(super) files: Lock<SourceMapFiles>, file_loader: Box<dyn FileLoader + Sync + Send>, // This is used to apply the file path remapping as specified via // --remap-path-prefix to all SourceFiles allocated within this SourceMap. path_mapping: FilePathMapping, /// In case we are in a doctest, replace all file names with the PathBuf, /// and add the given offsets to the line info doctest_offset: Option<(FileName, isize)>, } impl SourceMap { pub fn new(path_mapping: FilePathMapping) -> SourceMap { SourceMap { files: Lock::new(SourceMapFiles { file_maps: Vec::new(), stable_id_to_source_file: FxHashMap(), }), file_loader: Box::new(RealFileLoader), path_mapping, doctest_offset: None, } } pub fn new_doctest(path_mapping: FilePathMapping, file: FileName, line: isize) -> SourceMap { SourceMap { doctest_offset: Some((file, line)), ..SourceMap::new(path_mapping) } } pub fn with_file_loader(file_loader: Box<dyn FileLoader + Sync + Send>, path_mapping: FilePathMapping) -> SourceMap { SourceMap { files: Lock::new(SourceMapFiles { file_maps: Vec::new(), stable_id_to_source_file: FxHashMap(), }), file_loader: file_loader, path_mapping, doctest_offset: None, } } pub fn path_mapping(&self) -> &FilePathMapping { &self.path_mapping } pub fn file_exists(&self, path: &Path) -> bool { self.file_loader.file_exists(path) } pub 
fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> { let src = self.file_loader.read_file(path)?; let filename = if let Some((ref name, _)) = self.doctest_offset { name.clone() } else { path.to_owned().into() }; Ok(self.new_source_file(filename, src)) } pub fn files(&self) -> LockGuard<Vec<Lrc<SourceFile>>> { LockGuard::map(self.files.borrow(), |files| &mut files.file_maps) } pub fn source_file_by_stable_id(&self, stable_id: StableFilemapId) -> Option<Lrc<SourceFile>> { self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|fm| fm.clone()) } fn next_start_pos(&self) -> usize { match self.files.borrow().file_maps.last() { None => 0, // Add one so there is some space between files. This lets us distinguish // positions in the source_map, even in the presence of zero-length files. Some(last) => last.end_pos.to_usize() + 1, } } /// Creates a new source_file. /// This does not ensure that only one SourceFile exists per file name. pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> { let start_pos = self.next_start_pos(); // The path is used to determine the directory for loading submodules and // include files, so it must be before remapping. // Note that filename may not be a valid path, eg it may be `<anon>` etc, // but this is okay because the directory determined by `path.pop()` will // be empty, so the working directory will be used. 
let unmapped_path = filename.clone(); let (filename, was_remapped) = match filename { FileName::Real(filename) => { let (filename, was_remapped) = self.path_mapping.map_prefix(filename); (FileName::Real(filename), was_remapped) }, other => (other, false), }; let source_file = Lrc::new(SourceFile::new( filename, was_remapped, unmapped_path, src, Pos::from_usize(start_pos), )); let mut files = self.files.borrow_mut(); files.file_maps.push(source_file.clone()); files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); source_file } /// Allocates a new SourceFile representing a source file from an external /// crate. The source code of such an "imported source_file" is not available, /// but we still know enough to generate accurate debuginfo location /// information for things inlined from other crates. pub fn new_imported_source_file(&self, filename: FileName, name_was_remapped: bool, crate_of_origin: u32, src_hash: u128, name_hash: u128, source_len: usize, mut file_local_lines: Vec<BytePos>, mut file_local_multibyte_chars: Vec<MultiByteChar>, mut file_local_non_narrow_chars: Vec<NonNarrowChar>) -> Lrc<SourceFile> { let start_pos = self.next_start_pos(); let end_pos = Pos::from_usize(start_pos + source_len); let start_pos = Pos::from_usize(start_pos); for pos in &mut file_local_lines { *pos = *pos + start_pos; } for mbc in &mut file_local_multibyte_chars { mbc.pos = mbc.pos + start_pos; } for swc in &mut file_local_non_narrow_chars { *swc = *swc + start_pos; } let source_file = Lrc::new(SourceFile { name: filename, name_was_remapped, unmapped_path: None, crate_of_origin, src: None, src_hash, external_src: Lock::new(ExternalSource::AbsentOk), start_pos, end_pos, lines: file_local_lines, multibyte_chars: file_local_multibyte_chars, non_narrow_chars: file_local_non_narrow_chars, name_hash, }); let mut files = self.files.borrow_mut(); files.file_maps.push(source_file.clone()); 
files.stable_id_to_source_file.insert(StableFilemapId::new(&source_file), source_file.clone()); source_file } pub fn mk_substr_filename(&self, sp: Span) -> String { let pos = self.lookup_char_pos(sp.lo()); format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_usize() + 1) } // If there is a doctest_offset, apply it to the line pub fn doctest_offset_line(&self, mut orig: usize) -> usize { if let Some((_, line)) = self.doctest_offset { if line >= 0 { orig = orig + line as usize; } else { orig = orig - (-line) as usize; } } orig } /// Lookup source information about a BytePos pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { let chpos = self.bytepos_to_file_charpos(pos); match self.lookup_line(pos) { Ok(SourceFileAndLine { fm: f, line: a }) => { let line = a + 1; // Line numbers start at 1 let linebpos = f.lines[a]; let linechpos = self.bytepos_to_file_charpos(linebpos); let col = chpos - linechpos; let col_display = { let start_width_idx = f .non_narrow_chars .binary_search_by_key(&linebpos, |x| x.pos()) .unwrap_or_else(|x| x); let end_width_idx = f .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let special_chars = end_width_idx - start_width_idx; let non_narrow: usize = f .non_narrow_chars[start_width_idx..end_width_idx] .into_iter() .map(|x| x.width()) .sum(); col.0 - special_chars + non_narrow }; debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); debug!("byte is on line: {}", line); assert!(chpos >= linechpos); Loc { file: f, line, col, col_display, } } Err(f) => { let col_display = { let end_width_idx = f .non_narrow_chars .binary_search_by_key(&pos, |x| x.pos()) .unwrap_or_else(|x| x); let non_narrow: usize = f .non_narrow_chars[0..end_width_idx] .into_iter() .map(|x| x.width()) .sum(); chpos.0 - end_width_idx + non_narrow }; Loc { file: f, line: 0, col: chpos, col_display, } } } } // If the relevant source_file is empty, we 
don't return a line number. pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> { let idx = self.lookup_source_file_idx(pos); let f = (*self.files.borrow().file_maps)[idx].clone(); match f.lookup_line(pos) { Some(line) => Ok(SourceFileAndLine { fm: f, line: line }), None => Err(f) } } pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { let loc = self.lookup_char_pos(pos); LocWithOpt { filename: loc.file.name.clone(), line: loc.line, col: loc.col, file: Some(loc.file) } } /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. /// For this to work, the spans have to be: /// /// * the ctxt of both spans much match /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> { // make sure we're at the same expansion id if sp_lhs.ctxt() != sp_rhs.ctxt() { return None; } let lhs_end = match self.lookup_line(sp_lhs.hi()) { Ok(x) => x, Err(_) => return None }; let rhs_begin = match self.lookup_line(sp_rhs.lo()) { Ok(x) => x, Err(_) => return None }; // if we must cross lines to merge, don't merge if lhs_end.line != rhs_begin.line { return None; } // ensure these follow the expected order and we don't overlap if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) { Some(sp_lhs.to(sp_rhs)) } else { None } } pub fn span_to_string(&self, sp: Span) -> String { if self.files.borrow().file_maps.is_empty() && sp.is_dummy() { return "no-location".to_string(); } let lo = self.lookup_char_pos_adj(sp.lo()); let hi = self.lookup_char_pos_adj(sp.hi()); format!("{}:{}:{}: {}:{}", lo.filename, lo.line, lo.col.to_usize() + 1, hi.line, hi.col.to_usize() + 1) } pub fn span_to_filename(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo()).file.name.clone() } pub fn 
span_to_unmapped_path(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo()).file.unmapped_path.clone() .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?") } pub fn is_multiline(&self, sp: Span) -> bool { let lo = self.lookup_char_pos(sp.lo()); let hi = self.lookup_char_pos(sp.hi()); lo.line != hi.line } pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { debug!("span_to_lines(sp={:?})", sp); if sp.lo() > sp.hi() { return Err(SpanLinesError::IllFormedSpan(sp)); } let lo = self.lookup_char_pos(sp.lo()); debug!("span_to_lines: lo={:?}", lo); let hi = self.lookup_char_pos(sp.hi()); debug!("span_to_lines: hi={:?}", hi); if lo.file.start_pos != hi.file.start_pos { return Err(SpanLinesError::DistinctSources(DistinctSources { begin: (lo.file.name.clone(), lo.file.start_pos), end: (hi.file.name.clone(), hi.file.start_pos), })); } assert!(hi.line >= lo.line); let mut lines = Vec::with_capacity(hi.line - lo.line + 1); // The span starts partway through the first line, // but after that it starts from offset 0. let mut start_col = lo.col; // For every line but the last, it extends from `start_col` // and to the end of the line. Be careful because the line // numbers in Loc are 1-based, so we subtract 1 to get 0-based // lines. for line_index in lo.line-1 .. hi.line-1 { let line_len = lo.file.get_line(line_index) .map(|s| s.chars().count()) .unwrap_or(0); lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) }); start_col = CharPos::from_usize(0); } // For the last line, it extends from `start_col` to `hi.col`: lines.push(LineInfo { line_index: hi.line - 1, start_col, end_col: hi.col }); Ok(FileLines {file: lo.file, lines: lines}) } /// Extract the source surrounding the given `Span` using the `extract_source` function. The /// extract function takes three arguments: a string slice containing the source, an index in /// the slice for the beginning of the span and an index in the slice for the end of the span. 
fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError> where F: Fn(&str, usize, usize) -> String { if sp.lo() > sp.hi() { return Err(SpanSnippetError::IllFormedSpan(sp)); } let local_begin = self.lookup_byte_offset(sp.lo()); let local_end = self.lookup_byte_offset(sp.hi()); if local_begin.fm.start_pos != local_end.fm.start_pos { return Err(SpanSnippetError::DistinctSources(DistinctSources { begin: (local_begin.fm.name.clone(), local_begin.fm.start_pos), end: (local_end.fm.name.clone(), local_end.fm.start_pos) })); } else { self.ensure_source_file_source_present(local_begin.fm.clone()); let start_index = local_begin.pos.to_usize(); let end_index = local_end.pos.to_usize(); let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize(); if start_index > end_index || end_index > source_len { return Err(SpanSnippetError::MalformedForCodemap( MalformedCodemapPositions { name: local_begin.fm.name.clone(), source_len, begin_pos: local_begin.pos, end_pos: local_end.pos, })); } if let Some(ref src) = local_begin.fm.src { return Ok(extract_source(src, start_index, end_index)); } else if let Some(src) = local_begin.fm.external_src.borrow().get_source() { return Ok(extract_source(src, start_index, end_index)); } else { return Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.fm.name.clone() }); } } } /// Return the source snippet as `String` corresponding to the given `Span` pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> { self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index] .to_string()) } /// Return the source snippet as `String` before the given `Span` pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> { self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string()) } /// Extend the given `Span` to just after the previous occurrence of `c`. 
Return the same span /// if no character could be found or if an error occurred while retrieving the code snippet. pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span { if let Ok(prev_source) = self.span_to_prev_source(sp) { let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_left(); if !prev_source.is_empty() && !prev_source.contains('\n') { return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); } } sp } /// Extend the given `Span` to just after the previous occurrence of `pat` when surrounded by /// whitespace. Return the same span if no character could be found or if an error occurred /// while retrieving the code snippet. pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span { // assure that the pattern is delimited, to avoid the following // fn my_fn() // ^^^^ returned span without the check // ---------- correct span for ws in &[" ", "\t", "\n"] { let pat = pat.to_owned() + ws; if let Ok(prev_source) = self.span_to_prev_source(sp) { let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_left(); if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) { return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32)); } } } sp } /// Given a `Span`, try to get a shorter span ending before the first occurrence of `c` `char` pub fn span_until_char(&self, sp: Span, c: char) -> Span { match self.span_to_snippet(sp) { Ok(snippet) => { let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right(); if !snippet.is_empty() && !snippet.contains('\n') { sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)) } else { sp } } _ => sp, } } /// Given a `Span`, try to get a shorter span ending just after the first occurrence of `char` /// `c`. 
pub fn span_through_char(&self, sp: Span, c: char) -> Span { if let Ok(snippet) = self.span_to_snippet(sp) { if let Some(offset) = snippet.find(c) { return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32)); } } sp } /// Given a `Span`, get a new `Span` covering the first token and all its trailing whitespace or /// the original `Span`. /// /// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned. pub fn span_until_non_whitespace(&self, sp: Span) -> Span { let mut whitespace_found = false; self.span_take_while(sp, |c| { if !whitespace_found && c.is_whitespace() { whitespace_found = true; } if whitespace_found && !c.is_whitespace() { false } else { true } }) } /// Given a `Span`, get a new `Span` covering the first token without its trailing whitespace or /// the original `Span` in case of error. /// /// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned. pub fn span_until_whitespace(&self, sp: Span) -> Span { self.span_take_while(sp, |c| !c.is_whitespace()) } /// Given a `Span`, get a shorter one until `predicate` yields false. 
pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span where P: for <'r> FnMut(&'r char) -> bool { if let Ok(snippet) = self.span_to_snippet(sp) { let offset = snippet.chars() .take_while(predicate) .map(|c| c.len_utf8()) .sum::<usize>(); sp.with_hi(BytePos(sp.lo().0 + (offset as u32))) } else { sp } } pub fn def_span(&self, sp: Span) -> Span { self.span_until_char(sp, '{') } /// Returns a new span representing just the start-point of this span pub fn start_point(&self, sp: Span) -> Span { let pos = sp.lo().0; let width = self.find_width_of_character_at_span(sp, false); let corrected_start_position = pos.checked_add(width).unwrap_or(pos); let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0)); sp.with_hi(end_point) } /// Returns a new span representing just the end-point of this span pub fn end_point(&self, sp: Span) -> Span { let pos = sp.hi().0; let width = self.find_width_of_character_at_span(sp, false); let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); sp.with_lo(end_point) } /// Returns a new span representing the next character after the end-point of this span pub fn next_point(&self, sp: Span) -> Span { let start_of_next_point = sp.hi().0; let width = self.find_width_of_character_at_span(sp, true); // If the width is 1, then the next span should point to the same `lo` and `hi`. However, // in the case of a multibyte character, where the width != 1, the next span should // span multiple bytes to include the whole character. let end_of_next_point = start_of_next_point.checked_add( width - 1).unwrap_or(start_of_next_point); let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point)); Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt()) } /// Finds the width of a character, either before or after the provided span. 
fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { // Disregard malformed spans and assume a one-byte wide character. if sp.lo() >= sp.hi() { debug!("find_width_of_character_at_span: early return malformed span"); return 1; } let local_begin = self.lookup_byte_offset(sp.lo()); let local_end = self.lookup_byte_offset(sp.hi()); debug!("find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`", local_begin, local_end); let start_index = local_begin.pos.to_usize(); let end_index = local_end.pos.to_usize(); debug!("find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`", start_index, end_index); // Disregard indexes that are at the start or end of their spans, they can't fit bigger // characters. if (!forwards && end_index == usize::min_value()) || (forwards && start_index == usize::max_value()) { debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte"); return 1; } let source_len = (local_begin.fm.end_pos - local_begin.fm.start_pos).to_usize(); debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len); // Ensure indexes are also not malformed. if start_index > end_index || end_index > source_len { debug!("find_width_of_character_at_span: source indexes are malformed"); return 1; } let src = local_begin.fm.external_src.borrow(); // We need to extend the snippet to the end of the src rather than to end_index so when // searching forwards for boundaries we've got somewhere to search. 
let snippet = if let Some(ref src) = local_begin.fm.src { let len = src.len(); (&src[start_index..len]) } else if let Some(src) = src.get_source() { let len = src.len(); (&src[start_index..len]) } else { return 1; }; debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet); let mut target = if forwards { end_index + 1 } else { end_index - 1 }; debug!("find_width_of_character_at_span: initial target=`{:?}`", target); while !snippet.is_char_boundary(target - start_index) && target < source_len { target = if forwards { target + 1 } else { match target.checked_sub(1) { Some(target) => target, None => { break; } } }; debug!("find_width_of_character_at_span: target=`{:?}`", target); } debug!("find_width_of_character_at_span: final target=`{:?}`", target); if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 } } pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> { for fm in self.files.borrow().file_maps.iter() { if *filename == fm.name { return Some(fm.clone()); } } None } /// For a global BytePos compute the local offset within the containing SourceFile pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { let idx = self.lookup_source_file_idx(bpos); let fm = (*self.files.borrow().file_maps)[idx].clone(); let offset = bpos - fm.start_pos; SourceFileAndBytePos {fm: fm, pos: offset} } /// Converts an absolute BytePos to a CharPos relative to the source_file. pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { let idx = self.lookup_source_file_idx(bpos); let map = &(*self.files.borrow().file_maps)[idx]; // The number of extra bytes due to multibyte chars in the SourceFile let mut total_extra_bytes = 0; for mbc in map.multibyte_chars.iter() { debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); if mbc.pos < bpos { // every character is at least one byte, so we only // count the actual extra bytes. 
total_extra_bytes += mbc.bytes as u32 - 1; // We should never see a byte position in the middle of a // character assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); } else { break; } } assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize) } // Return the index of the source_file (in self.files) which contains pos. pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { let files = self.files.borrow(); let files = &files.file_maps; let count = files.len(); // Binary search for the source_file. let mut a = 0; let mut b = count; while b - a > 1 { let m = (a + b) / 2; if files[m].start_pos > pos { b = m; } else { a = m; } } assert!(a < count, "position {} does not resolve to a source location", pos.to_usize()); return a; } pub fn count_lines(&self) -> usize { self.files().iter().fold(0, |a, f| a + f.count_lines()) } pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> { let prev_span = self.span_extend_to_prev_str(span, "fn", true); self.span_to_snippet(prev_span).map(|snippet| { let len = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') .expect("no label after fn"); prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)) }).ok() } /// Take the span of a type parameter in a function signature and try to generate a span for the /// function name (with generics) and a new snippet for this span with the pointed type /// parameter as a new local type parameter. /// /// For instance: /// ```rust,ignore (pseudo-Rust) /// // Given span /// fn my_function(param: T) /// // ^ Original span /// /// // Result /// fn my_function(param: T) /// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>` /// ``` /// /// Attention: The method used is very fragile since it essentially duplicates the work of the /// parser. 
If you need to use this function or something similar, please consider updating the /// source_map functions and this function to something more robust. pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> { // Try to extend the span to the previous "fn" keyword to retrieve the function // signature let sugg_span = self.span_extend_to_prev_str(span, "fn", false); if sugg_span != span { if let Ok(snippet) = self.span_to_snippet(sugg_span) { // Consume the function name let mut offset = snippet.find(|c: char| !c.is_alphanumeric() && c != '_') .expect("no label after fn"); // Consume the generics part of the function signature let mut bracket_counter = 0; let mut last_char = None; for c in snippet[offset..].chars() { match c { '<' => bracket_counter += 1, '>' => bracket_counter -= 1, '(' => if bracket_counter == 0 { break; } _ => {} } offset += c.len_utf8(); last_char = Some(c); } // Adjust the suggestion span to encompass the function name with its generics let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32)); // Prepare the new suggested snippet to append the type parameter that triggered // the error in the generics of the function signature let mut new_snippet = if last_char == Some('>') { format!("{}, ", &snippet[..(offset - '>'.len_utf8())]) } else { format!("{}<", &snippet[..offset]) }; new_snippet.push_str(&self.span_to_snippet(span).unwrap_or("T".to_string())); new_snippet.push('>'); return Some((sugg_span, new_snippet)); } } None } } impl SourceMapper for SourceMap { fn lookup_char_pos(&self, pos: BytePos) -> Loc { self.lookup_char_pos(pos) } fn span_to_lines(&self, sp: Span) -> FileLinesResult { self.span_to_lines(sp) } fn span_to_string(&self, sp: Span) -> String { self.span_to_string(sp) } fn span_to_filename(&self, sp: Span) -> FileName { self.span_to_filename(sp) } fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> { self.merge_spans(sp_lhs, sp_rhs) } fn call_span_if_macro(&self, 
sp: Span) -> Span { if self.span_to_filename(sp.clone()).is_macros() { let v = sp.macro_backtrace(); if let Some(use_site) = v.last() { return use_site.call_site; } } sp } fn ensure_source_file_source_present(&self, file_map: Lrc<SourceFile>) -> bool { file_map.add_external_src( || match file_map.name { FileName::Real(ref name) => self.file_loader.read_file(name).ok(), _ => None, } ) } fn doctest_offset_line(&self, line: usize) -> usize { self.doctest_offset_line(line) } } #[derive(Clone)] pub struct FilePathMapping { mapping: Vec<(PathBuf, PathBuf)>, } impl FilePathMapping { pub fn empty() -> FilePathMapping { FilePathMapping { mapping: vec![] } } pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { FilePathMapping { mapping, } } /// Applies any path prefix substitution as defined by the mapping. /// The return value is the remapped path and a boolean indicating whether /// the path was affected by the mapping. pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) { // NOTE: We are iterating over the mapping entries from last to first // because entries specified later on the command line should // take precedence. 
for &(ref from, ref to) in self.mapping.iter().rev() { if let Ok(rest) = path.strip_prefix(from) { return (to.join(rest), true); } } (path, false) } } // _____________________________________________________________________________ // Tests // #[cfg(test)] mod tests { use super::*; use rustc_data_structures::sync::Lrc; fn init_code_map() -> SourceMap { let cm = SourceMap::new(FilePathMapping::empty()); cm.new_source_file(PathBuf::from("blork.rs").into(), "first line.\nsecond line".to_string()); cm.new_source_file(PathBuf::from("empty.rs").into(), "".to_string()); cm.new_source_file(PathBuf::from("blork2.rs").into(), "first line.\nsecond line".to_string()); cm } #[test] fn t3() { // Test lookup_byte_offset let cm = init_code_map(); let fmabp1 = cm.lookup_byte_offset(BytePos(23)); assert_eq!(fmabp1.fm.name, PathBuf::from("blork.rs").into()); assert_eq!(fmabp1.pos, BytePos(23)); let fmabp1 = cm.lookup_byte_offset(BytePos(24)); assert_eq!(fmabp1.fm.name, PathBuf::from("empty.rs").into()); assert_eq!(fmabp1.pos, BytePos(0)); let fmabp2 = cm.lookup_byte_offset(BytePos(25)); assert_eq!(fmabp2.fm.name, PathBuf::from("blork2.rs").into()); assert_eq!(fmabp2.pos, BytePos(0)); } #[test] fn t4() { // Test bytepos_to_file_charpos let cm = init_code_map(); let cp1 = cm.bytepos_to_file_charpos(BytePos(22)); assert_eq!(cp1, CharPos(22)); let cp2 = cm.bytepos_to_file_charpos(BytePos(25)); assert_eq!(cp2, CharPos(0)); } #[test] fn t5() { // Test zero-length source_files. let cm = init_code_map(); let loc1 = cm.lookup_char_pos(BytePos(22)); assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into()); assert_eq!(loc1.line, 2); assert_eq!(loc1.col, CharPos(10)); let loc2 = cm.lookup_char_pos(BytePos(25)); assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into()); assert_eq!(loc2.line, 1); assert_eq!(loc2.col, CharPos(0)); } fn init_code_map_mbc() -> SourceMap { let cm = SourceMap::new(FilePathMapping::empty()); // € is a three byte utf8 char. 
cm.new_source_file(PathBuf::from("blork.rs").into(), "fir€st €€€€ line.\nsecond line".to_string()); cm.new_source_file(PathBuf::from("blork2.rs").into(), "first line€€.\n€ second line".to_string()); cm } #[test] fn t6() { // Test bytepos_to_file_charpos in the presence of multi-byte chars let cm = init_code_map_mbc(); let cp1 = cm.bytepos_to_file_charpos(BytePos(3)); assert_eq!(cp1, CharPos(3)); let cp2 = cm.bytepos_to_file_charpos(BytePos(6)); assert_eq!(cp2, CharPos(4)); let cp3 = cm.bytepos_to_file_charpos(BytePos(56)); assert_eq!(cp3, CharPos(12)); let cp4 = cm.bytepos_to_file_charpos(BytePos(61)); assert_eq!(cp4, CharPos(15)); } #[test] fn t7() { // Test span_to_lines for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let file_lines = cm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); assert_eq!(file_lines.lines.len(), 1); assert_eq!(file_lines.lines[0].line_index, 1); } /// Given a string like " ~~~~~~~~~~~~ ", produces a span /// converting that range. The idea is that the string has the same /// length as the input, and we uncover the byte positions. Note /// that this can span lines and so on. fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) } /// Test span_to_snippet and span_to_lines for a span converting 3 /// lines in the middle of a file. 
#[test] fn span_to_snippet_and_lines_spanning_multiple_lines() { let cm = SourceMap::new(FilePathMapping::empty()); let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; let selection = " \n ~~\n~~~\n~~~~~ \n \n"; cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string()); let span = span_from_selection(inputtext, selection); // check that we are extracting the text we thought we were extracting assert_eq!(&cm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD"); // check that span_to_lines gives us the complete result with the lines/cols we expected let lines = cm.span_to_lines(span).unwrap(); let expected = vec![ LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) }, LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) }, LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) } ]; assert_eq!(lines.lines, expected); } #[test] fn t8() { // Test span_to_snippet for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let snippet = cm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); } #[test] fn t9() { // Test span_to_str for a span ending at the end of source_file let cm = init_code_map(); let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let sstr = cm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); } /// Test failing to merge two spans on different lines #[test] fn span_merging_fail() { let cm = SourceMap::new(FilePathMapping::empty()); let inputtext = "bbbb BB\ncc CCC\n"; let selection1 = " ~~\n \n"; let selection2 = " \n ~~~\n"; cm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned()); let span1 = span_from_selection(inputtext, selection1); let span2 = span_from_selection(inputtext, selection2); assert!(cm.merge_spans(span1, span2).is_none()); } /// Returns the span corresponding to the `n`th occurrence of /// `substring` in `source_text`. 
trait SourceMapExtension { fn span_substr(&self, file: &Lrc<SourceFile>, source_text: &str, substring: &str, n: usize) -> Span; } impl SourceMapExtension for SourceMap { fn span_substr(&self, file: &Lrc<SourceFile>, source_text: &str, substring: &str, n: usize) -> Span { println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", file.name, file.start_pos, substring, n); let mut i = 0; let mut hi = 0; loop { let offset = source_text[hi..].find(substring).unwrap_or_else(|| { panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", source_text, n, substring, i); }); let lo = hi + offset; hi = lo + substring.len(); if i == n { let span = Span::new( BytePos(lo as u32 + file.start_pos.0), BytePos(hi as u32 + file.start_pos.0), NO_EXPANSION, ); assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); return span; } i += 1; } } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/test_snippet.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use source_map::{SourceMap, FilePathMapping}; use errors::Handler; use errors::emitter::EmitterWriter; use std::io; use std::io::prelude::*; use rustc_data_structures::sync::Lrc; use std::str; use std::sync::{Arc, Mutex}; use std::path::Path; use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; use with_globals; /// Identify a position in the text by the Nth occurrence of a string. struct Position { string: &'static str, count: usize, } struct SpanLabel { start: Position, end: Position, label: &'static str, } struct Shared<T: Write> { data: Arc<Mutex<T>>, } impl<T: Write> Write for Shared<T> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.data.lock().unwrap().write(buf) } fn flush(&mut self) -> io::Result<()> { self.data.lock().unwrap().flush() } } fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) { with_globals(|| { let output = Arc::new(Mutex::new(Vec::new())); let code_map = Lrc::new(SourceMap::new(FilePathMapping::empty())); code_map.new_source_file(Path::new("test.rs").to_owned().into(), file_text.to_owned()); let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); let mut msp = MultiSpan::from_span(primary_span); for span_label in span_labels { let span = make_span(&file_text, &span_label.start, &span_label.end); msp.push_span_label(span, span_label.label.to_string()); println!("span: {:?} label: {:?}", span, span_label.label); println!("text: {:?}", code_map.span_to_snippet(span)); } let emitter = 
EmitterWriter::new(Box::new(Shared { data: output.clone() }), Some(code_map.clone()), false, false); let handler = Handler::with_emitter(true, false, Box::new(emitter)); handler.span_err(msp, "foo"); assert!(expected_output.chars().next() == Some('\n'), "expected output should begin with newline"); let expected_output = &expected_output[1..]; let bytes = output.lock().unwrap(); let actual_output = str::from_utf8(&bytes).unwrap(); println!("expected output:\n------\n{}------", expected_output); println!("actual output:\n------\n{}------", actual_output); assert!(expected_output == actual_output) }) } fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { let start = make_pos(file_text, start); let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends assert!(start <= end); Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION) } fn make_pos(file_text: &str, pos: &Position) -> usize { let mut remainder = file_text; let mut offset = 0; for _ in 0..pos.count { if let Some(n) = remainder.find(&pos.string) { offset += n; remainder = &remainder[n + 1..]; } else { panic!("failed to find {} instances of {:?} in {:?}", pos.count, pos.string, file_text); } } offset } #[test] fn ends_on_col0() { test_harness(r#" fn foo() { } "#, vec![ SpanLabel { start: Position { string: "{", count: 1, }, end: Position { string: "}", count: 1, }, label: "test", }, ], r#" error: foo --> test.rs:2:10 | 2 | fn foo() { | __________^ 3 | | } | |_^ test "#); } #[test] fn ends_on_col2() { test_harness(r#" fn foo() { } "#, vec![ SpanLabel { start: Position { string: "{", count: 1, }, end: Position { string: "}", count: 1, }, label: "test", }, ], r#" error: foo --> test.rs:2:10 | 2 | fn foo() { | __________^ 3 | | 4 | | 5 | | } | |___^ test "#); } #[test] fn non_nested() { test_harness(r#" fn foo() { X0 Y0 X1 Y1 X2 Y2 } "#, vec![ SpanLabel { start: Position { string: "X0", count: 1, }, end: Position { string: "X2", count: 1, }, label: "`X` 
is a good letter", }, SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "Y2", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:3 | 3 | X0 Y0 | ____^__- | | ___| | || 4 | || X1 Y1 5 | || X2 Y2 | ||____^__- `Y` is a good letter too | |____| | `X` is a good letter "#); } #[test] fn nested() { test_harness(r#" fn foo() { X0 Y0 Y1 X1 } "#, vec![ SpanLabel { start: Position { string: "X0", count: 1, }, end: Position { string: "X1", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "Y1", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:3 | 3 | X0 Y0 | ____^__- | | ___| | || 4 | || Y1 X1 | ||____-__^ `X` is a good letter | |_____| | `Y` is a good letter too "#); } #[test] fn different_overlap() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 X2 Y2 Z2 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "X2", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Z1", count: 1, }, end: Position { string: "X3", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 | ______^ 4 | | X1 Y1 Z1 | |_________- 5 | || X2 Y2 Z2 | ||____^ `X` is a good letter 6 | | X3 Y3 Z3 | |_____- `Y` is a good letter too "#); } #[test] fn triple_overlap() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 X2 Y2 Z2 } "#, vec![ SpanLabel { start: Position { string: "X0", count: 1, }, end: Position { string: "X2", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "Y2", count: 1, }, label: "`Y` is a good letter too", }, SpanLabel { start: Position { string: "Z0", count: 1, }, end: Position { string: "Z2", count: 1, }, label: "`Z` label", }, ], r#" error: foo --> test.rs:3:3 | 3 | X0 Y0 Z0 | _____^__-__- | | ____|__| | || ___| | ||| 
4 | ||| X1 Y1 Z1 5 | ||| X2 Y2 Z2 | |||____^__-__- `Z` label | ||____|__| | |____| `Y` is a good letter too | `X` is a good letter "#); } #[test] fn minimum_depth() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 X2 Y2 Z2 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "X1", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Y1", count: 1, }, end: Position { string: "Z2", count: 1, }, label: "`Y` is a good letter too", }, SpanLabel { start: Position { string: "X2", count: 1, }, end: Position { string: "Y3", count: 1, }, label: "`Z`", }, ], r#" error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 | ______^ 4 | | X1 Y1 Z1 | |____^_- | ||____| | | `X` is a good letter 5 | | X2 Y2 Z2 | |____-______- `Y` is a good letter too | ____| | | 6 | | X3 Y3 Z3 | |________- `Z` "#); } #[test] fn non_overlaping() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 X2 Y2 Z2 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "X0", count: 1, }, end: Position { string: "X1", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Y2", count: 1, }, end: Position { string: "Z3", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:3 | 3 | / X0 Y0 Z0 4 | | X1 Y1 Z1 | |____^ `X` is a good letter 5 | X2 Y2 Z2 | ______- 6 | | X3 Y3 Z3 | |__________- `Y` is a good letter too "#); } #[test] fn overlaping_start_and_end() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 X2 Y2 Z2 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "X1", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Z1", count: 1, }, end: Position { string: "Z3", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 | ______^ 4 | | X1 Y1 Z1 | |____^____- | ||____| | | `X` is a good letter 5 | | X2 Y2 Z2 6 | | X3 Y3 Z3 | |___________- `Y` is a good letter too 
"#); } #[test] fn multiple_labels_primary_without_message() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "", }, SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "`a` is a good letter", }, SpanLabel { start: Position { string: "c", count: 1, }, end: Position { string: "c", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:7 | 3 | a { b { c } d } | ----^^^^-^^-- `a` is a good letter "#); } #[test] fn multiple_labels_secondary_without_message() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "`a` is a good letter", }, SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:3 | 3 | a { b { c } d } | ^^^^-------^^ `a` is a good letter "#); } #[test] fn multiple_labels_primary_without_message_2() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "`b` is a good letter", }, SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, SpanLabel { start: Position { string: "c", count: 1, }, end: Position { string: "c", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:7 | 3 | a { b { c } d } | ----^^^^-^^-- | | | `b` is a good letter "#); } #[test] fn multiple_labels_secondary_without_message_2() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "`b` is a good letter", }, ], r#" error: foo --> test.rs:3:3 
| 3 | a { b { c } d } | ^^^^-------^^ | | | `b` is a good letter "#); } #[test] fn multiple_labels_secondary_without_message_3() { test_harness(r#" fn foo() { a bc d } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "b", count: 1, }, label: "`a` is a good letter", }, SpanLabel { start: Position { string: "c", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:3 | 3 | a bc d | ^^^^---- | | | `a` is a good letter "#); } #[test] fn multiple_labels_without_message() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:3 | 3 | a { b { c } d } | ^^^^-------^^ "#); } #[test] fn multiple_labels_without_message_2() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "", }, SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, SpanLabel { start: Position { string: "c", count: 1, }, end: Position { string: "c", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:7 | 3 | a { b { c } d } | ----^^^^-^^-- "#); } #[test] fn multiple_labels_with_message() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "`a` is a good letter", }, SpanLabel { start: Position { string: "b", count: 1, }, end: Position { string: "}", count: 1, }, label: "`b` is a good letter", }, ], r#" error: foo --> test.rs:3:3 | 3 | a { b { c } d } | ^^^^-------^^ | | | | | `b` is a good letter | `a` is a good letter "#); } #[test] fn single_label_with_message() { test_harness(r#" fn 
foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "`a` is a good letter", }, ], r#" error: foo --> test.rs:3:3 | 3 | a { b { c } d } | ^^^^^^^^^^^^^ `a` is a good letter "#); } #[test] fn single_label_without_message() { test_harness(r#" fn foo() { a { b { c } d } } "#, vec![ SpanLabel { start: Position { string: "a", count: 1, }, end: Position { string: "d", count: 1, }, label: "", }, ], r#" error: foo --> test.rs:3:3 | 3 | a { b { c } d } | ^^^^^^^^^^^^^ "#); } #[test] fn long_snippet() { test_harness(r#" fn foo() { X0 Y0 Z0 X1 Y1 Z1 1 2 3 4 5 6 7 8 9 10 X2 Y2 Z2 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "X1", count: 1, }, label: "`X` is a good letter", }, SpanLabel { start: Position { string: "Z1", count: 1, }, end: Position { string: "Z3", count: 1, }, label: "`Y` is a good letter too", }, ], r#" error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 | ______^ 4 | | X1 Y1 Z1 | |____^____- | ||____| | | `X` is a good letter 5 | | 1 6 | | 2 7 | | 3 ... | 15 | | X2 Y2 Z2 16 | | X3 Y3 Z3 | |___________- `Y` is a good letter too "#); } #[test] fn long_snippet_multiple_spans() { test_harness(r#" fn foo() { X0 Y0 Z0 1 2 3 X1 Y1 Z1 4 5 6 X2 Y2 Z2 7 8 9 10 X3 Y3 Z3 } "#, vec![ SpanLabel { start: Position { string: "Y0", count: 1, }, end: Position { string: "Y3", count: 1, }, label: "`Y` is a good letter", }, SpanLabel { start: Position { string: "Z1", count: 1, }, end: Position { string: "Z2", count: 1, }, label: "`Z` is a good letter too", }, ], r#" error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 | ______^ 4 | | 1 5 | | 2 6 | | 3 7 | | X1 Y1 Z1 | |_________- 8 | || 4 9 | || 5 10 | || 6 11 | || X2 Y2 Z2 | ||__________- `Z` is a good letter too ... | 15 | | 10 16 | | X3 Y3 Z3 | |_______^ `Y` is a good letter "#); }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/config.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use attr::HasAttrs; use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; use {fold, attr}; use ast; use source_map::Spanned; use edition::Edition; use parse::{token, ParseSess}; use OneVector; use ptr::P; /// A folder that strips out items that do not belong in the current configuration. pub struct StripUnconfigured<'a> { pub should_test: bool, pub sess: &'a ParseSess, pub features: Option<&'a Features>, } // `cfg_attr`-process the crate's attributes and compute the crate's features. pub fn features(mut krate: ast::Crate, sess: &ParseSess, should_test: bool, edition: Edition) -> (ast::Crate, Features) { let features; { let mut strip_unconfigured = StripUnconfigured { should_test, sess, features: None, }; let unconfigured_attrs = krate.attrs.clone(); let err_count = sess.span_diagnostic.err_count(); if let Some(attrs) = strip_unconfigured.configure(krate.attrs) { krate.attrs = attrs; } else { // the entire crate is unconfigured krate.attrs = Vec::new(); krate.module.items = Vec::new(); return (krate, Features::new()); } features = get_features(&sess.span_diagnostic, &krate.attrs, edition); // Avoid reconfiguring malformed `cfg_attr`s if err_count == sess.span_diagnostic.err_count() { strip_unconfigured.features = Some(&features); strip_unconfigured.configure(unconfigured_attrs); } } (krate, features) } macro_rules! 
configure { ($this:ident, $node:ident) => { match $this.configure($node) { Some(node) => node, None => return Default::default(), } } } impl<'a> StripUnconfigured<'a> { pub fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> { let node = self.process_cfg_attrs(node); if self.in_cfg(node.attrs()) { Some(node) } else { None } } pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: T) -> T { node.map_attrs(|attrs| { attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect() }) } fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> { if !attr.check_name("cfg_attr") { return Some(attr); } let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| { parser.expect(&token::OpenDelim(token::Paren))?; let cfg = parser.parse_meta_item()?; parser.expect(&token::Comma)?; let lo = parser.span.lo(); let (path, tokens) = parser.parse_meta_item_unrestricted()?; parser.expect(&token::CloseDelim(token::Paren))?; Ok((cfg, path, tokens, parser.prev_span.with_lo(lo))) }) { Ok(result) => result, Err(mut e) => { e.emit(); return None; } }; if attr::cfg_matches(&cfg, self.sess, self.features) { self.process_cfg_attr(ast::Attribute { id: attr::mk_attr_id(), style: attr.style, path, tokens, is_sugared_doc: false, span, }) } else { None } } // Determine if a node with the given attributes should be included in this configuration. 
pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { attrs.iter().all(|attr| { // When not compiling with --test we should not compile the #[test] functions if !self.should_test && is_test_or_bench(attr) { return false; } let mis = if !is_cfg(attr) { return true; } else if let Some(mis) = attr.meta_item_list() { mis } else { return true; }; if mis.len() != 1 { self.sess.span_diagnostic.span_err(attr.span, "expected 1 cfg-pattern"); return true; } if !mis[0].is_meta_item() { self.sess.span_diagnostic.span_err(mis[0].span, "unexpected literal"); return true; } attr::cfg_matches(mis[0].meta_item().unwrap(), self.sess, self.features) }) } // Visit attributes on expression and statements (but not attributes on items in blocks). fn visit_expr_attrs(&mut self, attrs: &[ast::Attribute]) { // flag the offending attributes for attr in attrs.iter() { self.maybe_emit_expr_attr_err(attr); } } /// If attributes are not allowed on expressions, emit an error for `attr` pub fn maybe_emit_expr_attr_err(&self, attr: &ast::Attribute) { if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) { let mut err = feature_err(self.sess, "stmt_expr_attributes", attr.span, GateIssue::Language, EXPLAIN_STMT_ATTR_SYNTAX); if attr.is_sugared_doc { err.help("`///` is for documentation comments. 
For a plain comment, use `//`."); } err.emit(); } } pub fn configure_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { ast::ForeignMod { abi: foreign_mod.abi, items: foreign_mod.items.into_iter().filter_map(|item| self.configure(item)).collect(), } } fn configure_variant_data(&mut self, vdata: ast::VariantData) -> ast::VariantData { match vdata { ast::VariantData::Struct(fields, id) => { let fields = fields.into_iter().filter_map(|field| self.configure(field)); ast::VariantData::Struct(fields.collect(), id) } ast::VariantData::Tuple(fields, id) => { let fields = fields.into_iter().filter_map(|field| self.configure(field)); ast::VariantData::Tuple(fields.collect(), id) } ast::VariantData::Unit(id) => ast::VariantData::Unit(id) } } pub fn configure_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind { match item { ast::ItemKind::Struct(def, generics) => { ast::ItemKind::Struct(self.configure_variant_data(def), generics) } ast::ItemKind::Union(def, generics) => { ast::ItemKind::Union(self.configure_variant_data(def), generics) } ast::ItemKind::Enum(def, generics) => { let variants = def.variants.into_iter().filter_map(|v| { self.configure(v).map(|v| { Spanned { node: ast::Variant_ { ident: v.node.ident, attrs: v.node.attrs, data: self.configure_variant_data(v.node.data), disr_expr: v.node.disr_expr, }, span: v.span } }) }); ast::ItemKind::Enum(ast::EnumDef { variants: variants.collect(), }, generics) } item => item, } } pub fn configure_expr_kind(&mut self, expr_kind: ast::ExprKind) -> ast::ExprKind { match expr_kind { ast::ExprKind::Match(m, arms) => { let arms = arms.into_iter().filter_map(|a| self.configure(a)).collect(); ast::ExprKind::Match(m, arms) } ast::ExprKind::Struct(path, fields, base) => { let fields = fields.into_iter() .filter_map(|field| { self.configure(field) }) .collect(); ast::ExprKind::Struct(path, fields, base) } _ => expr_kind, } } pub fn configure_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> { 
self.visit_expr_attrs(expr.attrs()); // If an expr is valid to cfg away it will have been removed by the // outer stmt or expression folder before descending in here. // Anything else is always required, and thus has to error out // in case of a cfg attr. // // NB: This is intentionally not part of the fold_expr() function // in order for fold_opt_expr() to be able to avoid this check if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a) || is_test_or_bench(a)) { let msg = "removing an expression is not supported in this position"; self.sess.span_diagnostic.span_err(attr.span, msg); } self.process_cfg_attrs(expr) } pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option<ast::Stmt> { self.configure(stmt) } pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> { self.configure(field) } pub fn configure_pat(&mut self, pattern: P<ast::Pat>) -> P<ast::Pat> { pattern.map(|mut pattern| { if let ast::PatKind::Struct(path, fields, etc) = pattern.node { let fields = fields.into_iter() .filter_map(|field| { self.configure(field) }) .collect(); pattern.node = ast::PatKind::Struct(path, fields, etc); } pattern }) } // deny #[cfg] on generic parameters until we decide what to do with it. // see issue #51279. 
pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) { for attr in param.attrs() { let offending_attr = if attr.check_name("cfg") { "cfg" } else if attr.check_name("cfg_attr") { "cfg_attr" } else { continue; }; let msg = format!("#[{}] cannot be applied on a generic parameter", offending_attr); self.sess.span_diagnostic.span_err(attr.span, &msg); } } } impl<'a> fold::Folder for StripUnconfigured<'a> { fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { let foreign_mod = self.configure_foreign_mod(foreign_mod); fold::noop_fold_foreign_mod(foreign_mod, self) } fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind { let item = self.configure_item_kind(item); fold::noop_fold_item_kind(item, self) } fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> { let mut expr = self.configure_expr(expr).into_inner(); expr.node = self.configure_expr_kind(expr.node); P(fold::noop_fold_expr(expr, self)) } fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> { let mut expr = configure!(self, expr).into_inner(); expr.node = self.configure_expr_kind(expr.node); Some(P(fold::noop_fold_expr(expr, self))) } fn fold_stmt(&mut self, stmt: ast::Stmt) -> OneVector<ast::Stmt> { match self.configure_stmt(stmt) { Some(stmt) => fold::noop_fold_stmt(stmt, self), None => return OneVector::new(), } } fn fold_item(&mut self, item: P<ast::Item>) -> OneVector<P<ast::Item>> { fold::noop_fold_item(configure!(self, item), self) } fn fold_impl_item(&mut self, item: ast::ImplItem) -> OneVector<ast::ImplItem> { fold::noop_fold_impl_item(configure!(self, item), self) } fn fold_trait_item(&mut self, item: ast::TraitItem) -> OneVector<ast::TraitItem> { fold::noop_fold_trait_item(configure!(self, item), self) } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { // Don't configure interpolated AST (c.f. #34171). // Interpolated AST will get configured once the surrounding tokens are parsed. 
mac } fn fold_pat(&mut self, pattern: P<ast::Pat>) -> P<ast::Pat> { fold::noop_fold_pat(self.configure_pat(pattern), self) } } fn is_cfg(attr: &ast::Attribute) -> bool { attr.check_name("cfg") } pub fn is_test_or_bench(attr: &ast::Attribute) -> bool { attr.check_name("test") || attr.check_name("bench") }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/lib.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The Rust parser and macro expander. //! //! # Note //! //! This API is completely unstable and subject to change. #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))] #![feature(crate_visibility_modifier)] #![feature(macro_at_most_once_rep)] #![cfg_attr(not(stage0), feature(nll))] #![feature(rustc_attrs)] #![feature(rustc_diagnostic_macros)] #![feature(slice_sort_by_cached_key)] #![feature(str_escape)] #![feature(unicode_internals)] // #![feature(catch_expr)] #![feature(rustc_private)] #![recursion_limit="256"] #[macro_use] extern crate bitflags; extern crate core; extern crate serialize; #[macro_use] extern crate log; pub extern crate rustc_errors as errors; extern crate syntax_pos; extern crate rustc_data_structures; extern crate rustc_target; #[macro_use] extern crate scoped_tls; #[macro_use] extern crate smallvec; extern crate serialize as rustc_serialize; // used by deriving use rustc_data_structures::sync::Lock; use rustc_data_structures::bitvec::BitVector; pub use rustc_data_structures::small_vec::OneVector; pub use rustc_data_structures::thin_vec::ThinVec; use ast::AttrId; // A variant of 'try!' that panics on an Err. This is used as a crutch on the // way towards a non-panic!-prone parser. It should be used for fatal parsing // errors; eventually we plan to convert all code using panictry to just use // normal try. 
// Exported for syntax_ext, not meant for general use. #[macro_export] macro_rules! panictry { ($e:expr) => ({ use std::result::Result::{Ok, Err}; use errors::FatalError; match $e { Ok(e) => e, Err(mut e) => { e.emit(); FatalError.raise() } } }) } #[macro_export] macro_rules! unwrap_or { ($opt:expr, $default:expr) => { match $opt { Some(x) => x, None => $default, } } } pub struct Globals { used_attrs: Lock<BitVector<AttrId>>, known_attrs: Lock<BitVector<AttrId>>, syntax_pos_globals: syntax_pos::Globals, } impl Globals { fn new() -> Globals { Globals { // We have no idea how many attributes their will be, so just // initiate the vectors with 0 bits. We'll grow them as necessary. used_attrs: Lock::new(BitVector::new()), known_attrs: Lock::new(BitVector::new()), syntax_pos_globals: syntax_pos::Globals::new(), } } } pub fn with_globals<F, R>(f: F) -> R where F: FnOnce() -> R { let globals = Globals::new(); GLOBALS.set(&globals, || { syntax_pos::GLOBALS.set(&globals.syntax_pos_globals, f) }) } scoped_thread_local!(pub static GLOBALS: Globals); #[macro_use] pub mod diagnostics { #[macro_use] pub mod macros; pub mod plugin; pub mod metadata; } // NB: This module needs to be declared first so diagnostics are // registered before they are used. 
pub mod diagnostic_list; pub mod util { pub mod lev_distance; pub mod node_count; pub mod parser; #[cfg(test)] pub mod parser_testing; pub mod move_map; mod rc_slice; pub use self::rc_slice::RcSlice; mod rc_vec; pub use self::rc_vec::RcVec; } pub mod json; pub mod syntax { pub use ext; pub use parse; pub use ast; } pub mod ast; pub mod attr; pub mod source_map; #[macro_use] pub mod config; pub mod entry; pub mod feature_gate; pub mod fold; pub mod parse; pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; pub use syntax_pos::edition; pub use syntax_pos::symbol; pub mod test; pub mod tokenstream; pub mod visit; pub mod print { pub mod pp; pub mod pprust; } pub mod ext { pub use syntax_pos::hygiene; pub mod base; pub mod build; pub mod derive; pub mod expand; pub mod placeholders; pub mod quote; pub mod source_util; pub mod tt { pub mod transcribe; pub mod macro_parser; pub mod macro_rules; pub mod quoted; } } pub mod early_buffered_lints; #[cfg(test)] mod test_snippet; // __build_diagnostic_array! { libsyntax, DIAGNOSTICS }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/visit.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! AST walker. Each overridden visit method has full control over what //! happens with its node, it can do its own traversal of the node's children, //! call `visit::walk_*` to apply the default traversal algorithm, or prevent //! deeper traversal by doing nothing. //! //! Note: it is an important invariant that the default visitor walks the body //! of a function in "execution order" (more concretely, reverse post-order //! with respect to the CFG implied by the AST), meaning that if AST node A may //! execute before AST node B, then A is visited first. The borrow checker in //! particular relies on this property. //! //! Note: walking an AST before macro expansion is probably a bad idea. For //! instance, a walker looking for item names in a module will miss all of //! those that are created by the expansion of a macro. use ast::*; use syntax_pos::Span; use parse::token::Token; use tokenstream::{TokenTree, TokenStream}; #[derive(Copy, Clone)] pub enum FnKind<'a> { /// fn foo() or extern "Abi" fn foo() ItemFn(Ident, FnHeader, &'a Visibility, &'a Block), /// fn foo(&self) Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a Block), /// |x, y| body Closure(&'a Expr), } /// Each method of the Visitor trait is a hook to be potentially /// overridden. Each method's default implementation recursively visits /// the substructure of the input via the corresponding `walk` method; /// e.g. the `visit_mod` method by default calls `visit::walk_mod`. 
/// /// If you want to ensure that your code handles every variant /// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) pub trait Visitor<'ast>: Sized { fn visit_name(&mut self, _span: Span, _name: Name) { // Nothing to do. } fn visit_ident(&mut self, ident: Ident) { walk_ident(self, ident); } fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _attrs: &[Attribute], _n: NodeId) { walk_mod(self, m); } fn visit_foreign_item(&mut self, i: &'ast ForeignItem) { walk_foreign_item(self, i) } fn visit_global_asm(&mut self, ga: &'ast GlobalAsm) { walk_global_asm(self, ga) } fn visit_item(&mut self, i: &'ast Item) { walk_item(self, i) } fn visit_local(&mut self, l: &'ast Local) { walk_local(self, l) } fn visit_block(&mut self, b: &'ast Block) { walk_block(self, b) } fn visit_stmt(&mut self, s: &'ast Stmt) { walk_stmt(self, s) } fn visit_arm(&mut self, a: &'ast Arm) { walk_arm(self, a) } fn visit_pat(&mut self, p: &'ast Pat) { walk_pat(self, p) } fn visit_anon_const(&mut self, c: &'ast AnonConst) { walk_anon_const(self, c) } fn visit_expr(&mut self, ex: &'ast Expr) { walk_expr(self, ex) } fn visit_expr_post(&mut self, _ex: &'ast Expr) { } fn visit_ty(&mut self, t: &'ast Ty) { walk_ty(self, t) } fn visit_generic_param(&mut self, param: &'ast GenericParam) { walk_generic_param(self, param) } fn visit_generics(&mut self, g: &'ast Generics) { walk_generics(self, g) } fn visit_where_predicate(&mut self, p: &'ast WherePredicate) { walk_where_predicate(self, p) } fn visit_fn(&mut self, fk: FnKind<'ast>, fd: &'ast FnDecl, s: Span, _: NodeId) { walk_fn(self, fk, fd, s) } fn visit_trait_item(&mut self, ti: &'ast TraitItem) { walk_trait_item(self, ti) } fn visit_impl_item(&mut self, ii: &'ast ImplItem) { walk_impl_item(self, ii) } fn visit_trait_ref(&mut self, t: &'ast TraitRef) { walk_trait_ref(self, t) } fn visit_param_bound(&mut self, bounds: &'ast 
GenericBound) { walk_param_bound(self, bounds) } fn visit_poly_trait_ref(&mut self, t: &'ast PolyTraitRef, m: &'ast TraitBoundModifier) { walk_poly_trait_ref(self, t, m) } fn visit_variant_data(&mut self, s: &'ast VariantData, _: Ident, _: &'ast Generics, _: NodeId, _: Span) { walk_struct_def(self, s) } fn visit_struct_field(&mut self, s: &'ast StructField) { walk_struct_field(self, s) } fn visit_enum_def(&mut self, enum_definition: &'ast EnumDef, generics: &'ast Generics, item_id: NodeId, _: Span) { walk_enum_def(self, enum_definition, generics, item_id) } fn visit_variant(&mut self, v: &'ast Variant, g: &'ast Generics, item_id: NodeId) { walk_variant(self, v, g, item_id) } fn visit_label(&mut self, label: &'ast Label) { walk_label(self, label) } fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) { walk_lifetime(self, lifetime) } fn visit_mac(&mut self, _mac: &'ast Mac) { panic!("visit_mac disabled by default"); // NB: see note about macros above. // if you really want a visitor that // works on macros, use this // definition in your trait impl: // visit::walk_mac(self, _mac) } fn visit_mac_def(&mut self, _mac: &'ast MacroDef, _id: NodeId) { // Nothing to do } fn visit_path(&mut self, path: &'ast Path, _id: NodeId) { walk_path(self, path) } fn visit_use_tree(&mut self, use_tree: &'ast UseTree, id: NodeId, _nested: bool) { walk_use_tree(self, use_tree, id) } fn visit_path_segment(&mut self, path_span: Span, path_segment: &'ast PathSegment) { walk_path_segment(self, path_span, path_segment) } fn visit_generic_args(&mut self, path_span: Span, generic_args: &'ast GenericArgs) { walk_generic_args(self, path_span, generic_args) } fn visit_generic_arg(&mut self, generic_arg: &'ast GenericArg) { match generic_arg { GenericArg::Lifetime(lt) => self.visit_lifetime(lt), GenericArg::Type(ty) => self.visit_ty(ty), } } fn visit_assoc_type_binding(&mut self, type_binding: &'ast TypeBinding) { walk_assoc_type_binding(self, type_binding) } fn visit_attribute(&mut self, attr: 
&'ast Attribute) { walk_attribute(self, attr) } fn visit_tt(&mut self, tt: TokenTree) { walk_tt(self, tt) } fn visit_tts(&mut self, tts: TokenStream) { walk_tts(self, tts) } fn visit_token(&mut self, _t: Token) {} // FIXME: add `visit_interpolated` and `walk_interpolated` fn visit_vis(&mut self, vis: &'ast Visibility) { walk_vis(self, vis) } fn visit_fn_ret_ty(&mut self, ret_ty: &'ast FunctionRetTy) { walk_fn_ret_ty(self, ret_ty) } } #[macro_export] macro_rules! walk_list { ($visitor: expr, $method: ident, $list: expr) => { for elem in $list { $visitor.$method(elem) } }; ($visitor: expr, $method: ident, $list: expr, $($extra_args: expr),*) => { for elem in $list { $visitor.$method(elem, $($extra_args,)*) } } } pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, ident: Ident) { visitor.visit_name(ident.span, ident.name); } pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) { visitor.visit_mod(&krate.module, krate.span, &krate.attrs, CRATE_NODE_ID); walk_list!(visitor, visit_attribute, &krate.attrs); } pub fn walk_mod<'a, V: Visitor<'a>>(visitor: &mut V, module: &'a Mod) { walk_list!(visitor, visit_item, &module.items); } pub fn walk_local<'a, V: Visitor<'a>>(visitor: &mut V, local: &'a Local) { for attr in local.attrs.iter() { visitor.visit_attribute(attr); } visitor.visit_pat(&local.pat); walk_list!(visitor, visit_ty, &local.ty); walk_list!(visitor, visit_expr, &local.init); } pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) { visitor.visit_ident(label.ident); } pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) { visitor.visit_ident(lifetime.ident); } pub fn walk_poly_trait_ref<'a, V>(visitor: &mut V, trait_ref: &'a PolyTraitRef, _: &TraitBoundModifier) where V: Visitor<'a>, { walk_list!(visitor, visit_generic_param, &trait_ref.bound_generic_params); visitor.visit_trait_ref(&trait_ref.trait_ref); } pub fn walk_trait_ref<'a, V: Visitor<'a>>(visitor: &mut V, trait_ref: &'a TraitRef) { 
visitor.visit_path(&trait_ref.path, trait_ref.ref_id) } pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) { visitor.visit_vis(&item.vis); visitor.visit_ident(item.ident); match item.node { ItemKind::ExternCrate(orig_name) => { if let Some(orig_name) = orig_name { visitor.visit_name(item.span, orig_name); } } ItemKind::Use(ref use_tree) => { visitor.visit_use_tree(use_tree, item.id, false) } ItemKind::Static(ref typ, _, ref expr) | ItemKind::Const(ref typ, ref expr) => { visitor.visit_ty(typ); visitor.visit_expr(expr); } ItemKind::Fn(ref declaration, header, ref generics, ref body) => { visitor.visit_generics(generics); visitor.visit_fn(FnKind::ItemFn(item.ident, header, &item.vis, body), declaration, item.span, item.id) } ItemKind::Mod(ref module) => { visitor.visit_mod(module, item.span, &item.attrs, item.id) } ItemKind::ForeignMod(ref foreign_module) => { walk_list!(visitor, visit_foreign_item, &foreign_module.items); } ItemKind::GlobalAsm(ref ga) => visitor.visit_global_asm(ga), ItemKind::Ty(ref typ, ref type_parameters) => { visitor.visit_ty(typ); visitor.visit_generics(type_parameters) } ItemKind::Existential(ref bounds, ref type_parameters) => { walk_list!(visitor, visit_param_bound, bounds); visitor.visit_generics(type_parameters) } ItemKind::Enum(ref enum_definition, ref type_parameters) => { visitor.visit_generics(type_parameters); visitor.visit_enum_def(enum_definition, type_parameters, item.id, item.span) } ItemKind::Impl(_, _, _, ref type_parameters, ref opt_trait_reference, ref typ, ref impl_items) => { visitor.visit_generics(type_parameters); walk_list!(visitor, visit_trait_ref, opt_trait_reference); visitor.visit_ty(typ); walk_list!(visitor, visit_impl_item, impl_items); } ItemKind::Struct(ref struct_definition, ref generics) | ItemKind::Union(ref struct_definition, ref generics) => { visitor.visit_generics(generics); visitor.visit_variant_data(struct_definition, item.ident, generics, item.id, item.span); } ItemKind::Trait(.., ref 
generics, ref bounds, ref methods) => { visitor.visit_generics(generics); walk_list!(visitor, visit_param_bound, bounds); walk_list!(visitor, visit_trait_item, methods); } ItemKind::TraitAlias(ref generics, ref bounds) => { visitor.visit_generics(generics); walk_list!(visitor, visit_param_bound, bounds); } ItemKind::Mac(ref mac) => visitor.visit_mac(mac), ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id), } walk_list!(visitor, visit_attribute, &item.attrs); } pub fn walk_enum_def<'a, V: Visitor<'a>>(visitor: &mut V, enum_definition: &'a EnumDef, generics: &'a Generics, item_id: NodeId) { walk_list!(visitor, visit_variant, &enum_definition.variants, generics, item_id); } pub fn walk_variant<'a, V>(visitor: &mut V, variant: &'a Variant, generics: &'a Generics, item_id: NodeId) where V: Visitor<'a>, { visitor.visit_ident(variant.node.ident); visitor.visit_variant_data(&variant.node.data, variant.node.ident, generics, item_id, variant.span); walk_list!(visitor, visit_anon_const, &variant.node.disr_expr); walk_list!(visitor, visit_attribute, &variant.node.attrs); } pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) { match typ.node { TyKind::Slice(ref ty) | TyKind::Paren(ref ty) => { visitor.visit_ty(ty) } TyKind::Ptr(ref mutable_type) => { visitor.visit_ty(&mutable_type.ty) } TyKind::Rptr(ref opt_lifetime, ref mutable_type) => { walk_list!(visitor, visit_lifetime, opt_lifetime); visitor.visit_ty(&mutable_type.ty) } TyKind::Never => {}, TyKind::Tup(ref tuple_element_types) => { walk_list!(visitor, visit_ty, tuple_element_types); } TyKind::BareFn(ref function_declaration) => { walk_list!(visitor, visit_generic_param, &function_declaration.generic_params); walk_fn_decl(visitor, &function_declaration.decl); } TyKind::Path(ref maybe_qself, ref path) => { if let Some(ref qself) = *maybe_qself { visitor.visit_ty(&qself.ty); } visitor.visit_path(path, typ.id); } TyKind::Array(ref ty, ref length) => { visitor.visit_ty(ty); 
visitor.visit_anon_const(length) } TyKind::TraitObject(ref bounds, ..) | TyKind::ImplTrait(_, ref bounds) => { walk_list!(visitor, visit_param_bound, bounds); } TyKind::Typeof(ref expression) => { visitor.visit_anon_const(expression) } TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => {} TyKind::Mac(ref mac) => { visitor.visit_mac(mac) } } } pub fn walk_path<'a, V: Visitor<'a>>(visitor: &mut V, path: &'a Path) { for segment in &path.segments { visitor.visit_path_segment(path.span, segment); } } pub fn walk_use_tree<'a, V: Visitor<'a>>( visitor: &mut V, use_tree: &'a UseTree, id: NodeId, ) { visitor.visit_path(&use_tree.prefix, id); match use_tree.kind { UseTreeKind::Simple(rename, ..) => { // the extra IDs are handled during HIR lowering if let Some(rename) = rename { visitor.visit_ident(rename); } } UseTreeKind::Glob => {}, UseTreeKind::Nested(ref use_trees) => { for &(ref nested_tree, nested_id) in use_trees { visitor.visit_use_tree(nested_tree, nested_id, true); } } } } pub fn walk_path_segment<'a, V: Visitor<'a>>(visitor: &mut V, path_span: Span, segment: &'a PathSegment) { visitor.visit_ident(segment.ident); if let Some(ref args) = segment.args { visitor.visit_generic_args(path_span, args); } } pub fn walk_generic_args<'a, V>(visitor: &mut V, _path_span: Span, generic_args: &'a GenericArgs) where V: Visitor<'a>, { match *generic_args { GenericArgs::AngleBracketed(ref data) => { walk_list!(visitor, visit_generic_arg, &data.args); walk_list!(visitor, visit_assoc_type_binding, &data.bindings); } GenericArgs::Parenthesized(ref data) => { walk_list!(visitor, visit_ty, &data.inputs); walk_list!(visitor, visit_ty, &data.output); } } } pub fn walk_assoc_type_binding<'a, V: Visitor<'a>>(visitor: &mut V, type_binding: &'a TypeBinding) { visitor.visit_ident(type_binding.ident); visitor.visit_ty(&type_binding.ty); } pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { match pattern.node { PatKind::TupleStruct(ref path, ref children, _) => { 
visitor.visit_path(path, pattern.id); walk_list!(visitor, visit_pat, children); } PatKind::Path(ref opt_qself, ref path) => { if let Some(ref qself) = *opt_qself { visitor.visit_ty(&qself.ty); } visitor.visit_path(path, pattern.id) } PatKind::Struct(ref path, ref fields, _) => { visitor.visit_path(path, pattern.id); for field in fields { walk_list!(visitor, visit_attribute, field.node.attrs.iter()); visitor.visit_ident(field.node.ident); visitor.visit_pat(&field.node.pat) } } PatKind::Tuple(ref tuple_elements, _) => { walk_list!(visitor, visit_pat, tuple_elements); } PatKind::Box(ref subpattern) | PatKind::Ref(ref subpattern, _) | PatKind::Paren(ref subpattern) => { visitor.visit_pat(subpattern) } PatKind::Ident(_, ident, ref optional_subpattern) => { visitor.visit_ident(ident); walk_list!(visitor, visit_pat, optional_subpattern); } PatKind::Lit(ref expression) => visitor.visit_expr(expression), PatKind::Range(ref lower_bound, ref upper_bound, _) => { visitor.visit_expr(lower_bound); visitor.visit_expr(upper_bound); } PatKind::Wild => (), PatKind::Slice(ref prepatterns, ref slice_pattern, ref postpatterns) => { walk_list!(visitor, visit_pat, prepatterns); walk_list!(visitor, visit_pat, slice_pattern); walk_list!(visitor, visit_pat, postpatterns); } PatKind::Mac(ref mac) => visitor.visit_mac(mac), } } pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, foreign_item: &'a ForeignItem) { visitor.visit_vis(&foreign_item.vis); visitor.visit_ident(foreign_item.ident); match foreign_item.node { ForeignItemKind::Fn(ref function_declaration, ref generics) => { walk_fn_decl(visitor, function_declaration); visitor.visit_generics(generics) } ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ), ForeignItemKind::Ty => (), ForeignItemKind::Macro(ref mac) => visitor.visit_mac(mac), } walk_list!(visitor, visit_attribute, &foreign_item.attrs); } pub fn walk_global_asm<'a, V: Visitor<'a>>(_: &mut V, _: &'a GlobalAsm) { // Empty! 
} pub fn walk_param_bound<'a, V: Visitor<'a>>(visitor: &mut V, bound: &'a GenericBound) { match *bound { GenericBound::Trait(ref typ, ref modifier) => visitor.visit_poly_trait_ref(typ, modifier), GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime), } } pub fn walk_generic_param<'a, V: Visitor<'a>>(visitor: &mut V, param: &'a GenericParam) { visitor.visit_ident(param.ident); walk_list!(visitor, visit_attribute, param.attrs.iter()); walk_list!(visitor, visit_param_bound, &param.bounds); match param.kind { GenericParamKind::Lifetime => {} GenericParamKind::Type { ref default } => walk_list!(visitor, visit_ty, default), } } pub fn walk_generics<'a, V: Visitor<'a>>(visitor: &mut V, generics: &'a Generics) { walk_list!(visitor, visit_generic_param, &generics.params); walk_list!(visitor, visit_where_predicate, &generics.where_clause.predicates); } pub fn walk_where_predicate<'a, V: Visitor<'a>>(visitor: &mut V, predicate: &'a WherePredicate) { match *predicate { WherePredicate::BoundPredicate(WhereBoundPredicate{ref bounded_ty, ref bounds, ref bound_generic_params, ..}) => { visitor.visit_ty(bounded_ty); walk_list!(visitor, visit_param_bound, bounds); walk_list!(visitor, visit_generic_param, bound_generic_params); } WherePredicate::RegionPredicate(WhereRegionPredicate{ref lifetime, ref bounds, ..}) => { visitor.visit_lifetime(lifetime); walk_list!(visitor, visit_param_bound, bounds); } WherePredicate::EqPredicate(WhereEqPredicate{ref lhs_ty, ref rhs_ty, ..}) => { visitor.visit_ty(lhs_ty); visitor.visit_ty(rhs_ty); } } } pub fn walk_fn_ret_ty<'a, V: Visitor<'a>>(visitor: &mut V, ret_ty: &'a FunctionRetTy) { if let FunctionRetTy::Ty(ref output_ty) = *ret_ty { visitor.visit_ty(output_ty) } } pub fn walk_fn_decl<'a, V: Visitor<'a>>(visitor: &mut V, function_declaration: &'a FnDecl) { for argument in &function_declaration.inputs { visitor.visit_pat(&argument.pat); visitor.visit_ty(&argument.ty) } visitor.visit_fn_ret_ty(&function_declaration.output) } pub 
fn walk_fn<'a, V>(visitor: &mut V, kind: FnKind<'a>, declaration: &'a FnDecl, _span: Span) where V: Visitor<'a>, { match kind { FnKind::ItemFn(_, _, _, body) => { walk_fn_decl(visitor, declaration); visitor.visit_block(body); } FnKind::Method(_, _, _, body) => { walk_fn_decl(visitor, declaration); visitor.visit_block(body); } FnKind::Closure(body) => { walk_fn_decl(visitor, declaration); visitor.visit_expr(body); } } } pub fn walk_trait_item<'a, V: Visitor<'a>>(visitor: &mut V, trait_item: &'a TraitItem) { visitor.visit_ident(trait_item.ident); walk_list!(visitor, visit_attribute, &trait_item.attrs); visitor.visit_generics(&trait_item.generics); match trait_item.node { TraitItemKind::Const(ref ty, ref default) => { visitor.visit_ty(ty); walk_list!(visitor, visit_expr, default); } TraitItemKind::Method(ref sig, None) => { walk_fn_decl(visitor, &sig.decl); } TraitItemKind::Method(ref sig, Some(ref body)) => { visitor.visit_fn(FnKind::Method(trait_item.ident, sig, None, body), &sig.decl, trait_item.span, trait_item.id); } TraitItemKind::Type(ref bounds, ref default) => { walk_list!(visitor, visit_param_bound, bounds); walk_list!(visitor, visit_ty, default); } TraitItemKind::Macro(ref mac) => { visitor.visit_mac(mac); } } } pub fn walk_impl_item<'a, V: Visitor<'a>>(visitor: &mut V, impl_item: &'a ImplItem) { visitor.visit_vis(&impl_item.vis); visitor.visit_ident(impl_item.ident); walk_list!(visitor, visit_attribute, &impl_item.attrs); visitor.visit_generics(&impl_item.generics); match impl_item.node { ImplItemKind::Const(ref ty, ref expr) => { visitor.visit_ty(ty); visitor.visit_expr(expr); } ImplItemKind::Method(ref sig, ref body) => { visitor.visit_fn(FnKind::Method(impl_item.ident, sig, Some(&impl_item.vis), body), &sig.decl, impl_item.span, impl_item.id); } ImplItemKind::Type(ref ty) => { visitor.visit_ty(ty); } ImplItemKind::Existential(ref bounds) => { walk_list!(visitor, visit_param_bound, bounds); } ImplItemKind::Macro(ref mac) => { visitor.visit_mac(mac); } } 
} pub fn walk_struct_def<'a, V: Visitor<'a>>(visitor: &mut V, struct_definition: &'a VariantData) { walk_list!(visitor, visit_struct_field, struct_definition.fields()); } pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) { visitor.visit_vis(&struct_field.vis); if let Some(ident) = struct_field.ident { visitor.visit_ident(ident); } visitor.visit_ty(&struct_field.ty); walk_list!(visitor, visit_attribute, &struct_field.attrs); } pub fn walk_block<'a, V: Visitor<'a>>(visitor: &mut V, block: &'a Block) { walk_list!(visitor, visit_stmt, &block.stmts); } pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) { match statement.node { StmtKind::Local(ref local) => visitor.visit_local(local), StmtKind::Item(ref item) => visitor.visit_item(item), StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => { visitor.visit_expr(expression) } StmtKind::Mac(ref mac) => { let (ref mac, _, ref attrs) = **mac; visitor.visit_mac(mac); for attr in attrs.iter() { visitor.visit_attribute(attr); } } } } pub fn walk_mac<'a, V: Visitor<'a>>(_: &mut V, _: &Mac) { // Empty! 
} pub fn walk_anon_const<'a, V: Visitor<'a>>(visitor: &mut V, constant: &'a AnonConst) { visitor.visit_expr(&constant.value); } pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { for attr in expression.attrs.iter() { visitor.visit_attribute(attr); } match expression.node { ExprKind::Box(ref subexpression) => { visitor.visit_expr(subexpression) } ExprKind::ObsoleteInPlace(ref place, ref subexpression) => { visitor.visit_expr(place); visitor.visit_expr(subexpression) } ExprKind::Array(ref subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } ExprKind::Repeat(ref element, ref count) => { visitor.visit_expr(element); visitor.visit_anon_const(count) } ExprKind::Struct(ref path, ref fields, ref optional_base) => { visitor.visit_path(path, expression.id); for field in fields { walk_list!(visitor, visit_attribute, field.attrs.iter()); visitor.visit_ident(field.ident); visitor.visit_expr(&field.expr) } walk_list!(visitor, visit_expr, optional_base); } ExprKind::Tup(ref subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } ExprKind::Call(ref callee_expression, ref arguments) => { visitor.visit_expr(callee_expression); walk_list!(visitor, visit_expr, arguments); } ExprKind::MethodCall(ref segment, ref arguments) => { visitor.visit_path_segment(expression.span, segment); walk_list!(visitor, visit_expr, arguments); } ExprKind::Binary(_, ref left_expression, ref right_expression) => { visitor.visit_expr(left_expression); visitor.visit_expr(right_expression) } ExprKind::AddrOf(_, ref subexpression) | ExprKind::Unary(_, ref subexpression) => { visitor.visit_expr(subexpression) } ExprKind::Lit(_) => {} ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => { visitor.visit_expr(subexpression); visitor.visit_ty(typ) } ExprKind::If(ref head_expression, ref if_block, ref optional_else) => { visitor.visit_expr(head_expression); visitor.visit_block(if_block); walk_list!(visitor, visit_expr, 
optional_else); } ExprKind::While(ref subexpression, ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); visitor.visit_expr(subexpression); visitor.visit_block(block); } ExprKind::IfLet(ref pats, ref subexpression, ref if_block, ref optional_else) => { walk_list!(visitor, visit_pat, pats); visitor.visit_expr(subexpression); visitor.visit_block(if_block); walk_list!(visitor, visit_expr, optional_else); } ExprKind::WhileLet(ref pats, ref subexpression, ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); walk_list!(visitor, visit_pat, pats); visitor.visit_expr(subexpression); visitor.visit_block(block); } ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); visitor.visit_pat(pattern); visitor.visit_expr(subexpression); visitor.visit_block(block); } ExprKind::Loop(ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); visitor.visit_block(block); } ExprKind::Match(ref subexpression, ref arms) => { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } ExprKind::Closure(_, _, _, ref function_declaration, ref body, _decl_span) => { visitor.visit_fn(FnKind::Closure(body), function_declaration, expression.span, expression.id) } ExprKind::Block(ref block, ref opt_label) => { walk_list!(visitor, visit_label, opt_label); visitor.visit_block(block); } ExprKind::Async(_, _, ref body) => { visitor.visit_block(body); } ExprKind::Assign(ref left_hand_expression, ref right_hand_expression) => { visitor.visit_expr(left_hand_expression); visitor.visit_expr(right_hand_expression); } ExprKind::AssignOp(_, ref left_expression, ref right_expression) => { visitor.visit_expr(left_expression); visitor.visit_expr(right_expression); } ExprKind::Field(ref subexpression, ident) => { visitor.visit_expr(subexpression); visitor.visit_ident(ident); } ExprKind::Index(ref main_expression, ref index_expression) => { 
visitor.visit_expr(main_expression); visitor.visit_expr(index_expression) } ExprKind::Range(ref start, ref end, _) => { walk_list!(visitor, visit_expr, start); walk_list!(visitor, visit_expr, end); } ExprKind::Path(ref maybe_qself, ref path) => { if let Some(ref qself) = *maybe_qself { visitor.visit_ty(&qself.ty); } visitor.visit_path(path, expression.id) } ExprKind::Break(ref opt_label, ref opt_expr) => { walk_list!(visitor, visit_label, opt_label); walk_list!(visitor, visit_expr, opt_expr); } ExprKind::Continue(ref opt_label) => { walk_list!(visitor, visit_label, opt_label); } ExprKind::Ret(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); } ExprKind::Mac(ref mac) => visitor.visit_mac(mac), ExprKind::Paren(ref subexpression) => { visitor.visit_expr(subexpression) } ExprKind::InlineAsm(ref ia) => { for &(_, ref input) in &ia.inputs { visitor.visit_expr(input) } for output in &ia.outputs { visitor.visit_expr(&output.expr) } } ExprKind::Yield(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); } ExprKind::Try(ref subexpression) => { visitor.visit_expr(subexpression) } ExprKind::Catch(ref body) => { visitor.visit_block(body) } } visitor.visit_expr_post(expression) } pub fn walk_arm<'a, V: Visitor<'a>>(visitor: &mut V, arm: &'a Arm) { walk_list!(visitor, visit_pat, &arm.pats); walk_list!(visitor, visit_expr, &arm.guard); visitor.visit_expr(&arm.body); walk_list!(visitor, visit_attribute, &arm.attrs); } pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) { if let VisibilityKind::Restricted { ref path, id } = vis.node { visitor.visit_path(path, id); } } pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) { visitor.visit_tts(attr.tokens.clone()); } pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) { match tt { TokenTree::Token(_, tok) => visitor.visit_token(tok), TokenTree::Delimited(_, delimed) => visitor.visit_tts(delimed.stream()), } } 
/// Walks a whole token stream, forwarding every top-level token tree
/// in order to `visitor.visit_tt`.
pub fn walk_tts<'a, V: Visitor<'a>>(visitor: &mut V, tts: TokenStream) {
    tts.trees().for_each(|tree| visitor.visit_tt(tree));
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/str.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

/// Returns the `char` whose UTF-8 encoding starts at byte offset `byte`
/// of `s`.
///
/// Panics if `byte` is past the end of `s`, if it does not fall on a
/// character boundary (both via the slice index), or if it is exactly
/// `s.len()` so the tail is empty (via the `unwrap`).
#[inline]
pub fn char_at(s: &str, byte: usize) -> char {
    let mut tail = s[byte..].chars();
    tail.next().unwrap()
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/std_inject.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast;
use attr;
use std::cell::Cell;
use std::iter;
use edition::Edition;
use ext::hygiene::{Mark, SyntaxContext};
use symbol::{Symbol, keywords};
use syntax_pos::{DUMMY_SP, Span};
use source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
use ptr::P;
use tokenstream::TokenStream;

/// Craft a span that will be ignored by the stability lint's
/// call to source_map's `is_internal` check.
/// The expanded code uses the unstable `#[prelude_import]` attribute.
fn ignored_span(sp: Span) -> Span {
    // A fresh expansion mark makes the span look macro-generated.
    let mark = Mark::fresh(Mark::root());
    mark.set_expn_info(ExpnInfo {
        call_site: DUMMY_SP,
        def_site: None,
        format: MacroAttribute(Symbol::intern("std_inject")),
        // allows the expansion to use unstable items (`#[prelude_import]`)
        allow_internal_unstable: true,
        allow_internal_unsafe: false,
        local_inner_macros: false,
        edition: hygiene::default_edition(),
    });
    sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
}

/// Returns the name of the prelude crate injected by
/// `maybe_inject_crates_ref` ("std" or "core"), or `None` if injection
/// has not run (or was suppressed by `#![no_core]`) on this thread.
pub fn injected_crate_name() -> Option<&'static str> {
    INJECTED_CRATE_NAME.with(|name| name.get())
}

// Thread-local record of which prelude crate was injected; written once
// near the end of `maybe_inject_crates_ref`, read by `injected_crate_name`.
thread_local! {
    static INJECTED_CRATE_NAME: Cell<Option<&'static str>> = Cell::new(None);
}

/// Injects the implicit `extern crate` items (`std`, or `core` [+
/// `compiler_builtins`] under `#![no_std]`) and a glob import of the
/// prelude (`use <crate>::prelude::v1::*;` tagged `#[prelude_import]`)
/// at the front of the crate's root module. Returns the crate unchanged
/// when it carries `#![no_core]`.
pub fn maybe_inject_crates_ref(
    mut krate: ast::Crate,
    alt_std_name: Option<&str>,
    edition: Edition,
) -> ast::Crate {
    let rust_2018 = edition >= Edition::Edition2018;

    // the first name in this list is the crate name of the crate with the prelude
    let names: &[&str] = if attr::contains_name(&krate.attrs, "no_core") {
        // `#![no_core]`: nothing is injected at all.
        return krate;
    } else if attr::contains_name(&krate.attrs, "no_std") {
        if attr::contains_name(&krate.attrs, "compiler_builtins") {
            &["core"]
        } else {
            &["core", "compiler_builtins"]
        }
    } else {
        &["std"]
    };

    // .rev() to preserve ordering above in combination with insert(0, ...)
    let alt_std_name = alt_std_name.map(Symbol::intern);
    for orig_name in names.iter().rev() {
        let orig_name = Symbol::intern(orig_name);
        let mut rename = orig_name;
        // HACK(eddyb) gensym the injected crates on the Rust 2018 edition,
        // so they don't accidentally interfere with the new import paths.
        if rust_2018 {
            rename = orig_name.gensymed();
        }
        // Keep the original name only when a rename actually happened, so
        // `ExternCrate` records the true source crate.
        let orig_name = if rename != orig_name {
            Some(orig_name)
        } else {
            None
        };
        // Equivalent of `#[macro_use] extern crate <name>;` at the top of
        // the root module.
        krate.module.items.insert(0, P(ast::Item {
            attrs: vec![attr::mk_attr_outer(DUMMY_SP,
                                            attr::mk_attr_id(),
                                            attr::mk_word_item(ast::Ident::from_str("macro_use")))],
            vis: dummy_spanned(ast::VisibilityKind::Inherited),
            node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)),
            ident: ast::Ident::with_empty_ctxt(rename),
            id: ast::DUMMY_NODE_ID,
            span: DUMMY_SP,
            tokens: None,
        }));
    }

    // the crates have been injected, the assumption is that the first one is the one with
    // the prelude.
    let name = names[0];
    INJECTED_CRATE_NAME.with(|opt_name| opt_name.set(Some(name)));

    // Build `#[prelude_import] use <name>::prelude::v1::*;` using the
    // lint-ignored span crafted above.
    let span = ignored_span(DUMMY_SP);
    krate.module.items.insert(0, P(ast::Item {
        attrs: vec![ast::Attribute {
            style: ast::AttrStyle::Outer,
            path: ast::Path::from_ident(ast::Ident::new(Symbol::intern("prelude_import"), span)),
            tokens: TokenStream::empty(),
            id: attr::mk_attr_id(),
            is_sugared_doc: false,
            span,
        }],
        vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
        node: ast::ItemKind::Use(P(ast::UseTree {
            prefix: ast::Path {
                // Path is `{{root}}::<name>::prelude::v1`.
                segments: iter::once(keywords::CrateRoot.ident())
                    .chain(
                        [name, "prelude", "v1"].iter().cloned()
                            .map(ast::Ident::from_str)
                    ).map(ast::PathSegment::from_ident).collect(),
                span,
            },
            kind: ast::UseTreeKind::Glob,
            span,
        })),
        id: ast::DUMMY_NODE_ID,
        ident: keywords::Invalid.ident(),
        span,
        tokens: None,
    }));

    krate
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/build.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

/// Cargo build script: emit the rerun directives telling Cargo to
/// re-run this script whenever the script itself or the release-channel
/// environment variables change.
fn main() {
    let directives = [
        "cargo:rerun-if-changed=build.rs",
        "cargo:rerun-if-env-changed=CFG_RELEASE_CHANNEL",
        "cargo:rerun-if-env-changed=CFG_DISABLE_UNSTABLE_FEATURES",
    ];
    // Cargo consumes these instructions from the script's stdout.
    for directive in &directives {
        println!("{}", directive);
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/json.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A JSON emitter for errors.
//!
//! This works by converting errors to a simplified structural format (see the
//! structs at the start of the file) and then serializing them. These should
//! contain as much information about the error as possible.
//!
//! The format of the JSON output should be considered *unstable*. For now the
//! structs at the end of this file (Diagnostic*) specify the error format.

// FIXME spec the JSON output properly.

use source_map::{SourceMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
use errors::registry::Registry;
use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
use errors::{DiagnosticId, Applicability};
use errors::emitter::{Emitter, EmitterWriter};

use rustc_data_structures::sync::{self, Lrc};
use std::io::{self, Write};
use std::vec;
use std::sync::{Arc, Mutex};

use rustc_serialize::json::{as_json, as_pretty_json};

/// Emits each diagnostic as one JSON value (pretty-printed when `pretty`).
pub struct JsonEmitter {
    // Destination for the serialized JSON; stderr via `stderr()`.
    dst: Box<dyn Write + Send>,
    // Used to look up extended error-code explanations, when available.
    registry: Option<Registry>,
    // Source map used to resolve spans to file/line/column information.
    cm: Lrc<dyn SourceMapper + sync::Send + sync::Sync>,
    pretty: bool,
    // Forwarded to the embedded `EmitterWriter` that produces `rendered`.
    ui_testing: bool,
}

impl JsonEmitter {
    /// Creates an emitter that writes to stderr.
    pub fn stderr(registry: Option<Registry>,
                  code_map: Lrc<SourceMap>,
                  pretty: bool) -> JsonEmitter {
        JsonEmitter {
            dst: Box::new(io::stderr()),
            registry,
            cm: code_map,
            pretty,
            ui_testing: false,
        }
    }

    /// Stderr emitter with no registry and an empty source map.
    pub fn basic(pretty: bool) -> JsonEmitter {
        let file_path_mapping = FilePathMapping::empty();
        JsonEmitter::stderr(None, Lrc::new(SourceMap::new(file_path_mapping)), pretty)
    }

    /// Creates an emitter writing to an arbitrary destination.
    pub fn new(dst: Box<dyn Write + Send>,
               registry: Option<Registry>,
               code_map: Lrc<SourceMap>,
               pretty: bool) -> JsonEmitter {
        JsonEmitter {
            dst,
            registry,
            cm: code_map,
            pretty,
            ui_testing: false,
        }
    }

    /// Builder-style toggle for UI-test output mode (consumes `self`).
    pub fn ui_testing(self, ui_testing: bool) -> Self {
        Self { ui_testing, ..self }
    }
}

impl Emitter for JsonEmitter {
    // Converts the diagnostic to the serializable `Diagnostic` form and writes
    // it as a single JSON value followed by a newline.
    fn emit(&mut self, db: &DiagnosticBuilder) {
        let data = Diagnostic::from_diagnostic_builder(db, self);
        let result = if self.pretty {
            writeln!(&mut self.dst, "{}", as_pretty_json(&data))
        } else {
            writeln!(&mut self.dst, "{}", as_json(&data))
        };
        if let Err(e) = result {
            // If diagnostics can't be written there is nothing sensible left to do.
            panic!("failed to print diagnostics: {:?}", e);
        }
    }
}

// The following data types are provided just for serialisation.

#[derive(RustcEncodable)]
struct Diagnostic {
    /// The primary error message.
    message: String,
    code: Option<DiagnosticCode>,
    /// "error: internal compiler error", "error", "warning", "note", "help".
    level: &'static str,
    spans: Vec<DiagnosticSpan>,
    /// Associated diagnostic messages.
    children: Vec<Diagnostic>,
    /// The message as rustc would render it.
    rendered: Option<String>,
}

#[derive(RustcEncodable)]
#[allow(unused_attributes)]
struct DiagnosticSpan {
    file_name: String,
    // Byte offsets relative to the start of the containing source file
    // (see `from_span_full`, which subtracts `file.start_pos`).
    byte_start: u32,
    byte_end: u32,
    /// 1-based.
    line_start: usize,
    line_end: usize,
    /// 1-based, character offset.
    column_start: usize,
    column_end: usize,
    /// Is this a "primary" span -- meaning the point, or one of the points,
    /// where the error occurred?
    is_primary: bool,
    /// Source text from the start of line_start to the end of line_end.
    text: Vec<DiagnosticSpanLine>,
    /// Label that should be placed at this location (if any)
    label: Option<String>,
    /// If we are suggesting a replacement, this will contain text
    /// that should be sliced in atop this span.
    suggested_replacement: Option<String>,
    /// If the suggestion is approximate
    suggestion_applicability: Option<Applicability>,
    /// Macro invocations that created the code at this span, if any.
    expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
}

#[derive(RustcEncodable)]
struct DiagnosticSpanLine {
    text: String,
    /// 1-based, character offset in self.text.
    highlight_start: usize,
    highlight_end: usize,
}

#[derive(RustcEncodable)]
struct DiagnosticSpanMacroExpansion {
    /// span where macro was applied to generate this code; note that
    /// this may itself derive from a macro (if
    /// `span.expansion.is_some()`)
    span: DiagnosticSpan,

    /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
    macro_decl_name: String,

    /// span where macro was defined (if known)
    def_site_span: Option<DiagnosticSpan>,
}

#[derive(RustcEncodable)]
struct DiagnosticCode {
    /// The code itself.
    code: String,
    /// An explanation for the code.
    explanation: Option<&'static str>,
}

impl Diagnostic {
    /// Builds the serializable form of a diagnostic: the main message, its
    /// children, any suggestions (as extra "help" children), and a `rendered`
    /// copy of what the plain-text emitter would have printed.
    fn from_diagnostic_builder(db: &DiagnosticBuilder,
                               je: &JsonEmitter)
                               -> Diagnostic {
        // Each code suggestion becomes an additional "help" child diagnostic.
        let sugg = db.suggestions.iter().map(|sugg| {
            Diagnostic {
                message: sugg.msg.clone(),
                code: None,
                level: "help",
                spans: DiagnosticSpan::from_suggestion(sugg, je),
                children: vec![],
                rendered: None,
            }
        });

        // generate regular command line output and store it in the json

        // A threadsafe buffer for writing.
        #[derive(Default, Clone)]
        struct BufWriter(Arc<Mutex<Vec<u8>>>);

        impl Write for BufWriter {
            fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
                self.0.lock().unwrap().write(buf)
            }
            fn flush(&mut self) -> io::Result<()> {
                self.0.lock().unwrap().flush()
            }
        }
        let buf = BufWriter::default();
        let output = buf.clone();
        // Run the ordinary human-readable emitter into the in-memory buffer.
        EmitterWriter::new(Box::new(buf), Some(je.cm.clone()), false, false)
            .ui_testing(je.ui_testing).emit(db);
        // Both unwraps rely on the EmitterWriter having been dropped above, so
        // `output` holds the only remaining Arc reference.
        let output = Arc::try_unwrap(output.0).unwrap().into_inner().unwrap();
        let output = String::from_utf8(output).unwrap();

        Diagnostic {
            message: db.message(),
            code: DiagnosticCode::map_opt_string(db.code.clone(), je),
            level: db.level.to_str(),
            spans: DiagnosticSpan::from_multispan(&db.span, je),
            children: db.children.iter().map(|c| {
                Diagnostic::from_sub_diagnostic(c, je)
            }).chain(sugg).collect(),
            rendered: Some(output),
        }
    }

    /// Converts a sub-diagnostic; prefers its render span over its logical span.
    fn from_sub_diagnostic(db: &SubDiagnostic, je: &JsonEmitter) -> Diagnostic {
        Diagnostic {
            message: db.message(),
            code: None,
            level: db.level.to_str(),
            spans: db.render_span.as_ref()
                     .map(|sp| DiagnosticSpan::from_multispan(sp, je))
                     .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)),
            children: vec![],
            rendered: None,
        }
    }
}

impl DiagnosticSpan {
    fn from_span_label(span: SpanLabel,
                       suggestion: Option<(&String, Applicability)>,
                       je: &JsonEmitter)
                       -> DiagnosticSpan {
        Self::from_span_etc(span.span,
                            span.is_primary,
                            span.label,
                            suggestion,
                            je)
    }

    fn from_span_etc(span: Span,
                     is_primary: bool,
                     label: Option<String>,
                     suggestion: Option<(&String, Applicability)>,
                     je: &JsonEmitter)
                     -> DiagnosticSpan {
        // obtain the full backtrace from the `macro_backtrace`
        // helper; in some ways, it'd be better to expand the
        // backtrace ourselves, but the `macro_backtrace` helper makes
        // some decision, such as dropping some frames, and I don't
        // want to duplicate that logic here.
        let backtrace = span.macro_backtrace().into_iter();
        DiagnosticSpan::from_span_full(span,
                                       is_primary,
                                       label,
                                       suggestion,
                                       backtrace,
                                       je)
    }

    /// Resolves a span to concrete file/line/column data and, recursively,
    /// turns the remaining macro backtrace frames into a linked `expansion`
    /// chain (each recursive call consumes the rest of `backtrace`).
    fn from_span_full(span: Span,
                      is_primary: bool,
                      label: Option<String>,
                      suggestion: Option<(&String, Applicability)>,
                      mut backtrace: vec::IntoIter<MacroBacktrace>,
                      je: &JsonEmitter)
                      -> DiagnosticSpan {
        let start = je.cm.lookup_char_pos(span.lo());
        let end = je.cm.lookup_char_pos(span.hi());
        let backtrace_step = backtrace.next().map(|bt| {
            let call_site =
                Self::from_span_full(bt.call_site,
                                     false,
                                     None,
                                     None,
                                     backtrace,
                                     je);
            let def_site_span = bt.def_site_span.map(|sp| {
                Self::from_span_full(sp,
                                     false,
                                     None,
                                     None,
                                     vec![].into_iter(),
                                     je)
            });
            Box::new(DiagnosticSpanMacroExpansion {
                span: call_site,
                macro_decl_name: bt.macro_decl_name,
                def_site_span,
            })
        });

        DiagnosticSpan {
            file_name: start.file.name.to_string(),
            // Make byte offsets file-relative.
            byte_start: span.lo().0 - start.file.start_pos.0,
            byte_end: span.hi().0 - start.file.start_pos.0,
            line_start: start.line,
            line_end: end.line,
            // +1: columns are reported 1-based.
            column_start: start.col.0 + 1,
            column_end: end.col.0 + 1,
            is_primary,
            text: DiagnosticSpanLine::from_span(span, je),
            suggested_replacement: suggestion.map(|x| x.0.clone()),
            suggestion_applicability: suggestion.map(|x| x.1),
            expansion: backtrace_step,
            label,
        }
    }

    fn from_multispan(msp: &MultiSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
        msp.span_labels()
           .into_iter()
           .map(|span_str| Self::from_span_label(span_str, None, je))
           .collect()
    }

    /// One `DiagnosticSpan` per substitution part, each carrying its
    /// replacement snippet and the suggestion's applicability.
    fn from_suggestion(suggestion: &CodeSuggestion,
                       je: &JsonEmitter)
                       -> Vec<DiagnosticSpan> {
        suggestion.substitutions
                  .iter()
                  .flat_map(|substitution| {
                      substitution.parts.iter().map(move |suggestion_inner| {
                          let span_label = SpanLabel {
                              span: suggestion_inner.span,
                              is_primary: true,
                              label: None,
                          };
                          DiagnosticSpan::from_span_label(span_label,
                                                          Some((&suggestion_inner.snippet,
                                                                suggestion.applicability)),
                                                          je)
                      })
                  })
                  .collect()
    }
}

impl DiagnosticSpanLine {
    fn line_from_source_file(fm: &syntax_pos::SourceFile,
                             index: usize,
                             h_start: usize,
                             h_end: usize)
                             -> DiagnosticSpanLine {
        DiagnosticSpanLine {
            // Missing lines serialize as empty text rather than failing.
            text: fm.get_line(index).map_or(String::new(), |l| l.into_owned()),
            highlight_start: h_start,
            highlight_end: h_end,
        }
    }

    /// Create a list of DiagnosticSpanLines from span - each line with any part
    /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
    /// `span` within the line.
    fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
        je.cm.span_to_lines(span)
             .map(|lines| {
                 let fm = &*lines.file;
                 lines.lines
                      .iter()
                      .map(|line| {
                          DiagnosticSpanLine::line_from_source_file(fm,
                                                                    line.line_index,
                                                                    line.start_col.0 + 1,
                                                                    line.end_col.0 + 1)
                      })
                     .collect()
             })
             // Spans that can't be resolved to lines yield no text at all.
             .unwrap_or_else(|_| vec![])
    }
}

impl DiagnosticCode {
    /// Converts an optional `DiagnosticId` into the serializable code form,
    /// attaching the registry's extended explanation when one exists.
    fn map_opt_string(s: Option<DiagnosticId>, je: &JsonEmitter) -> Option<DiagnosticCode> {
        s.map(|s| {
            let s = match s {
                DiagnosticId::Error(s) => s,
                DiagnosticId::Lint(s) => s,
            };
            let explanation = je.registry
                                .as_ref()
                                .and_then(|registry| registry.find_description(&s));

            DiagnosticCode {
                code: s,
                explanation,
            }
        })
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/fold.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A Folder represents an AST->AST fold; it accepts an AST piece,
//! and returns a piece of the same type. So, for instance, macro
//! expansion is a Folder that walks over an AST and produces another
//! AST.
//!
//! Note: using a Folder (other than the MacroExpander Folder) on
//! an AST before macro expansion is probably a bad idea. For instance,
//! a folder renaming item names in a module will miss all of those
//! that are created by the expansion of a macro.

use ast::*;
use ast;
use syntax_pos::Span;
use source_map::{Spanned, respan};
use parse::token::{self, Token};
use ptr::P;
use OneVector;
use symbol::keywords;
use ThinVec;
use tokenstream::*;
use util::move_map::MoveMap;

use rustc_data_structures::sync::Lrc;
use rustc_data_structures::small_vec::ExpectOne;

/// The fold trait. Every method has a default implementation that delegates
/// to the matching free `noop_fold_*` function below, so an implementor only
/// overrides the node kinds it cares about and inherits a structure-preserving
/// traversal for everything else.
pub trait Folder : Sized {
    // Any additions to this trait should happen in form
    // of a call to a public `noop_*` function that only calls
    // out to the folder again, not other `noop_*` functions.
    //
    // This is a necessary API workaround to the problem of not
    // being able to call out to the super default method
    // in an overridden default method.

    fn fold_crate(&mut self, c: Crate) -> Crate { noop_fold_crate(c, self) }
    fn fold_meta_items(&mut self, meta_items: Vec<MetaItem>) -> Vec<MetaItem> { noop_fold_meta_items(meta_items, self) }
    fn fold_meta_list_item(&mut self, list_item: NestedMetaItem) -> NestedMetaItem { noop_fold_meta_list_item(list_item, self) }
    fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem { noop_fold_meta_item(meta_item, self) }
    fn fold_use_tree(&mut self, use_tree: UseTree) -> UseTree { noop_fold_use_tree(use_tree, self) }
    fn fold_foreign_item(&mut self, ni: ForeignItem) -> OneVector<ForeignItem> { noop_fold_foreign_item(ni, self) }
    fn fold_foreign_item_simple(&mut self, ni: ForeignItem) -> ForeignItem { noop_fold_foreign_item_simple(ni, self) }
    fn fold_item(&mut self, i: P<Item>) -> OneVector<P<Item>> { noop_fold_item(i, self) }
    fn fold_item_simple(&mut self, i: Item) -> Item { noop_fold_item_simple(i, self) }
    fn fold_fn_header(&mut self, header: FnHeader) -> FnHeader { noop_fold_fn_header(header, self) }
    fn fold_struct_field(&mut self, sf: StructField) -> StructField { noop_fold_struct_field(sf, self) }
    fn fold_item_kind(&mut self, i: ItemKind) -> ItemKind { noop_fold_item_kind(i, self) }
    fn fold_trait_item(&mut self, i: TraitItem) -> OneVector<TraitItem> { noop_fold_trait_item(i, self) }
    fn fold_impl_item(&mut self, i: ImplItem) -> OneVector<ImplItem> { noop_fold_impl_item(i, self) }
    fn fold_fn_decl(&mut self, d: P<FnDecl>) -> P<FnDecl> { noop_fold_fn_decl(d, self) }
    fn fold_asyncness(&mut self, a: IsAsync) -> IsAsync { noop_fold_asyncness(a, self) }
    fn fold_block(&mut self, b: P<Block>) -> P<Block> { noop_fold_block(b, self) }
    fn fold_stmt(&mut self, s: Stmt) -> OneVector<Stmt> { noop_fold_stmt(s, self) }
    fn fold_arm(&mut self, a: Arm) -> Arm { noop_fold_arm(a, self) }
    fn fold_pat(&mut self, p: P<Pat>) -> P<Pat> { noop_fold_pat(p, self) }
    fn fold_anon_const(&mut self, c: AnonConst) -> AnonConst { noop_fold_anon_const(c, self) }
    fn fold_expr(&mut self, e: P<Expr>) -> P<Expr> { e.map(|e| noop_fold_expr(e, self)) }
    fn fold_range_end(&mut self, re: RangeEnd) -> RangeEnd { noop_fold_range_end(re, self) }
    fn fold_opt_expr(&mut self, e: P<Expr>) -> Option<P<Expr>> { noop_fold_opt_expr(e, self) }
    fn fold_exprs(&mut self, es: Vec<P<Expr>>) -> Vec<P<Expr>> { noop_fold_exprs(es, self) }
    fn fold_generic_arg(&mut self, arg: GenericArg) -> GenericArg {
        match arg {
            GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.fold_lifetime(lt)),
            GenericArg::Type(ty) => GenericArg::Type(self.fold_ty(ty)),
        }
    }
    fn fold_ty(&mut self, t: P<Ty>) -> P<Ty> { noop_fold_ty(t, self) }
    fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { noop_fold_lifetime(l, self) }
    fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding { noop_fold_ty_binding(t, self) }
    fn fold_mod(&mut self, m: Mod) -> Mod { noop_fold_mod(m, self) }
    fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod { noop_fold_foreign_mod(nm, self) }
    fn fold_global_asm(&mut self, ga: P<GlobalAsm>) -> P<GlobalAsm> { noop_fold_global_asm(ga, self) }
    fn fold_variant(&mut self, v: Variant) -> Variant { noop_fold_variant(v, self) }
    fn fold_ident(&mut self, i: Ident) -> Ident { noop_fold_ident(i, self) }
    fn fold_usize(&mut self, i: usize) -> usize { noop_fold_usize(i, self) }
    fn fold_path(&mut self, p: Path) -> Path { noop_fold_path(p, self) }
    fn fold_qpath(&mut self, qs: Option<QSelf>, p: Path) -> (Option<QSelf>, Path) { noop_fold_qpath(qs, p, self) }
    fn fold_generic_args(&mut self, p: GenericArgs) -> GenericArgs { noop_fold_generic_args(p, self) }
    fn fold_angle_bracketed_parameter_data(&mut self, p: AngleBracketedArgs) -> AngleBracketedArgs { noop_fold_angle_bracketed_parameter_data(p, self) }
    fn fold_parenthesized_parameter_data(&mut self, p: ParenthesisedArgs) -> ParenthesisedArgs { noop_fold_parenthesized_parameter_data(p, self) }
    fn fold_local(&mut self, l: P<Local>) -> P<Local> { noop_fold_local(l, self) }
    // Deliberately panics by default: a folder must opt in to walking macros.
    fn fold_mac(&mut self, _mac: Mac) -> Mac {
        panic!("fold_mac disabled by default");
        // NB: see note about macros above.
        // if you really want a folder that
        // works on macros, use this
        // definition in your trait impl:
        // fold::noop_fold_mac(_mac, self)
    }
    fn fold_macro_def(&mut self, def: MacroDef) -> MacroDef { noop_fold_macro_def(def, self) }
    fn fold_label(&mut self, label: Label) -> Label { noop_fold_label(label, self) }
    fn fold_attribute(&mut self, at: Attribute) -> Option<Attribute> { noop_fold_attribute(at, self) }
    fn fold_arg(&mut self, a: Arg) -> Arg { noop_fold_arg(a, self) }
    fn fold_generics(&mut self, generics: Generics) -> Generics { noop_fold_generics(generics, self) }
    fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef { noop_fold_trait_ref(p, self) }
    fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef { noop_fold_poly_trait_ref(p, self) }
    fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData { noop_fold_variant_data(vdata, self) }
    fn fold_generic_param(&mut self, param: GenericParam) -> GenericParam { noop_fold_generic_param(param, self) }
    fn fold_generic_params(&mut self, params: Vec<GenericParam>) -> Vec<GenericParam> { noop_fold_generic_params(params, self) }
    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree { noop_fold_tt(tt, self) }
    fn fold_tts(&mut self, tts: TokenStream) -> TokenStream { noop_fold_tts(tts, self) }
    fn fold_token(&mut self, t: token::Token) -> token::Token { noop_fold_token(t, self) }
    fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal { noop_fold_interpolated(nt, self) }
    fn fold_opt_bounds(&mut self, b: Option<GenericBounds>) -> Option<GenericBounds> { noop_fold_opt_bounds(b, self) }
    fn fold_bounds(&mut self, b: GenericBounds) -> GenericBounds { noop_fold_bounds(b, self) }
    fn fold_param_bound(&mut self, tpb: GenericBound) -> GenericBound { noop_fold_param_bound(tpb, self) }
    fn fold_mt(&mut self, mt: MutTy) -> MutTy { noop_fold_mt(mt, self) }
    fn fold_field(&mut self, field: Field) -> Field { noop_fold_field(field, self) }
    fn fold_where_clause(&mut self, where_clause: WhereClause) -> WhereClause { noop_fold_where_clause(where_clause, self) }
    fn fold_where_predicate(&mut self, where_predicate: WherePredicate) -> WherePredicate { noop_fold_where_predicate(where_predicate, self) }
    fn fold_vis(&mut self, vis: Visibility) -> Visibility { noop_fold_vis(vis, self) }
    // Hooks for rewriting node ids and spans; identity by default.
    fn new_id(&mut self, i: NodeId) -> NodeId { i }
    fn new_span(&mut self, sp: Span) -> Span { sp }
}

// ---------------------------------------------------------------------------
// Default ("noop") fold implementations. Each rebuilds its node by recursing
// through the folder's methods, so overridden methods are honored everywhere.
// ---------------------------------------------------------------------------

pub fn noop_fold_meta_items<T: Folder>(meta_items: Vec<MetaItem>, fld: &mut T) -> Vec<MetaItem> {
    meta_items.move_map(|x| fld.fold_meta_item(x))
}

pub fn noop_fold_use_tree<T: Folder>(use_tree: UseTree, fld: &mut T) -> UseTree {
    UseTree {
        span: fld.new_span(use_tree.span),
        prefix: fld.fold_path(use_tree.prefix),
        kind: match use_tree.kind {
            UseTreeKind::Simple(rename, id1, id2) =>
                UseTreeKind::Simple(rename.map(|ident| fld.fold_ident(ident)),
                                    fld.new_id(id1), fld.new_id(id2)),
            UseTreeKind::Glob => UseTreeKind::Glob,
            UseTreeKind::Nested(items) => UseTreeKind::Nested(items.move_map(|(tree, id)| {
                (fld.fold_use_tree(tree), fld.new_id(id))
            })),
        },
    }
}

// Folding an attribute may drop it (fold_attribute returns Option), hence flat_map.
pub fn fold_attrs<T: Folder>(attrs: Vec<Attribute>, fld: &mut T) -> Vec<Attribute> {
    attrs.move_flat_map(|x| fld.fold_attribute(x))
}

pub fn fold_thin_attrs<T: Folder>(attrs: ThinVec<Attribute>, fld: &mut T) -> ThinVec<Attribute> {
    fold_attrs(attrs.into(), fld).into()
}

pub fn noop_fold_arm<T: Folder>(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm {
    Arm {
        attrs: fold_attrs(attrs, fld),
        pats: pats.move_map(|x| fld.fold_pat(x)),
        guard: guard.map(|x| fld.fold_expr(x)),
        body: fld.fold_expr(body),
    }
}

pub fn noop_fold_ty_binding<T: Folder>(b: TypeBinding, fld: &mut T) -> TypeBinding {
    TypeBinding {
        id: fld.new_id(b.id),
        ident: fld.fold_ident(b.ident),
        ty: fld.fold_ty(b.ty),
        span: fld.new_span(b.span),
    }
}

pub fn noop_fold_ty<T: Folder>(t: P<Ty>, fld: &mut T) -> P<Ty> {
    t.map(|Ty {id, node, span}| Ty {
        id: fld.new_id(id),
        node: match node {
            // Leaf type kinds with nothing to recurse into.
            TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => node,
            TyKind::Slice(ty) => TyKind::Slice(fld.fold_ty(ty)),
            TyKind::Ptr(mt) => TyKind::Ptr(fld.fold_mt(mt)),
            TyKind::Rptr(region, mt) => {
                TyKind::Rptr(region.map(|lt| noop_fold_lifetime(lt, fld)), fld.fold_mt(mt))
            }
            TyKind::BareFn(f) => {
                TyKind::BareFn(f.map(|BareFnTy {generic_params, unsafety, abi, decl}| BareFnTy {
                    generic_params: fld.fold_generic_params(generic_params),
                    unsafety,
                    abi,
                    decl: fld.fold_fn_decl(decl)
                }))
            }
            TyKind::Never => node,
            TyKind::Tup(tys) => TyKind::Tup(tys.move_map(|ty| fld.fold_ty(ty))),
            TyKind::Paren(ty) => TyKind::Paren(fld.fold_ty(ty)),
            TyKind::Path(qself, path) => {
                let (qself, path) = fld.fold_qpath(qself, path);
                TyKind::Path(qself, path)
            }
            TyKind::Array(ty, length) => {
                TyKind::Array(fld.fold_ty(ty), fld.fold_anon_const(length))
            }
            TyKind::Typeof(expr) => {
                TyKind::Typeof(fld.fold_anon_const(expr))
            }
            TyKind::TraitObject(bounds, syntax) => {
                TyKind::TraitObject(bounds.move_map(|b| fld.fold_param_bound(b)), syntax)
            }
            TyKind::ImplTrait(id, bounds) => {
                TyKind::ImplTrait(fld.new_id(id), bounds.move_map(|b| fld.fold_param_bound(b)))
            }
            TyKind::Mac(mac) => {
                TyKind::Mac(fld.fold_mac(mac))
            }
        },
        span: fld.new_span(span)
    })
}

pub fn noop_fold_foreign_mod<T: Folder>(ForeignMod {abi, items}: ForeignMod,
                                        fld: &mut T) -> ForeignMod {
    ForeignMod {
        abi,
        items: items.move_flat_map(|x| fld.fold_foreign_item(x)),
    }
}

// Global asm carries no foldable substructure; returned untouched.
pub fn noop_fold_global_asm<T: Folder>(ga: P<GlobalAsm>, _: &mut T) -> P<GlobalAsm> { ga }

pub fn noop_fold_variant<T: Folder>(v: Variant, fld: &mut T) -> Variant {
    Spanned {
        node: Variant_ {
            ident: fld.fold_ident(v.node.ident),
            attrs: fold_attrs(v.node.attrs, fld),
            data: fld.fold_variant_data(v.node.data),
            disr_expr: v.node.disr_expr.map(|e| fld.fold_anon_const(e)),
        },
        span: fld.new_span(v.span),
    }
}

pub fn noop_fold_ident<T: Folder>(ident: Ident, fld: &mut T) -> Ident {
    Ident::new(ident.name, fld.new_span(ident.span))
}

pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize { i }

pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
    Path {
        segments: segments.move_map(|PathSegment { ident, args }| PathSegment {
            ident: fld.fold_ident(ident),
            args: args.map(|args| args.map(|args| fld.fold_generic_args(args))),
        }),
        span: fld.new_span(span)
    }
}

pub fn noop_fold_qpath<T: Folder>(qself: Option<QSelf>,
                                  path: Path,
                                  fld: &mut T) -> (Option<QSelf>, Path) {
    let qself = qself.map(|QSelf { ty, path_span, position }| {
        QSelf {
            ty: fld.fold_ty(ty),
            path_span: fld.new_span(path_span),
            position,
        }
    });
    (qself, fld.fold_path(path))
}

pub fn noop_fold_generic_args<T: Folder>(generic_args: GenericArgs, fld: &mut T) -> GenericArgs {
    match generic_args {
        GenericArgs::AngleBracketed(data) => {
            GenericArgs::AngleBracketed(fld.fold_angle_bracketed_parameter_data(data))
        }
        GenericArgs::Parenthesized(data) => {
            GenericArgs::Parenthesized(fld.fold_parenthesized_parameter_data(data))
        }
    }
}

pub fn noop_fold_angle_bracketed_parameter_data<T: Folder>(data: AngleBracketedArgs,
                                                           fld: &mut T)
                                                           -> AngleBracketedArgs {
    let AngleBracketedArgs { args, bindings, span } = data;
    AngleBracketedArgs {
        args: args.move_map(|arg| fld.fold_generic_arg(arg)),
        bindings: bindings.move_map(|b| fld.fold_ty_binding(b)),
        span: fld.new_span(span)
    }
}

pub fn noop_fold_parenthesized_parameter_data<T: Folder>(data: ParenthesisedArgs,
                                                         fld: &mut T)
                                                         -> ParenthesisedArgs {
    let ParenthesisedArgs { inputs, output, span } = data;
    ParenthesisedArgs {
        inputs: inputs.move_map(|ty| fld.fold_ty(ty)),
        output: output.map(|ty| fld.fold_ty(ty)),
        span: fld.new_span(span)
    }
}

pub fn noop_fold_local<T: Folder>(l: P<Local>, fld: &mut T) -> P<Local> {
    l.map(|Local {id, pat, ty, init, span, attrs}| Local {
        id: fld.new_id(id),
        pat: fld.fold_pat(pat),
        ty: ty.map(|t| fld.fold_ty(t)),
        init: init.map(|e| fld.fold_expr(e)),
        span: fld.new_span(span),
        attrs: fold_attrs(attrs.into(), fld).into(),
    })
}

pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<Attribute> {
    Some(Attribute {
        id: attr.id,
        style: attr.style,
        path: fld.fold_path(attr.path),
        tokens: fld.fold_tts(attr.tokens),
        is_sugared_doc: attr.is_sugared_doc,
        span: fld.new_span(attr.span),
    })
}

pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
    Spanned {
        node: Mac_ {
            tts: fld.fold_tts(node.stream()).into(),
            path: fld.fold_path(node.path),
            delim: node.delim,
        },
        span: fld.new_span(span)
    }
}

pub fn noop_fold_macro_def<T: Folder>(def: MacroDef, fld: &mut T) -> MacroDef {
    MacroDef {
        tokens: fld.fold_tts(def.tokens.into()).into(),
        legacy: def.legacy,
    }
}

pub fn noop_fold_meta_list_item<T: Folder>(li: NestedMetaItem, fld: &mut T)
    -> NestedMetaItem {
    Spanned {
        node: match li.node {
            NestedMetaItemKind::MetaItem(mi) => {
                NestedMetaItemKind::MetaItem(fld.fold_meta_item(mi))
            },
            NestedMetaItemKind::Literal(lit) => NestedMetaItemKind::Literal(lit)
        },
        span: fld.new_span(li.span)
    }
}

pub fn noop_fold_meta_item<T: Folder>(mi: MetaItem, fld: &mut T) -> MetaItem {
    MetaItem {
        ident: mi.ident,
        node: match mi.node {
            MetaItemKind::Word => MetaItemKind::Word,
            MetaItemKind::List(mis) => {
                MetaItemKind::List(mis.move_map(|e| fld.fold_meta_list_item(e)))
            },
            MetaItemKind::NameValue(s) => MetaItemKind::NameValue(s),
        },
        span: fld.new_span(mi.span)
    }
}

pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
    Arg {
        id: fld.new_id(id),
        pat: fld.fold_pat(pat),
        ty: fld.fold_ty(ty)
    }
}

pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
    match tt {
        TokenTree::Token(span, tok) =>
            TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
            tts: fld.fold_tts(delimed.stream()).into(),
            delim: delimed.delim,
        }),
    }
}

pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
    tts.map(|tt| fld.fold_tt(tt))
}

// apply ident folder if it's an ident, apply other folds to interpolated nodes
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
    match t {
        token::Ident(id, is_raw) => token::Ident(fld.fold_ident(id), is_raw),
        token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
        token::Interpolated(nt) => {
            // Take ownership of the nonterminal, cloning only if it is shared.
            let nt = match Lrc::try_unwrap(nt) {
                Ok(nt) => nt,
                Err(nt) => (*nt).clone(),
            };
            Token::interpolated(fld.fold_interpolated(nt.0))
        }
        _ => t
    }
}

/// apply folder to elements of interpolated nodes
//
// NB: this can occur only when applying a fold to partially expanded code, where
// parsed pieces have gotten implanted ito *other* macro invocations. This is relevant
// for macro hygiene, but possibly not elsewhere.
//
// One problem here occurs because the types for fold_item, fold_stmt, etc. allow the
// folder to return *multiple* items; this is a problem for the nodes here, because
// they insist on having exactly one piece. One solution would be to mangle the fold
// trait to include one-to-many and one-to-one versions of these entry points, but that
// would probably confuse a lot of people and help very few. Instead, I'm just going
// to put in dynamic checks. I think the performance impact of this will be pretty much
// nonexistent. The danger is that someone will apply a fold to a partially expanded
// node, and will be confused by the fact that their "fold_item" or "fold_stmt" isn't
// getting called on NtItem or NtStmt nodes. Hopefully they'll wind up reading this
// comment, and doing something appropriate.
//
// BTW, design choice: I considered just changing the type of, e.g., NtItem to contain
// multiple items, but decided against it when I looked at parse_item_or_view_item and
// tried to figure out what I would do with multiple items there....
pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
                                         -> token::Nonterminal {
    match nt {
        token::NtItem(item) =>
            token::NtItem(fld.fold_item(item)
                          // this is probably okay, because the only folds likely
                          // to peek inside interpolated nodes will be renamings/markings,
                          // which map single items to single items
                          .expect_one("expected fold to produce exactly one item")),
        token::NtBlock(block) => token::NtBlock(fld.fold_block(block)),
        token::NtStmt(stmt) =>
            token::NtStmt(fld.fold_stmt(stmt)
                          // this is probably okay, because the only folds likely
                          // to peek inside interpolated nodes will be renamings/markings,
                          // which map single items to single items
                          .expect_one("expected fold to produce exactly one statement")),
        token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
        token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
        token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
        token::NtIdent(ident, is_raw) => token::NtIdent(fld.fold_ident(ident), is_raw),
        token::NtLifetime(ident) => token::NtLifetime(fld.fold_ident(ident)),
        token::NtLiteral(expr) => token::NtLiteral(fld.fold_expr(expr)),
        token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
        token::NtPath(path) => token::NtPath(fld.fold_path(path)),
        token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
        token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
        token::NtImplItem(item) =>
            token::NtImplItem(fld.fold_impl_item(item)
                              .expect_one("expected fold to produce exactly one item")),
        token::NtTraitItem(item) =>
            token::NtTraitItem(fld.fold_trait_item(item)
                               .expect_one("expected fold to produce exactly one item")),
        token::NtGenerics(generics) => token::NtGenerics(fld.fold_generics(generics)),
        token::NtWhereClause(where_clause) =>
            token::NtWhereClause(fld.fold_where_clause(where_clause)),
        token::NtArg(arg) => token::NtArg(fld.fold_arg(arg)),
        token::NtVis(vis) => token::NtVis(fld.fold_vis(vis)),
        token::NtForeignItem(ni) =>
            token::NtForeignItem(fld.fold_foreign_item(ni)
                                 // see reasoning above
                                 .expect_one("expected fold to produce exactly one item")),
    }
}

pub fn noop_fold_asyncness<T: Folder>(asyncness: IsAsync, fld: &mut T) -> IsAsync {
    match asyncness {
        IsAsync::Async { closure_id, return_impl_trait_id } => IsAsync::Async {
            closure_id: fld.new_id(closure_id),
            return_impl_trait_id: fld.new_id(return_impl_trait_id),
        },
        IsAsync::NotAsync => IsAsync::NotAsync,
    }
}

pub fn noop_fold_fn_decl<T: Folder>(decl: P<FnDecl>, fld: &mut T) -> P<FnDecl> {
    decl.map(|FnDecl {inputs, output, variadic}| FnDecl {
        inputs: inputs.move_map(|x| fld.fold_arg(x)),
        output: match output {
            FunctionRetTy::Ty(ty) => FunctionRetTy::Ty(fld.fold_ty(ty)),
            FunctionRetTy::Default(span) => FunctionRetTy::Default(fld.new_span(span)),
        },
        variadic,
    })
}

pub fn noop_fold_param_bound<T>(pb: GenericBound, fld: &mut T) -> GenericBound
    where T: Folder {
    match pb {
        GenericBound::Trait(ty, modifier) => {
            GenericBound::Trait(fld.fold_poly_trait_ref(ty), modifier)
        }
        GenericBound::Outlives(lifetime) => {
            GenericBound::Outlives(noop_fold_lifetime(lifetime, fld))
        }
    }
}

pub fn noop_fold_generic_param<T: Folder>(param: GenericParam, fld: &mut T) -> GenericParam {
    let attrs: Vec<_> = param.attrs.into();
    GenericParam {
        ident: fld.fold_ident(param.ident),
        id: fld.new_id(param.id),
        attrs: attrs.into_iter()
                    .flat_map(|x| fld.fold_attribute(x).into_iter())
                    .collect::<Vec<_>>()
                    .into(),
        bounds: param.bounds.move_map(|l| noop_fold_param_bound(l, fld)),
        kind: match param.kind {
            GenericParamKind::Lifetime => GenericParamKind::Lifetime,
            GenericParamKind::Type { default } => GenericParamKind::Type {
                default: default.map(|ty| fld.fold_ty(ty))
            }
        }
    }
}

pub fn noop_fold_generic_params<T: Folder>(
    params: Vec<GenericParam>,
    fld: &mut T
) -> Vec<GenericParam> {
    params.move_map(|p| fld.fold_generic_param(p))
}

pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
    Label {
        ident: fld.fold_ident(label.ident),
    }
}

fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
    Lifetime {
        id: fld.new_id(l.id),
        ident: fld.fold_ident(l.ident),
    }
}

pub fn noop_fold_generics<T: Folder>(Generics { params, where_clause, span }: Generics,
                                     fld: &mut T) -> Generics {
    Generics {
        params: fld.fold_generic_params(params),
        where_clause: fld.fold_where_clause(where_clause),
        span: fld.new_span(span),
    }
}

pub fn noop_fold_where_clause<T: Folder>(
    WhereClause {id, predicates, span}: WhereClause,
    fld: &mut T) -> WhereClause {
    WhereClause {
        id: fld.new_id(id),
        predicates: predicates.move_map(|predicate| {
            fld.fold_where_predicate(predicate)
        }),
        span,
    }
}

pub fn noop_fold_where_predicate<T: Folder>(
    pred: WherePredicate,
    fld: &mut T) -> WherePredicate {
    match pred {
        ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{bound_generic_params,
                                                                     bounded_ty,
                                                                     bounds,
                                                                     span}) => {
            ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
                bound_generic_params: fld.fold_generic_params(bound_generic_params),
                bounded_ty: fld.fold_ty(bounded_ty),
                bounds: bounds.move_map(|x| fld.fold_param_bound(x)),
                span: fld.new_span(span)
            })
        }
        ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{lifetime,
                                                                       bounds,
                                                                       span}) => {
            ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
                span: fld.new_span(span),
                lifetime: noop_fold_lifetime(lifetime, fld),
                bounds: bounds.move_map(|bound| noop_fold_param_bound(bound, fld))
            })
        }
        ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{id,
                                                               lhs_ty,
                                                               rhs_ty,
                                                               span}) => {
            ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{
                id: fld.new_id(id),
                lhs_ty: fld.fold_ty(lhs_ty),
                rhs_ty: fld.fold_ty(rhs_ty),
                span: fld.new_span(span)
            })
        }
    }
}

pub fn noop_fold_variant_data<T: Folder>(vdata: VariantData, fld: &mut T) -> VariantData {
    match vdata {
        ast::VariantData::Struct(fields, id) => {
            ast::VariantData::Struct(fields.move_map(|f| fld.fold_struct_field(f)),
                                     fld.new_id(id))
        }
        ast::VariantData::Tuple(fields, id) => {
            ast::VariantData::Tuple(fields.move_map(|f| fld.fold_struct_field(f)),
                                    fld.new_id(id))
        }
        ast::VariantData::Unit(id) => ast::VariantData::Unit(fld.new_id(id))
    }
}

pub fn noop_fold_trait_ref<T: Folder>(p: TraitRef, fld: &mut T) -> TraitRef {
    let id = fld.new_id(p.ref_id);
    let TraitRef {
        path,
        ref_id: _,
    } = p;
    ast::TraitRef {
        path: fld.fold_path(path),
        ref_id: id,
    }
}

pub fn noop_fold_poly_trait_ref<T: Folder>(p: PolyTraitRef, fld: &mut T) -> PolyTraitRef {
    ast::PolyTraitRef {
        bound_generic_params: fld.fold_generic_params(p.bound_generic_params),
        trait_ref: fld.fold_trait_ref(p.trait_ref),
        span: fld.new_span(p.span),
    }
}

pub fn noop_fold_struct_field<T: Folder>(f: StructField, fld: &mut T) -> StructField {
    StructField {
        span: fld.new_span(f.span),
        id: fld.new_id(f.id),
        ident: f.ident.map(|ident| fld.fold_ident(ident)),
        vis: fld.fold_vis(f.vis),
        ty: fld.fold_ty(f.ty),
        attrs: fold_attrs(f.attrs, fld),
    }
}

pub fn noop_fold_field<T: Folder>(f: Field, folder: &mut T) -> Field {
    Field {
        ident: folder.fold_ident(f.ident),
        expr: folder.fold_expr(f.expr),
        span: folder.new_span(f.span),
        is_shorthand: f.is_shorthand,
        attrs: fold_thin_attrs(f.attrs, folder),
    }
}

pub fn noop_fold_mt<T: Folder>(MutTy {ty, mutbl}: MutTy, folder: &mut T) -> MutTy {
    MutTy {
        ty: folder.fold_ty(ty),
        mutbl,
    }
}

pub fn noop_fold_opt_bounds<T: Folder>(b: Option<GenericBounds>, folder: &mut T)
                                       -> Option<GenericBounds> {
    b.map(|bounds| folder.fold_bounds(bounds))
}

fn noop_fold_bounds<T: Folder>(bounds: GenericBounds, folder: &mut T)
                               -> GenericBounds {
    bounds.move_map(|bound| folder.fold_param_bound(bound))
}

pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
    b.map(|Block {id, stmts, rules, span, recovered}| Block {
        id: folder.new_id(id),
        // A single statement may fold into zero or more statements.
        stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()),
        rules,
        span: folder.new_span(span),
        recovered,
    })
}

// NOTE(review): this function is cut off at the end of this chunk; the match
// continues (and the function closes) in the portion of the file that follows.
pub fn noop_fold_item_kind<T: Folder>(i: ItemKind, folder: &mut T) -> ItemKind {
    match i {
        ItemKind::ExternCrate(orig_name) => ItemKind::ExternCrate(orig_name),
        ItemKind::Use(use_tree) => {
            ItemKind::Use(use_tree.map(|tree| folder.fold_use_tree(tree)))
        }
        ItemKind::Static(t, m, e) => {
            ItemKind::Static(folder.fold_ty(t), m, folder.fold_expr(e))
        }
        ItemKind::Const(t, e) => {
            ItemKind::Const(folder.fold_ty(t), folder.fold_expr(e))
        }
        ItemKind::Fn(decl, header, generics, body) => {
            let generics = folder.fold_generics(generics);
            let header = folder.fold_fn_header(header);
            let decl = folder.fold_fn_decl(decl);
            let body = folder.fold_block(body);
            ItemKind::Fn(decl, header, generics, body)
        }
        ItemKind::Mod(m) => ItemKind::Mod(folder.fold_mod(m)),
        ItemKind::ForeignMod(nm) => ItemKind::ForeignMod(folder.fold_foreign_mod(nm)),
        ItemKind::GlobalAsm(ga) => ItemKind::GlobalAsm(folder.fold_global_asm(ga)),
        ItemKind::Ty(t, generics) => {
            ItemKind::Ty(folder.fold_ty(t), folder.fold_generics(generics))
        }
        ItemKind::Existential(bounds, generics) => ItemKind::Existential(
            folder.fold_bounds(bounds),
            folder.fold_generics(generics),
        ),
        ItemKind::Enum(enum_definition, generics) => {
            let generics = folder.fold_generics(generics);
            let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x));
            ItemKind::Enum(ast::EnumDef { variants: variants }, generics)
        }
        ItemKind::Struct(struct_def, generics) => {
            let generics = folder.fold_generics(generics);
            ItemKind::Struct(folder.fold_variant_data(struct_def), generics)
        }
        ItemKind::Union(struct_def, generics) => {
            let generics = folder.fold_generics(generics);
            ItemKind::Union(folder.fold_variant_data(struct_def), generics)
        }
        ItemKind::Impl(unsafety, polarity, defaultness, generics, ifce, ty, impl_items) =>
            ItemKind::Impl(
                unsafety,
                polarity,
                defaultness,
                folder.fold_generics(generics),
                ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref.clone())),
                folder.fold_ty(ty),
                impl_items.move_flat_map(|item| folder.fold_impl_item(item)),
            ),
        ItemKind::Trait(is_auto, unsafety, generics, bounds, items) =>
            ItemKind::Trait(
                is_auto,
                unsafety,
                folder.fold_generics(generics),
                folder.fold_bounds(bounds),
                items.move_flat_map(|item| folder.fold_trait_item(item)),
            ),
ItemKind::TraitAlias(generics, bounds) => ItemKind::TraitAlias( folder.fold_generics(generics), folder.fold_bounds(bounds)), ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)), ItemKind::MacroDef(def) => ItemKind::MacroDef(folder.fold_macro_def(def)), } } pub fn noop_fold_trait_item<T: Folder>(i: TraitItem, folder: &mut T) -> OneVector<TraitItem> { smallvec![TraitItem { id: folder.new_id(i.id), ident: folder.fold_ident(i.ident), attrs: fold_attrs(i.attrs, folder), generics: folder.fold_generics(i.generics), node: match i.node { TraitItemKind::Const(ty, default) => { TraitItemKind::Const(folder.fold_ty(ty), default.map(|x| folder.fold_expr(x))) } TraitItemKind::Method(sig, body) => { TraitItemKind::Method(noop_fold_method_sig(sig, folder), body.map(|x| folder.fold_block(x))) } TraitItemKind::Type(bounds, default) => { TraitItemKind::Type(folder.fold_bounds(bounds), default.map(|x| folder.fold_ty(x))) } ast::TraitItemKind::Macro(mac) => { TraitItemKind::Macro(folder.fold_mac(mac)) } }, span: folder.new_span(i.span), tokens: i.tokens, }] } pub fn noop_fold_impl_item<T: Folder>(i: ImplItem, folder: &mut T) -> OneVector<ImplItem> { smallvec![ImplItem { id: folder.new_id(i.id), vis: folder.fold_vis(i.vis), ident: folder.fold_ident(i.ident), attrs: fold_attrs(i.attrs, folder), generics: folder.fold_generics(i.generics), defaultness: i.defaultness, node: match i.node { ast::ImplItemKind::Const(ty, expr) => { ast::ImplItemKind::Const(folder.fold_ty(ty), folder.fold_expr(expr)) } ast::ImplItemKind::Method(sig, body) => { ast::ImplItemKind::Method(noop_fold_method_sig(sig, folder), folder.fold_block(body)) } ast::ImplItemKind::Type(ty) => ast::ImplItemKind::Type(folder.fold_ty(ty)), ast::ImplItemKind::Existential(bounds) => { ast::ImplItemKind::Existential(folder.fold_bounds(bounds)) }, ast::ImplItemKind::Macro(mac) => ast::ImplItemKind::Macro(folder.fold_mac(mac)) }, span: folder.new_span(i.span), tokens: i.tokens, }] } pub fn noop_fold_fn_header<T: Folder>(mut header: 
FnHeader, folder: &mut T) -> FnHeader { header.asyncness = folder.fold_asyncness(header.asyncness); header } pub fn noop_fold_mod<T: Folder>(Mod {inner, items}: Mod, folder: &mut T) -> Mod { Mod { inner: folder.new_span(inner), items: items.move_flat_map(|x| folder.fold_item(x)), } } pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate, folder: &mut T) -> Crate { let mut items = folder.fold_item(P(ast::Item { ident: keywords::Invalid.ident(), attrs, id: ast::DUMMY_NODE_ID, vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Public), span, node: ast::ItemKind::Mod(module), tokens: None, })).into_iter(); let (module, attrs, span) = match items.next() { Some(item) => { assert!(items.next().is_none(), "a crate cannot expand to more than one item"); item.and_then(|ast::Item { attrs, span, node, .. }| { match node { ast::ItemKind::Mod(m) => (m, attrs, span), _ => panic!("fold converted a module to not a module"), } }) } None => (ast::Mod { inner: span, items: vec![], }, vec![], span) }; Crate { module, attrs, span, } } // fold one item into possibly many items pub fn noop_fold_item<T: Folder>(i: P<Item>, folder: &mut T) -> OneVector<P<Item>> { smallvec![i.map(|i| folder.fold_item_simple(i))] } // fold one item into exactly one item pub fn noop_fold_item_simple<T: Folder>(Item {id, ident, attrs, node, vis, span, tokens}: Item, folder: &mut T) -> Item { Item { id: folder.new_id(id), vis: folder.fold_vis(vis), ident: folder.fold_ident(ident), attrs: fold_attrs(attrs, folder), node: folder.fold_item_kind(node), span: folder.new_span(span), // FIXME: if this is replaced with a call to `folder.fold_tts` it causes // an ICE during resolve... odd! 
tokens, } } pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T) -> OneVector<ForeignItem> { smallvec![folder.fold_foreign_item_simple(ni)] } pub fn noop_fold_foreign_item_simple<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem { ForeignItem { id: folder.new_id(ni.id), vis: folder.fold_vis(ni.vis), ident: folder.fold_ident(ni.ident), attrs: fold_attrs(ni.attrs, folder), node: match ni.node { ForeignItemKind::Fn(fdec, generics) => { ForeignItemKind::Fn(folder.fold_fn_decl(fdec), folder.fold_generics(generics)) } ForeignItemKind::Static(t, m) => { ForeignItemKind::Static(folder.fold_ty(t), m) } ForeignItemKind::Ty => ForeignItemKind::Ty, ForeignItemKind::Macro(mac) => ForeignItemKind::Macro(folder.fold_mac(mac)), }, span: folder.new_span(ni.span) } } pub fn noop_fold_method_sig<T: Folder>(sig: MethodSig, folder: &mut T) -> MethodSig { MethodSig { header: folder.fold_fn_header(sig.header), decl: folder.fold_fn_decl(sig.decl) } } pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> { p.map(|Pat {id, node, span}| Pat { id: folder.new_id(id), node: match node { PatKind::Wild => PatKind::Wild, PatKind::Ident(binding_mode, ident, sub) => { PatKind::Ident(binding_mode, folder.fold_ident(ident), sub.map(|x| folder.fold_pat(x))) } PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)), PatKind::TupleStruct(pth, pats, ddpos) => { PatKind::TupleStruct(folder.fold_path(pth), pats.move_map(|x| folder.fold_pat(x)), ddpos) } PatKind::Path(qself, pth) => { let (qself, pth) = folder.fold_qpath(qself, pth); PatKind::Path(qself, pth) } PatKind::Struct(pth, fields, etc) => { let pth = folder.fold_path(pth); let fs = fields.move_map(|f| { Spanned { span: folder.new_span(f.span), node: ast::FieldPat { ident: folder.fold_ident(f.node.ident), pat: folder.fold_pat(f.node.pat), is_shorthand: f.node.is_shorthand, attrs: fold_attrs(f.node.attrs.into(), folder).into() }} }); PatKind::Struct(pth, fs, etc) } PatKind::Tuple(elts, ddpos) => { 
PatKind::Tuple(elts.move_map(|x| folder.fold_pat(x)), ddpos) } PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)), PatKind::Ref(inner, mutbl) => PatKind::Ref(folder.fold_pat(inner), mutbl), PatKind::Range(e1, e2, Spanned { span, node: end }) => { PatKind::Range(folder.fold_expr(e1), folder.fold_expr(e2), Spanned { span, node: folder.fold_range_end(end) }) }, PatKind::Slice(before, slice, after) => { PatKind::Slice(before.move_map(|x| folder.fold_pat(x)), slice.map(|x| folder.fold_pat(x)), after.move_map(|x| folder.fold_pat(x))) } PatKind::Paren(inner) => PatKind::Paren(folder.fold_pat(inner)), PatKind::Mac(mac) => PatKind::Mac(folder.fold_mac(mac)) }, span: folder.new_span(span) }) } pub fn noop_fold_range_end<T: Folder>(end: RangeEnd, _folder: &mut T) -> RangeEnd { end } pub fn noop_fold_anon_const<T: Folder>(constant: AnonConst, folder: &mut T) -> AnonConst { let AnonConst {id, value} = constant; AnonConst { id: folder.new_id(id), value: folder.fold_expr(value), } } pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mut T) -> Expr { Expr { node: match node { ExprKind::Box(e) => { ExprKind::Box(folder.fold_expr(e)) } ExprKind::ObsoleteInPlace(a, b) => { ExprKind::ObsoleteInPlace(folder.fold_expr(a), folder.fold_expr(b)) } ExprKind::Array(exprs) => { ExprKind::Array(folder.fold_exprs(exprs)) } ExprKind::Repeat(expr, count) => { ExprKind::Repeat(folder.fold_expr(expr), folder.fold_anon_const(count)) } ExprKind::Tup(exprs) => ExprKind::Tup(folder.fold_exprs(exprs)), ExprKind::Call(f, args) => { ExprKind::Call(folder.fold_expr(f), folder.fold_exprs(args)) } ExprKind::MethodCall(seg, args) => { ExprKind::MethodCall( PathSegment { ident: folder.fold_ident(seg.ident), args: seg.args.map(|args| { args.map(|args| folder.fold_generic_args(args)) }), }, folder.fold_exprs(args)) } ExprKind::Binary(binop, lhs, rhs) => { ExprKind::Binary(binop, folder.fold_expr(lhs), folder.fold_expr(rhs)) } ExprKind::Unary(binop, ohs) => { 
ExprKind::Unary(binop, folder.fold_expr(ohs)) } ExprKind::Lit(l) => ExprKind::Lit(l), ExprKind::Cast(expr, ty) => { ExprKind::Cast(folder.fold_expr(expr), folder.fold_ty(ty)) } ExprKind::Type(expr, ty) => { ExprKind::Type(folder.fold_expr(expr), folder.fold_ty(ty)) } ExprKind::AddrOf(m, ohs) => ExprKind::AddrOf(m, folder.fold_expr(ohs)), ExprKind::If(cond, tr, fl) => { ExprKind::If(folder.fold_expr(cond), folder.fold_block(tr), fl.map(|x| folder.fold_expr(x))) } ExprKind::IfLet(pats, expr, tr, fl) => { ExprKind::IfLet(pats.move_map(|pat| folder.fold_pat(pat)), folder.fold_expr(expr), folder.fold_block(tr), fl.map(|x| folder.fold_expr(x))) } ExprKind::While(cond, body, opt_label) => { ExprKind::While(folder.fold_expr(cond), folder.fold_block(body), opt_label.map(|label| folder.fold_label(label))) } ExprKind::WhileLet(pats, expr, body, opt_label) => { ExprKind::WhileLet(pats.move_map(|pat| folder.fold_pat(pat)), folder.fold_expr(expr), folder.fold_block(body), opt_label.map(|label| folder.fold_label(label))) } ExprKind::ForLoop(pat, iter, body, opt_label) => { ExprKind::ForLoop(folder.fold_pat(pat), folder.fold_expr(iter), folder.fold_block(body), opt_label.map(|label| folder.fold_label(label))) } ExprKind::Loop(body, opt_label) => { ExprKind::Loop(folder.fold_block(body), opt_label.map(|label| folder.fold_label(label))) } ExprKind::Match(expr, arms) => { ExprKind::Match(folder.fold_expr(expr), arms.move_map(|x| folder.fold_arm(x))) } ExprKind::Closure(capture_clause, asyncness, movability, decl, body, span) => { ExprKind::Closure(capture_clause, folder.fold_asyncness(asyncness), movability, folder.fold_fn_decl(decl), folder.fold_expr(body), folder.new_span(span)) } ExprKind::Block(blk, opt_label) => { ExprKind::Block(folder.fold_block(blk), opt_label.map(|label| folder.fold_label(label))) } ExprKind::Async(capture_clause, node_id, body) => { ExprKind::Async( capture_clause, folder.new_id(node_id), folder.fold_block(body), ) } ExprKind::Assign(el, er) => { 
ExprKind::Assign(folder.fold_expr(el), folder.fold_expr(er)) } ExprKind::AssignOp(op, el, er) => { ExprKind::AssignOp(op, folder.fold_expr(el), folder.fold_expr(er)) } ExprKind::Field(el, ident) => { ExprKind::Field(folder.fold_expr(el), folder.fold_ident(ident)) } ExprKind::Index(el, er) => { ExprKind::Index(folder.fold_expr(el), folder.fold_expr(er)) } ExprKind::Range(e1, e2, lim) => { ExprKind::Range(e1.map(|x| folder.fold_expr(x)), e2.map(|x| folder.fold_expr(x)), lim) } ExprKind::Path(qself, path) => { let (qself, path) = folder.fold_qpath(qself, path); ExprKind::Path(qself, path) } ExprKind::Break(opt_label, opt_expr) => { ExprKind::Break(opt_label.map(|label| folder.fold_label(label)), opt_expr.map(|e| folder.fold_expr(e))) } ExprKind::Continue(opt_label) => { ExprKind::Continue(opt_label.map(|label| folder.fold_label(label))) } ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))), ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| { InlineAsm { inputs: asm.inputs.move_map(|(c, input)| { (c, folder.fold_expr(input)) }), outputs: asm.outputs.move_map(|out| { InlineAsmOutput { constraint: out.constraint, expr: folder.fold_expr(out.expr), is_rw: out.is_rw, is_indirect: out.is_indirect, } }), ..asm } })), ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)), ExprKind::Struct(path, fields, maybe_expr) => { ExprKind::Struct(folder.fold_path(path), fields.move_map(|x| folder.fold_field(x)), maybe_expr.map(|x| folder.fold_expr(x))) }, ExprKind::Paren(ex) => { let sub_expr = folder.fold_expr(ex); return Expr { // Nodes that are equal modulo `Paren` sugar no-ops should have the same ids. 
id: sub_expr.id, node: ExprKind::Paren(sub_expr), span: folder.new_span(span), attrs: fold_attrs(attrs.into(), folder).into(), }; } ExprKind::Yield(ex) => ExprKind::Yield(ex.map(|x| folder.fold_expr(x))), ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)), ExprKind::Catch(body) => ExprKind::Catch(folder.fold_block(body)), }, id: folder.new_id(id), span: folder.new_span(span), attrs: fold_attrs(attrs.into(), folder).into(), } } pub fn noop_fold_opt_expr<T: Folder>(e: P<Expr>, folder: &mut T) -> Option<P<Expr>> { Some(folder.fold_expr(e)) } pub fn noop_fold_exprs<T: Folder>(es: Vec<P<Expr>>, folder: &mut T) -> Vec<P<Expr>> { es.move_flat_map(|e| folder.fold_opt_expr(e)) } pub fn noop_fold_stmt<T: Folder>(Stmt {node, span, id}: Stmt, folder: &mut T) -> OneVector<Stmt> { let id = folder.new_id(id); let span = folder.new_span(span); noop_fold_stmt_kind(node, folder).into_iter().map(|node| { Stmt { id: id, node: node, span: span } }).collect() } pub fn noop_fold_stmt_kind<T: Folder>(node: StmtKind, folder: &mut T) -> OneVector<StmtKind> { match node { StmtKind::Local(local) => smallvec![StmtKind::Local(folder.fold_local(local))], StmtKind::Item(item) => folder.fold_item(item).into_iter().map(StmtKind::Item).collect(), StmtKind::Expr(expr) => { folder.fold_opt_expr(expr).into_iter().map(StmtKind::Expr).collect() } StmtKind::Semi(expr) => { folder.fold_opt_expr(expr).into_iter().map(StmtKind::Semi).collect() } StmtKind::Mac(mac) => smallvec![StmtKind::Mac(mac.map(|(mac, semi, attrs)| { (folder.fold_mac(mac), semi, fold_attrs(attrs.into(), folder).into()) }))], } } pub fn noop_fold_vis<T: Folder>(vis: Visibility, folder: &mut T) -> Visibility { match vis.node { VisibilityKind::Restricted { path, id } => { respan(vis.span, VisibilityKind::Restricted { path: path.map(|path| folder.fold_path(path)), id: folder.new_id(id), }) } _ => vis, } } #[cfg(test)] mod tests { use std::io; use ast::{self, Ident}; use util::parser_testing::{string_to_crate, matches_codepattern}; use 
print::pprust; use fold; use with_globals; use super::*; // this version doesn't care about getting comments or docstrings in. fn fake_print_crate(s: &mut pprust::State, krate: &ast::Crate) -> io::Result<()> { s.print_mod(&krate.module, &krate.attrs) } // change every identifier to "zz" struct ToZzIdentFolder; impl Folder for ToZzIdentFolder { fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident { Ident::from_str("zz") } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fold::noop_fold_mac(mac, self) } } // maybe add to expand.rs... macro_rules! assert_pred { ($pred:expr, $predname:expr, $a:expr , $b:expr) => ( { let pred_val = $pred; let a_val = $a; let b_val = $b; if !(pred_val(&a_val, &b_val)) { panic!("expected args satisfying {}, got {} and {}", $predname, a_val, b_val); } } ) } // make sure idents get transformed everywhere #[test] fn ident_transformation () { with_globals(|| { let mut zz_fold = ToZzIdentFolder; let ast = string_to_crate( "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); let folded_crate = zz_fold.fold_crate(ast); assert_pred!( matches_codepattern, "matches_codepattern", pprust::to_string(|s| fake_print_crate(s, &folded_crate)), "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); }) } // even inside macro defs.... #[test] fn ident_transformation_in_defs () { with_globals(|| { let mut zz_fold = ToZzIdentFolder; let ast = string_to_crate( "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ (g $(d $d $e)+))} ".to_string()); let folded_crate = zz_fold.fold_crate(ast); assert_pred!( matches_codepattern, "matches_codepattern", pprust::to_string(|s| fake_print_crate(s, &folded_crate)), "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); }) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ptr.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The AST pointer
//!
//! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in
//! the AST.
//!
//! # Motivations and benefits
//!
//! * **Identity**: sharing AST nodes is problematic for the various analysis
//!   passes (e.g. one may be able to bypass the borrow checker with a shared
//!   `ExprKind::AddrOf` node taking a mutable borrow). The only reason `@T` in the
//!   AST hasn't caused issues is because of inefficient folding passes which
//!   would always deduplicate any such shared nodes. Even if the AST were to
//!   switch to an arena, this would still hold, i.e. it couldn't use `&'a T`,
//!   but rather a wrapper like `P<'a, T>`.
//!
//! * **Immutability**: `P<T>` disallows mutating its inner `T`, unlike `Box<T>`
//!   (unless it contains an `Unsafe` interior, but that may be denied later).
//!   This mainly prevents mistakes, but can also enforce a kind of "purity".
//!
//! * **Efficiency**: folding can reuse allocation space for `P<T>` and `Vec<T>`,
//!   the latter even when the input and output types differ (as it would be the
//!   case with arenas or a GADT AST using type parameters to toggle features).
//!
//! * **Maintainability**: `P<T>` provides a fixed interface - `Deref`,
//!   `and_then` and `map` - which can remain fully functional even if the
//!   implementation changes (using a special thread-local heap, for example).
//!   Moreover, a switch to, e.g. `P<'a, T>` would be easy and mostly automated.

use std::fmt::{self, Display, Debug};
use std::iter::FromIterator;
use std::ops::Deref;
use std::{mem, ptr, slice, vec};

use serialize::{Encodable, Decodable, Encoder, Decoder};

use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, HashStable};

/// An owned smart pointer.
///
/// Currently a thin wrapper around `Box<T>`. The field is private and no
/// mutable access is exposed, so the pointee is frozen after construction;
/// the only ways to change it (`map`, `and_then`) consume `self`.
#[derive(Hash, PartialEq, Eq)]
pub struct P<T: ?Sized> {
    ptr: Box<T>
}

#[allow(non_snake_case)]
/// Construct a `P<T>` from a `T` value.
pub fn P<T: 'static>(value: T) -> P<T> {
    P {
        ptr: Box::new(value)
    }
}

impl<T: 'static> P<T> {
    /// Move out of the pointer.
    /// Intended for chaining transformations not covered by `map`.
    pub fn and_then<U, F>(self, f: F) -> U where
        F: FnOnce(T) -> U,
    {
        f(*self.ptr)
    }

    /// Equivalent to `and_then(|x| x)`: moves the inner value out.
    pub fn into_inner(self) -> T {
        *self.ptr
    }

    /// Transform the inner value, consuming `self` and producing a new `P<T>`.
    ///
    /// Performed in place: the original `Box` allocation is reused for the
    /// result instead of allocating a fresh one.
    pub fn map<F>(mut self, f: F) -> P<T> where
        F: FnOnce(T) -> T,
    {
        let p: *mut T = &mut *self.ptr;

        // Leak self in case of panic.
        // FIXME(eddyb) Use some sort of "free guard" that
        // only deallocates, without dropping the pointee,
        // in case the call to `f` below ends in a panic.
        mem::forget(self);

        unsafe {
            // SAFETY: `p` came from the live `Box` above and `self` was just
            // leaked, so `p` is valid and uniquely owned here. `ptr::read`
            // moves the old value out, `f` transforms it, and `ptr::write`
            // stores the result back into the same allocation without
            // dropping the (already moved-out) old value.
            ptr::write(p, f(ptr::read(p)));

            // Recreate self from the raw pointer.
            P { ptr: Box::from_raw(p) }
        }
    }
}

impl<T: ?Sized> Deref for P<T> {
    type Target = T;

    // `&self.ptr` auto-derefs through the `Box` to yield `&T`.
    fn deref(&self) -> &T {
        &self.ptr
    }
}

impl<T: 'static + Clone> Clone for P<T> {
    // Deep clone: copies the pointee into a new allocation.
    fn clone(&self) -> P<T> {
        P((**self).clone())
    }
}

impl<T: ?Sized + Debug> Debug for P<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.ptr, f)
    }
}

impl<T: Display> Display for P<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&**self, f)
    }
}

impl<T> fmt::Pointer for P<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr, f)
    }
}

impl<T: 'static + Decodable> Decodable for P<T> {
    // Decode the inner value, then box it via the `P` constructor function.
    fn decode<D: Decoder>(d: &mut D) -> Result<P<T>, D::Error> {
        Decodable::decode(d).map(P)
    }
}

impl<T: Encodable> Encodable for P<T> {
    // Encode the pointee; the pointer wrapper is transparent on the wire.
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        (**self).encode(s)
    }
}

impl<T> P<[T]> {
    /// An empty `P<[T]>`, via `Default` for `Box<[T]>`.
    pub fn new() -> P<[T]> {
        P { ptr: Default::default() }
    }

    // NOTE(review): `#[inline(never)]` on both conversions looks deliberate
    // (presumably code-size control) -- confirm before removing.
    #[inline(never)]
    pub fn from_vec(v: Vec<T>) -> P<[T]> {
        P { ptr: v.into_boxed_slice() }
    }

    #[inline(never)]
    pub fn into_vec(self) -> Vec<T> {
        self.ptr.into_vec()
    }
}

impl<T> Default for P<[T]> {
    /// Creates an empty `P<[T]>`.
    fn default() -> P<[T]> {
        P::new()
    }
}

impl<T: Clone> Clone for P<[T]> {
    // Deep clone of the slice contents into a new allocation.
    fn clone(&self) -> P<[T]> {
        P::from_vec(self.to_vec())
    }
}

impl<T> From<Vec<T>> for P<[T]> {
    fn from(v: Vec<T>) -> Self {
        P::from_vec(v)
    }
}

impl<T> Into<Vec<T>> for P<[T]> {
    fn into(self) -> Vec<T> {
        self.into_vec()
    }
}

impl<T> FromIterator<T> for P<[T]> {
    fn from_iter<I: IntoIterator<Item=T>>(iter: I) -> P<[T]> {
        P::from_vec(iter.into_iter().collect())
    }
}

impl<T> IntoIterator for P<[T]> {
    type Item = T;
    type IntoIter = vec::IntoIter<T>;

    // Owned iteration goes through `Vec`'s iterator.
    fn into_iter(self) -> Self::IntoIter {
        self.into_vec().into_iter()
    }
}

impl<'a, T> IntoIterator for &'a P<[T]> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.ptr.into_iter()
    }
}

impl<T: Encodable> Encodable for P<[T]> {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        Encodable::encode(&**self, s)
    }
}

impl<T: Decodable> Decodable for P<[T]> {
    fn decode<D: Decoder>(d: &mut D) -> Result<P<[T]>, D::Error> {
        Ok(P::from_vec(Decodable::decode(d)?))
    }
}

impl<CTX, T> HashStable<CTX> for P<T>
    where T: ?Sized + HashStable<CTX>
{
    // Hash the pointee only; pointer identity is irrelevant to the stable hash.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut CTX,
                                          hasher: &mut StableHasher<W>) {
        (**self).hash_stable(hcx, hasher);
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/entry.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use attr; use ast::{Item, ItemKind}; pub enum EntryPointType { None, MainNamed, MainAttr, Start, OtherMain, // Not an entry point, but some other function named main } // Beware, this is duplicated in librustc/middle/entry.rs, make sure to keep // them in sync. pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType { match item.node { ItemKind::Fn(..) => { if attr::contains_name(&item.attrs, "start") { EntryPointType::Start } else if attr::contains_name(&item.attrs, "main") { EntryPointType::MainAttr } else if item.ident.name == "main" { if depth == 1 { // This is a top-level function so can be 'main' EntryPointType::MainNamed } else { EntryPointType::OtherMain } } else { EntryPointType::None } } _ => EntryPointType::None, } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/diagnostic_list.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_snake_case)] // Error messages for EXXXX errors. // Each message should start and end with a new line, and be wrapped to 80 characters. // In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable. register_long_diagnostics! { E0178: r##" In types, the `+` type operator has low precedence, so it is often necessary to use parentheses. For example: ```compile_fail,E0178 trait Foo {} struct Bar<'a> { w: &'a Foo + Copy, // error, use &'a (Foo + Copy) x: &'a Foo + 'a, // error, use &'a (Foo + 'a) y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a) z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a) } ``` More details can be found in [RFC 438]. [RFC 438]: https://github.com/rust-lang/rfcs/pull/438 "##, E0536: r##" The `not` cfg-predicate was malformed. Erroneous code example: ```compile_fail,E0536 #[cfg(not())] // error: expected 1 cfg-pattern pub fn something() {} pub fn main() {} ``` The `not` predicate expects one cfg-pattern. Example: ``` #[cfg(not(target_os = "linux"))] // ok! pub fn something() {} pub fn main() {} ``` For more information about the cfg attribute, read: https://doc.rust-lang.org/reference.html#conditional-compilation "##, E0537: r##" An unknown predicate was used inside the `cfg` attribute. 
Erroneous code example: ```compile_fail,E0537 #[cfg(unknown())] // error: invalid predicate `unknown` pub fn something() {} pub fn main() {} ``` The `cfg` attribute supports only three kinds of predicates: * any * all * not Example: ``` #[cfg(not(target_os = "linux"))] // ok! pub fn something() {} pub fn main() {} ``` For more information about the cfg attribute, read: https://doc.rust-lang.org/reference.html#conditional-compilation "##, E0538: r##" Attribute contains same meta item more than once. Erroneous code example: ```compile_fail,E0538 #[deprecated( since="1.0.0", note="First deprecation note.", note="Second deprecation note." // error: multiple same meta item )] fn deprecated_function() {} ``` Meta items are the key-value pairs inside of an attribute. Each key may only be used once in each attribute. To fix the problem, remove all but one of the meta items with the same key. Example: ``` #[deprecated( since="1.0.0", note="First deprecation note." )] fn deprecated_function() {} ``` "##, E0541: r##" An unknown meta item was used. Erroneous code example: ```compile_fail,E0541 #[deprecated( since="1.0.0", // error: unknown meta item reason="Example invalid meta item. Should be 'note'") ] fn deprecated_function() {} ``` Meta items are the key-value pairs inside of an attribute. The keys provided must be one of the valid keys for the specified attribute. To fix the problem, either remove the unknown meta item, or rename it if you provided the wrong name. In the erroneous code example above, the wrong name was provided, so changing to a correct one it will fix the error. Example: ``` #[deprecated( since="1.0.0", note="This is a valid meta item for the deprecated attribute." )] fn deprecated_function() {} ``` "##, E0552: r##" A unrecognized representation attribute was used. 
Erroneous code example: ```compile_fail,E0552 #[repr(D)] // error: unrecognized representation hint struct MyStruct { my_field: usize } ``` You can use a `repr` attribute to tell the compiler how you want a struct or enum to be laid out in memory. Make sure you're using one of the supported options: ``` #[repr(C)] // ok! struct MyStruct { my_field: usize } ``` For more information about specifying representations, see the ["Alternative Representations" section] of the Rustonomicon. ["Alternative Representations" section]: https://doc.rust-lang.org/nomicon/other-reprs.html "##, E0554: r##" Feature attributes are only allowed on the nightly release channel. Stable or beta compilers will not comply. Example of erroneous code (on a stable compiler): ```ignore (depends on release channel) #![feature(non_ascii_idents)] // error: #![feature] may not be used on the // stable release channel ``` If you need the feature, make sure to use a nightly release of the compiler (but be warned that the feature may be removed or altered in the future). "##, E0557: r##" A feature attribute named a feature that has been removed. Erroneous code example: ```compile_fail,E0557 #![feature(managed_boxes)] // error: feature has been removed ``` Delete the offending feature attribute. "##, E0565: r##" A literal was used in an attribute that doesn't support literals. Erroneous code example: ```ignore (compile_fail not working here; see Issue #43707) #![feature(attr_literals)] #[inline("always")] // error: unsupported literal pub fn something() {} ``` Literals in attributes are new and largely unsupported. Work to support literals where appropriate is ongoing. Try using an unquoted name instead: ``` #[inline(always)] pub fn something() {} ``` "##, E0583: r##" A file wasn't found for an out-of-line module. 
Erroneous code example: ```ignore (compile_fail not working here; see Issue #43707) mod file_that_doesnt_exist; // error: file not found for module fn main() {} ``` Please be sure that a file corresponding to the module exists. If you want to use a module named `file_that_doesnt_exist`, you need to have a file named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the same directory. "##, E0585: r##" A documentation comment that doesn't document anything was found. Erroneous code example: ```compile_fail,E0585 fn main() { // The following doc comment will fail: /// This is a useless doc comment! } ``` Documentation comments need to be followed by items, including functions, types, modules, etc. Examples: ``` /// I'm documenting the following struct: struct Foo; /// I'm documenting the following function: fn foo() {} ``` "##, E0586: r##" An inclusive range was used with no end. Erroneous code example: ```compile_fail,E0586 fn main() { let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1]; let x = &tmp[1..=]; // error: inclusive range was used with no end } ``` An inclusive range needs an end in order to *include* it. If you just need a start and no end, use a non-inclusive range (with `..`): ``` fn main() { let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1]; let x = &tmp[1..]; // ok! } ``` Or put an end to your inclusive range: ``` fn main() { let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1]; let x = &tmp[1..=3]; // ok! } ``` "##, E0589: r##" The value of `N` that was specified for `repr(align(N))` was not a power of two, or was greater than 2^29. ```compile_fail,E0589 #[repr(align(15))] // error: invalid `repr(align)` attribute: not a power of two enum Foo { Bar(u64), } ``` "##, E0658: r##" An unstable feature was used. Erroneous code example: ```compile_fail,E658 #[repr(u128)] // error: use of unstable library feature 'repr128' enum Foo { Bar(u64), } ``` If you're using a stable or a beta version of rustc, you won't be able to use any unstable features. 
In order to do so, please switch to a nightly version of rustc (by using rustup). If you're using a nightly version of rustc, just add the corresponding feature to be able to use it: ``` #![feature(repr128)] #[repr(u128)] // ok! enum Foo { Bar(u64), } ``` "##, E0633: r##" The `unwind` attribute was malformed. Erroneous code example: ```ignore (compile_fail not working here; see Issue #43707) #[unwind()] // error: expected one argument pub extern fn something() {} fn main() {} ``` The `#[unwind]` attribute should be used as follows: - `#[unwind(aborts)]` -- specifies that if a non-Rust ABI function should abort the process if it attempts to unwind. This is the safer and preferred option. - `#[unwind(allowed)]` -- specifies that a non-Rust ABI function should be allowed to unwind. This can easily result in Undefined Behavior (UB), so be careful. NB. The default behavior here is "allowed", but this is unspecified and likely to change in the future. "##, E0705: r##" A `#![feature]` attribute was declared for a feature that is stable in the current edition. Erroneous code example: ```ignore (limited to a warning during 2018 edition development) #![feature(rust_2018_preview)] #![feature(impl_header_lifetime_elision)] // error: the feature // `impl_header_lifetime_elision` is // included in the Rust 2018 edition ``` "##, } register_diagnostics! 
{ E0539, // incorrect meta item E0540, // multiple rustc_deprecated attributes E0542, // missing 'since' E0543, // missing 'reason' E0544, // multiple stability levels E0545, // incorrect 'issue' E0546, // missing 'feature' E0547, // missing 'issue' E0548, // incorrect stability attribute type E0549, // rustc_deprecated attribute must be paired with either stable or unstable attribute E0550, // multiple deprecated attributes E0551, // incorrect meta item E0553, // multiple rustc_const_unstable attributes E0555, // malformed feature attribute, expected #![feature(...)] E0556, // malformed feature, expected just one word E0584, // file for module `..` found at both .. and .. E0629, // missing 'feature' (rustc_const_unstable) E0630, // rustc_const_unstable attribute must be paired with stable/unstable attribute E0693, // incorrect `repr(align)` attribute format E0694, // an unknown tool name found in scoped attributes E0703, // invalid ABI E0704, // incorrect visibility restriction }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ast.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // The Rust abstract syntax tree. pub use self::UnsafeSource::*; pub use self::GenericArgs::*; pub use symbol::{Ident, Symbol as Name}; pub use util::parser::ExprPrecedence; use syntax_pos::{Span, DUMMY_SP}; use source_map::{dummy_spanned, respan, Spanned}; use rustc_target::spec::abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; use print::pprust; use ptr::P; use rustc_data_structures::indexed_vec; use rustc_data_structures::indexed_vec::Idx; use symbol::{Symbol, keywords}; use ThinVec; use tokenstream::{ThinTokenStream, TokenStream}; use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; use rustc_data_structures::sync::Lrc; use std::u32; pub use rustc_target::abi::FloatTy; #[derive(Clone, RustcEncodable, RustcDecodable, Copy)] pub struct Label { pub ident: Ident, } impl fmt::Debug for Label { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "label({:?})", self.ident) } } #[derive(Clone, RustcEncodable, RustcDecodable, Copy)] pub struct Lifetime { pub id: NodeId, pub ident: Ident, } impl fmt::Debug for Lifetime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "lifetime({}: {})", self.id, pprust::lifetime_to_string(self)) } } /// A "Path" is essentially Rust's notion of a name. /// /// It's represented as a sequence of identifiers, /// along with a bunch of supporting information. /// /// E.g. 
`std::cmp::PartialEq` #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Path { pub span: Span, /// The segments in the path: the things separated by `::`. /// Global paths begin with `keywords::CrateRoot`. pub segments: Vec<PathSegment>, } impl<'a> PartialEq<&'a str> for Path { fn eq(&self, string: &&'a str) -> bool { self.segments.len() == 1 && self.segments[0].ident.name == *string } } impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", pprust::path_to_string(self)) } } impl fmt::Display for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", pprust::path_to_string(self)) } } impl Path { // convert a span and an identifier to the corresponding // 1-segment path pub fn from_ident(ident: Ident) -> Path { Path { segments: vec![PathSegment::from_ident(ident)], span: ident.span } } // Make a "crate root" segment for this path unless it already has it // or starts with something like `self`/`super`/`$crate`/etc. pub fn make_root(&self) -> Option<PathSegment> { if let Some(ident) = self.segments.get(0).map(|seg| seg.ident) { if ident.is_path_segment_keyword() { return None; } } Some(PathSegment::crate_root(self.span.shrink_to_lo())) } pub fn is_global(&self) -> bool { !self.segments.is_empty() && self.segments[0].ident.name == keywords::CrateRoot.name() } } /// A segment of a path: an identifier, an optional lifetime, and a set of types. /// /// E.g. `std`, `String` or `Box<T>` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct PathSegment { /// The identifier portion of this path segment. pub ident: Ident, /// Type/lifetime parameters attached to this path. They come in /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. /// `None` means that no parameter list is supplied (`Path`), /// `Some` means that parameter list is supplied (`Path<X, Y>`) /// but it can be empty (`Path<>`). /// `P` is used as a size optimization for the common case with no parameters. 
pub args: Option<P<GenericArgs>>, } impl PathSegment { pub fn from_ident(ident: Ident) -> Self { PathSegment { ident, args: None } } pub fn crate_root(span: Span) -> Self { PathSegment::from_ident(Ident::new(keywords::CrateRoot.name(), span)) } } /// Arguments of a path segment. /// /// E.g. `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericArgs { /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>` AngleBracketed(AngleBracketedArgs), /// The `(A,B)` and `C` in `Foo(A,B) -> C` Parenthesized(ParenthesisedArgs), } impl GenericArgs { pub fn span(&self) -> Span { match *self { AngleBracketed(ref data) => data.span, Parenthesized(ref data) => data.span, } } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericArg { Lifetime(Lifetime), Type(P<Ty>), } /// A path like `Foo<'a, T>` #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)] pub struct AngleBracketedArgs { /// Overall span pub span: Span, /// The arguments for this path segment. pub args: Vec<GenericArg>, /// Bindings (equality constraints) on associated types, if present. /// /// E.g., `Foo<A=Bar>`. 
pub bindings: Vec<TypeBinding>, } impl Into<Option<P<GenericArgs>>> for AngleBracketedArgs { fn into(self) -> Option<P<GenericArgs>> { Some(P(GenericArgs::AngleBracketed(self))) } } impl Into<Option<P<GenericArgs>>> for ParenthesisedArgs { fn into(self) -> Option<P<GenericArgs>> { Some(P(GenericArgs::Parenthesized(self))) } } /// A path like `Foo(A,B) -> C` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ParenthesisedArgs { /// Overall span pub span: Span, /// `(A,B)` pub inputs: Vec<P<Ty>>, /// `C` pub output: Option<P<Ty>>, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct NodeId(u32); impl NodeId { pub fn new(x: usize) -> NodeId { assert!(x < (u32::MAX as usize)); NodeId(x as u32) } pub fn from_u32(x: u32) -> NodeId { NodeId(x) } pub fn as_usize(&self) -> usize { self.0 as usize } pub fn as_u32(&self) -> u32 { self.0 } pub fn placeholder_from_mark(mark: Mark) -> Self { NodeId(mark.as_u32()) } pub fn placeholder_to_mark(self) -> Mark { Mark::from_u32(self.0) } } impl fmt::Display for NodeId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } impl serialize::UseSpecializedEncodable for NodeId { fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u32(self.0) } } impl serialize::UseSpecializedDecodable for NodeId { fn default_decode<D: Decoder>(d: &mut D) -> Result<NodeId, D::Error> { d.read_u32().map(NodeId) } } impl indexed_vec::Idx for NodeId { fn new(idx: usize) -> Self { NodeId::new(idx) } fn index(self) -> usize { self.as_usize() } } /// Node id used to represent the root of the crate. pub const CRATE_NODE_ID: NodeId = NodeId(0); /// When parsing and doing expansions, we initially give all AST nodes this AST /// node value. Then later, in the renumber pass, we renumber them to have /// small, positive ids. 
pub const DUMMY_NODE_ID: NodeId = NodeId(!0); /// A modifier on a bound, currently this is only used for `?Sized`, where the /// modifier is `Maybe`. Negative bounds should also be handled here. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] pub enum TraitBoundModifier { None, Maybe, } /// The AST represents all type param bounds as types. /// typeck::collect::compute_bounds matches these against /// the "special" built-in traits (see middle::lang_items) and /// detects Copy, Send and Sync. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericBound { Trait(PolyTraitRef, TraitBoundModifier), Outlives(Lifetime) } impl GenericBound { pub fn span(&self) -> Span { match self { &GenericBound::Trait(ref t, ..) => t.span, &GenericBound::Outlives(ref l) => l.ident.span, } } } pub type GenericBounds = Vec<GenericBound>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericParamKind { /// A lifetime definition, e.g. `'a: 'b+'c+'d`. Lifetime, Type { default: Option<P<Ty>>, } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct GenericParam { pub id: NodeId, pub ident: Ident, pub attrs: ThinVec<Attribute>, pub bounds: GenericBounds, pub kind: GenericParamKind, } /// Represents lifetime, type and const parameters attached to a declaration of /// a function, enum, trait, etc. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Generics { pub params: Vec<GenericParam>, pub where_clause: WhereClause, pub span: Span, } impl Default for Generics { /// Creates an instance of `Generics`. 
fn default() -> Generics { Generics { params: Vec::new(), where_clause: WhereClause { id: DUMMY_NODE_ID, predicates: Vec::new(), span: DUMMY_SP, }, span: DUMMY_SP, } } } /// A `where` clause in a definition #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereClause { pub id: NodeId, pub predicates: Vec<WherePredicate>, pub span: Span, } /// A single predicate in a `where` clause #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum WherePredicate { /// A type binding, e.g. `for<'c> Foo: Send+Clone+'c` BoundPredicate(WhereBoundPredicate), /// A lifetime predicate, e.g. `'a: 'b+'c` RegionPredicate(WhereRegionPredicate), /// An equality predicate (unsupported) EqPredicate(WhereEqPredicate), } impl WherePredicate { pub fn span(&self) -> Span { match self { &WherePredicate::BoundPredicate(ref p) => p.span, &WherePredicate::RegionPredicate(ref p) => p.span, &WherePredicate::EqPredicate(ref p) => p.span, } } } /// A type bound. /// /// E.g. `for<'c> Foo: Send+Clone+'c` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereBoundPredicate { pub span: Span, /// Any generics from a `for` binding pub bound_generic_params: Vec<GenericParam>, /// The type being bounded pub bounded_ty: P<Ty>, /// Trait and lifetime bounds (`Clone+Send+'static`) pub bounds: GenericBounds, } /// A lifetime predicate. /// /// E.g. `'a: 'b+'c` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereRegionPredicate { pub span: Span, pub lifetime: Lifetime, pub bounds: GenericBounds, } /// An equality predicate (unsupported). /// /// E.g. 
`T=int` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereEqPredicate { pub id: NodeId, pub span: Span, pub lhs_ty: P<Ty>, pub rhs_ty: P<Ty>, } /// The set of MetaItems that define the compilation environment of the crate, /// used to drive conditional compilation pub type CrateConfig = HashSet<(Name, Option<Symbol>)>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Crate { pub module: Mod, pub attrs: Vec<Attribute>, pub span: Span, } /// A spanned compile-time attribute list item. pub type NestedMetaItem = Spanned<NestedMetaItemKind>; /// Possible values inside of compile-time attribute lists. /// /// E.g. the '..' in `#[name(..)]`. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum NestedMetaItemKind { /// A full MetaItem, for recursive meta items. MetaItem(MetaItem), /// A literal. /// /// E.g. "foo", 64, true Literal(Lit), } /// A spanned compile-time attribute item. /// /// E.g. `#[test]`, `#[derive(..)]`, `#[rustfmt::skip]` or `#[feature = "foo"]` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MetaItem { pub ident: Path, pub node: MetaItemKind, pub span: Span, } /// A compile-time attribute item. /// /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum MetaItemKind { /// Word meta item. /// /// E.g. `test` as in `#[test]` Word, /// List meta item. /// /// E.g. `derive(..)` as in `#[derive(..)]` List(Vec<NestedMetaItem>), /// Name value meta item. /// /// E.g. `feature = "foo"` as in `#[feature = "foo"]` NameValue(Lit) } /// A Block (`{ .. }`). /// /// E.g. `{ .. }` as in `fn foo() { .. }` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Block { /// Statements in a block pub stmts: Vec<Stmt>, pub id: NodeId, /// Distinguishes between `unsafe { ... }` and `{ ... 
}` pub rules: BlockCheckMode, pub span: Span, pub recovered: bool, } #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Pat { pub id: NodeId, pub node: PatKind, pub span: Span, } impl fmt::Debug for Pat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self)) } } impl Pat { pub(super) fn to_ty(&self) -> Option<P<Ty>> { let node = match &self.node { PatKind::Wild => TyKind::Infer, PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None) => TyKind::Path(None, Path::from_ident(*ident)), PatKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()), PatKind::Mac(mac) => TyKind::Mac(mac.clone()), PatKind::Ref(pat, mutbl) => pat.to_ty().map(|ty| TyKind::Rptr(None, MutTy { ty, mutbl: *mutbl }))?, PatKind::Slice(pats, None, _) if pats.len() == 1 => pats[0].to_ty().map(TyKind::Slice)?, PatKind::Tuple(pats, None) => { let mut tys = Vec::with_capacity(pats.len()); // FIXME(#48994) - could just be collected into an Option<Vec> for pat in pats { tys.push(pat.to_ty()?); } TyKind::Tup(tys) } _ => return None, }; Some(P(Ty { node, id: self.id, span: self.span })) } pub fn walk<F>(&self, it: &mut F) -> bool where F: FnMut(&Pat) -> bool { if !it(self) { return false; } match self.node { PatKind::Ident(_, _, Some(ref p)) => p.walk(it), PatKind::Struct(_, ref fields, _) => { fields.iter().all(|field| field.node.pat.walk(it)) } PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { s.iter().all(|p| p.walk(it)) } PatKind::Box(ref s) | PatKind::Ref(ref s, _) | PatKind::Paren(ref s) => { s.walk(it) } PatKind::Slice(ref before, ref slice, ref after) => { before.iter().all(|p| p.walk(it)) && slice.iter().all(|p| p.walk(it)) && after.iter().all(|p| p.walk(it)) } PatKind::Wild | PatKind::Lit(_) | PatKind::Range(..) | PatKind::Ident(..) | PatKind::Path(..) 
| PatKind::Mac(_) => { true } } } } /// A single field in a struct pattern /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as` x: x, y: ref y, z: ref mut z`, /// except is_shorthand is true #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FieldPat { /// The identifier for the field pub ident: Ident, /// The pattern the field is destructured to pub pat: P<Pat>, pub is_shorthand: bool, pub attrs: ThinVec<Attribute>, } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum BindingMode { ByRef(Mutability), ByValue(Mutability), } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum RangeEnd { Included(RangeSyntax), Excluded, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum RangeSyntax { DotDotDot, DotDotEq, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum PatKind { /// Represents a wildcard pattern (`_`) Wild, /// A `PatKind::Ident` may either be a new bound variable (`ref mut binding @ OPT_SUBPATTERN`), /// or a unit struct/variant pattern, or a const pattern (in the last two cases the third /// field must be `None`). Disambiguation cannot be done with parser alone, so it happens /// during name resolution. Ident(BindingMode, Ident, Option<P<Pat>>), /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`. /// The `bool` is `true` in the presence of a `..`. Struct(Path, Vec<Spanned<FieldPat>>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() TupleStruct(Path, Vec<P<Pat>>, Option<usize>), /// A possibly qualified path pattern. /// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants /// or associated constants. Qualified path patterns `<A>::B::C`/`<A as Trait>::B::C` can /// only legally refer to associated constants. 
Path(Option<QSelf>, Path), /// A tuple pattern `(a, b)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() Tuple(Vec<P<Pat>>, Option<usize>), /// A `box` pattern Box(P<Pat>), /// A reference pattern, e.g. `&mut (a, b)` Ref(P<Pat>, Mutability), /// A literal Lit(P<Expr>), /// A range pattern, e.g. `1...2`, `1..=2` or `1..2` Range(P<Expr>, P<Expr>, Spanned<RangeEnd>), /// `[a, b, ..i, y, z]` is represented as: /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` Slice(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>), /// Parentheses in patters used for grouping, i.e. `(PAT)`. Paren(P<Pat>), /// A macro pattern; pre-expansion Mac(Mac), } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum Mutability { Mutable, Immutable, } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum BinOpKind { /// The `+` operator (addition) Add, /// The `-` operator (subtraction) Sub, /// The `*` operator (multiplication) Mul, /// The `/` operator (division) Div, /// The `%` operator (modulus) Rem, /// The `&&` operator (logical and) And, /// The `||` operator (logical or) Or, /// The `^` operator (bitwise xor) BitXor, /// The `&` operator (bitwise and) BitAnd, /// The `|` operator (bitwise or) BitOr, /// The `<<` operator (shift left) Shl, /// The `>>` operator (shift right) Shr, /// The `==` operator (equality) Eq, /// The `<` operator (less than) Lt, /// The `<=` operator (less than or equal to) Le, /// The `!=` operator (not equal to) Ne, /// The `>=` operator (greater than or equal to) Ge, /// The `>` operator (greater than) Gt, } impl BinOpKind { pub fn to_string(&self) -> &'static str { use self::BinOpKind::*; match *self { Add => "+", Sub => "-", Mul => "*", Div => "/", Rem => "%", And => "&&", Or => "||", BitXor => "^", BitAnd => "&", BitOr => "|", Shl => "<<", Shr => ">>", Eq => "==", Lt => "<", Le => "<=", Ne => "!=", Ge => ">=", 
Gt => ">", } } pub fn lazy(&self) -> bool { match *self { BinOpKind::And | BinOpKind::Or => true, _ => false } } pub fn is_shift(&self) -> bool { match *self { BinOpKind::Shl | BinOpKind::Shr => true, _ => false } } pub fn is_comparison(&self) -> bool { use self::BinOpKind::*; match *self { Eq | Lt | Le | Ne | Gt | Ge => true, And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false, } } /// Returns `true` if the binary operator takes its arguments by value pub fn is_by_value(&self) -> bool { !self.is_comparison() } } pub type BinOp = Spanned<BinOpKind>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum UnOp { /// The `*` operator for dereferencing Deref, /// The `!` operator for logical inversion Not, /// The `-` operator for negation Neg, } impl UnOp { /// Returns `true` if the unary operator takes its argument by value pub fn is_by_value(u: UnOp) -> bool { match u { UnOp::Neg | UnOp::Not => true, _ => false, } } pub fn to_string(op: UnOp) -> &'static str { match op { UnOp::Deref => "*", UnOp::Not => "!", UnOp::Neg => "-", } } } /// A statement #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Stmt { pub id: NodeId, pub node: StmtKind, pub span: Span, } impl Stmt { pub fn add_trailing_semicolon(mut self) -> Self { self.node = match self.node { StmtKind::Expr(expr) => StmtKind::Semi(expr), StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| { (mac, MacStmtStyle::Semicolon, attrs) })), node => node, }; self } pub fn is_item(&self) -> bool { match self.node { StmtKind::Item(_) => true, _ => false, } } pub fn is_expr(&self) -> bool { match self.node { StmtKind::Expr(_) => true, _ => false, } } } impl fmt::Debug for Stmt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "stmt({}: {})", self.id.to_string(), pprust::stmt_to_string(self)) } } #[derive(Clone, RustcEncodable, RustcDecodable)] pub enum StmtKind { /// A local (let) binding. Local(P<Local>), /// An item definition. 
Item(P<Item>), /// Expr without trailing semi-colon. Expr(P<Expr>), /// Expr with a trailing semi-colon. Semi(P<Expr>), /// Macro. Mac(P<(Mac, MacStmtStyle, ThinVec<Attribute>)>), } #[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum MacStmtStyle { /// The macro statement had a trailing semicolon, e.g. `foo! { ... };` /// `foo!(...);`, `foo![...];` Semicolon, /// The macro statement had braces; e.g. foo! { ... } Braces, /// The macro statement had parentheses or brackets and no semicolon; e.g. /// `foo!(...)`. All of these will end up being converted into macro /// expressions. NoBraces, } /// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Local { pub pat: P<Pat>, pub ty: Option<P<Ty>>, /// Initializer expression to set the value, if any pub init: Option<P<Expr>>, pub id: NodeId, pub span: Span, pub attrs: ThinVec<Attribute>, } /// An arm of a 'match'. /// /// E.g. `0..=10 => { println!("match!") }` as in /// /// ``` /// match 123 { /// 0..=10 => { println!("match!") }, /// _ => { println!("no match!") }, /// } /// ``` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arm { pub attrs: Vec<Attribute>, pub pats: Vec<P<Pat>>, pub guard: Option<P<Expr>>, pub body: P<Expr>, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Field { pub ident: Ident, pub expr: P<Expr>, pub span: Span, pub is_shorthand: bool, pub attrs: ThinVec<Attribute>, } pub type SpannedIdent = Spanned<Ident>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum BlockCheckMode { Default, Unsafe(UnsafeSource), } #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum UnsafeSource { CompilerGenerated, UserProvided, } /// A constant (expression) that's not an item or associated item, /// but needs its own `DefId` for type-checking, const-eval, etc. /// These are usually found nested inside types (e.g. 
array lengths) /// or expressions (e.g. repeat counts), and also used to define /// explicit discriminant values for enum variants. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct AnonConst { pub id: NodeId, pub value: P<Expr>, } /// An expression #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Expr { pub id: NodeId, pub node: ExprKind, pub span: Span, pub attrs: ThinVec<Attribute> } impl Expr { /// Whether this expression would be valid somewhere that expects a value, for example, an `if` /// condition. pub fn returns(&self) -> bool { if let ExprKind::Block(ref block, _) = self.node { match block.stmts.last().map(|last_stmt| &last_stmt.node) { // implicit return Some(&StmtKind::Expr(_)) => true, Some(&StmtKind::Semi(ref expr)) => { if let ExprKind::Ret(_) = expr.node { // last statement is explicit return true } else { false } } // This is a block that doesn't end in either an implicit or explicit return _ => false, } } else { // This is not a block, it is a value true } } fn to_bound(&self) -> Option<GenericBound> { match &self.node { ExprKind::Path(None, path) => Some(GenericBound::Trait(PolyTraitRef::new(Vec::new(), path.clone(), self.span), TraitBoundModifier::None)), _ => None, } } pub(super) fn to_ty(&self) -> Option<P<Ty>> { let node = match &self.node { ExprKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()), ExprKind::Mac(mac) => TyKind::Mac(mac.clone()), ExprKind::Paren(expr) => expr.to_ty().map(TyKind::Paren)?, ExprKind::AddrOf(mutbl, expr) => expr.to_ty().map(|ty| TyKind::Rptr(None, MutTy { ty, mutbl: *mutbl }))?, ExprKind::Repeat(expr, expr_len) => expr.to_ty().map(|ty| TyKind::Array(ty, expr_len.clone()))?, ExprKind::Array(exprs) if exprs.len() == 1 => exprs[0].to_ty().map(TyKind::Slice)?, ExprKind::Tup(exprs) => { let tys = exprs.iter().map(|expr| expr.to_ty()).collect::<Option<Vec<_>>>()?; TyKind::Tup(tys) } ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add => if let (Some(lhs), 
Some(rhs)) = (lhs.to_bound(), rhs.to_bound()) { TyKind::TraitObject(vec![lhs, rhs], TraitObjectSyntax::None) } else { return None; } _ => return None, }; Some(P(Ty { node, id: self.id, span: self.span })) } pub fn precedence(&self) -> ExprPrecedence { match self.node { ExprKind::Box(_) => ExprPrecedence::Box, ExprKind::ObsoleteInPlace(..) => ExprPrecedence::ObsoleteInPlace, ExprKind::Array(_) => ExprPrecedence::Array, ExprKind::Call(..) => ExprPrecedence::Call, ExprKind::MethodCall(..) => ExprPrecedence::MethodCall, ExprKind::Tup(_) => ExprPrecedence::Tup, ExprKind::Binary(op, ..) => ExprPrecedence::Binary(op.node), ExprKind::Unary(..) => ExprPrecedence::Unary, ExprKind::Lit(_) => ExprPrecedence::Lit, ExprKind::Type(..) | ExprKind::Cast(..) => ExprPrecedence::Cast, ExprKind::If(..) => ExprPrecedence::If, ExprKind::IfLet(..) => ExprPrecedence::IfLet, ExprKind::While(..) => ExprPrecedence::While, ExprKind::WhileLet(..) => ExprPrecedence::WhileLet, ExprKind::ForLoop(..) => ExprPrecedence::ForLoop, ExprKind::Loop(..) => ExprPrecedence::Loop, ExprKind::Match(..) => ExprPrecedence::Match, ExprKind::Closure(..) => ExprPrecedence::Closure, ExprKind::Block(..) => ExprPrecedence::Block, ExprKind::Catch(..) => ExprPrecedence::Catch, ExprKind::Async(..) => ExprPrecedence::Async, ExprKind::Assign(..) => ExprPrecedence::Assign, ExprKind::AssignOp(..) => ExprPrecedence::AssignOp, ExprKind::Field(..) => ExprPrecedence::Field, ExprKind::Index(..) => ExprPrecedence::Index, ExprKind::Range(..) => ExprPrecedence::Range, ExprKind::Path(..) => ExprPrecedence::Path, ExprKind::AddrOf(..) => ExprPrecedence::AddrOf, ExprKind::Break(..) => ExprPrecedence::Break, ExprKind::Continue(..) => ExprPrecedence::Continue, ExprKind::Ret(..) => ExprPrecedence::Ret, ExprKind::InlineAsm(..) => ExprPrecedence::InlineAsm, ExprKind::Mac(..) => ExprPrecedence::Mac, ExprKind::Struct(..) => ExprPrecedence::Struct, ExprKind::Repeat(..) => ExprPrecedence::Repeat, ExprKind::Paren(..) 
=> ExprPrecedence::Paren, ExprKind::Try(..) => ExprPrecedence::Try, ExprKind::Yield(..) => ExprPrecedence::Yield, } } } impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self)) } } /// Limit types of a range (inclusive or exclusive) #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum RangeLimits { /// Inclusive at the beginning, exclusive at the end HalfOpen, /// Inclusive at the beginning and end Closed, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum ExprKind { /// A `box x` expression. Box(P<Expr>), /// First expr is the place; second expr is the value. ObsoleteInPlace(P<Expr>, P<Expr>), /// An array (`[a, b, c, d]`) Array(Vec<P<Expr>>), /// A function call /// /// The first field resolves to the function itself, /// and the second field is the list of arguments. /// This also represents calling the constructor of /// tuple-like ADTs such as tuple structs and enum variants. Call(P<Expr>, Vec<P<Expr>>), /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`) /// /// The `PathSegment` represents the method name and its generic arguments /// (within the angle brackets). /// The first element of the vector of `Expr`s is the expression that evaluates /// to the object on which the method is being called on (the receiver), /// and the remaining elements are the rest of the arguments. /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`. 
MethodCall(PathSegment, Vec<P<Expr>>), /// A tuple (`(a, b, c ,d)`) Tup(Vec<P<Expr>>), /// A binary operation (For example: `a + b`, `a * b`) Binary(BinOp, P<Expr>, P<Expr>), /// A unary operation (For example: `!x`, `*x`) Unary(UnOp, P<Expr>), /// A literal (For example: `1`, `"foo"`) Lit(P<Lit>), /// A cast (`foo as f64`) Cast(P<Expr>, P<Ty>), Type(P<Expr>, P<Ty>), /// An `if` block, with an optional else block /// /// `if expr { block } else { expr }` If(P<Expr>, P<Block>, Option<P<Expr>>), /// An `if let` expression with an optional else block /// /// `if let pat = expr { block } else { expr }` /// /// This is desugared to a `match` expression. IfLet(Vec<P<Pat>>, P<Expr>, P<Block>, Option<P<Expr>>), /// A while loop, with an optional label /// /// `'label: while expr { block }` While(P<Expr>, P<Block>, Option<Label>), /// A while-let loop, with an optional label /// /// `'label: while let pat = expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. WhileLet(Vec<P<Pat>>, P<Expr>, P<Block>, Option<Label>), /// A for loop, with an optional label /// /// `'label: for pat in expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` Loop(P<Block>, Option<Label>), /// A `match` block. Match(P<Expr>, Vec<Arm>), /// A closure (for example, `move |a, b, c| a + b + c`) /// /// The final span is the span of the argument block `|...|` Closure(CaptureBy, IsAsync, Movability, P<FnDecl>, P<Expr>, Span), /// A block (`'label: { ... }`) Block(P<Block>, Option<Label>), /// An async block (`async move { ... }`) /// /// The `NodeId` is the `NodeId` for the closure that results from /// desugaring an async block, just like the NodeId field in the /// `IsAsync` enum. 
This is necessary in order to create a def for the /// closure which can be used as a parent of any child defs. Defs /// created during lowering cannot be made the parent of any other /// preexisting defs. Async(CaptureBy, NodeId, P<Block>), /// A catch block (`catch { ... }`) Catch(P<Block>), /// An assignment (`a = foo()`) Assign(P<Expr>, P<Expr>), /// An assignment with an operator /// /// For example, `a += 1`. AssignOp(BinOp, P<Expr>, P<Expr>), /// Access of a named (`obj.foo`) or unnamed (`obj.0`) struct field Field(P<Expr>, Ident), /// An indexing operation (`foo[2]`) Index(P<Expr>, P<Expr>), /// A range (`1..2`, `1..`, `..2`, `1...2`, `1...`, `...2`) Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits), /// Variable reference, possibly containing `::` and/or type /// parameters, e.g. foo::bar::<baz>. /// /// Optionally "qualified", /// E.g. `<Vec<T> as SomeTrait>::SomeType`. Path(Option<QSelf>, Path), /// A referencing operation (`&a` or `&mut a`) AddrOf(Mutability, P<Expr>), /// A `break`, with an optional label to break, and an optional expression Break(Option<Label>, Option<P<Expr>>), /// A `continue`, with an optional label Continue(Option<Label>), /// A `return`, with an optional value to be returned Ret(Option<P<Expr>>), /// Output of the `asm!()` macro InlineAsm(P<InlineAsm>), /// A macro invocation; pre-expansion Mac(Mac), /// A struct literal expression. /// /// For example, `Foo {x: 1, y: 2}`, or /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`. Struct(Path, Vec<Field>, Option<P<Expr>>), /// An array literal constructed from one repeated element. /// /// For example, `[1; 5]`. The expression is the element to be /// repeated; the constant is the number of times to repeat it. Repeat(P<Expr>, AnonConst), /// No-op: used solely so we can pretty-print faithfully Paren(P<Expr>), /// `expr?` Try(P<Expr>), /// A `yield`, with an optional value to be yielded Yield(Option<P<Expr>>), } /// The explicit Self type in a "qualified path". 
The actual /// path, including the trait and the associated item, is stored /// separately. `position` represents the index of the associated /// item qualified with this Self type. /// /// ```ignore (only-for-syntax-highlight) /// <Vec<T> as a::b::Trait>::AssociatedItem /// ^~~~~ ~~~~~~~~~~~~~~^ /// ty position = 3 /// /// <Vec<T>>::AssociatedItem /// ^~~~~ ^ /// ty position = 0 /// ``` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct QSelf { pub ty: P<Ty>, /// The span of `a::b::Trait` in a path like `<Vec<T> as /// a::b::Trait>::AssociatedItem`; in the case where `position == /// 0`, this is an empty span. pub path_span: Span, pub position: usize } /// A capture clause #[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum CaptureBy { Value, Ref, } /// The movability of a generator / closure literal #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum Movability { Static, Movable, } pub type Mac = Spanned<Mac_>; /// Represents a macro invocation. The Path indicates which macro /// is being invoked, and the vector of token-trees contains the source /// of the macro invocation. /// /// NB: the additional ident for a macro_rules-style macro is actually /// stored in the enclosing item. Oog. 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Mac_ { pub path: Path, pub delim: MacDelimiter, pub tts: ThinTokenStream, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] pub enum MacDelimiter { Parenthesis, Bracket, Brace, } impl Mac_ { pub fn stream(&self) -> TokenStream { self.tts.clone().into() } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MacroDef { pub tokens: ThinTokenStream, pub legacy: bool, } impl MacroDef { pub fn stream(&self) -> TokenStream { self.tokens.clone().into() } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)] pub enum StrStyle { /// A regular string, like `"foo"` Cooked, /// A raw string, like `r##"foo"##` /// /// The value is the number of `#` symbols used. Raw(u16) } /// A literal pub type Lit = Spanned<LitKind>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy, Hash, PartialEq)] pub enum LitIntType { Signed(IntTy), Unsigned(UintTy), Unsuffixed, } /// Literal kind. /// /// E.g. `"foo"`, `42`, `12.34` or `bool` #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Hash, PartialEq)] pub enum LitKind { /// A string literal (`"foo"`) Str(Symbol, StrStyle), /// A byte string (`b"foo"`) ByteStr(Lrc<Vec<u8>>), /// A byte char (`b'f'`) Byte(u8), /// A character literal (`'a'`) Char(char), /// An integer literal (`1`) Int(u128, LitIntType), /// A float literal (`1f64` or `1E10f64`) Float(Symbol, FloatTy), /// A float literal without a suffix (`1.0 or 1.0E10`) FloatUnsuffixed(Symbol), /// A boolean literal Bool(bool), } impl LitKind { /// Returns true if this literal is a string and false otherwise. pub fn is_str(&self) -> bool { match *self { LitKind::Str(..) => true, _ => false, } } /// Returns true if this is a numeric literal. pub fn is_numeric(&self) -> bool { match *self { LitKind::Int(..) | LitKind::Float(..) | LitKind::FloatUnsuffixed(..) => true, _ => false, } } /// Returns true if this literal has no suffix. 
Note: this will return true /// for literals with prefixes such as raw strings and byte strings. pub fn is_unsuffixed(&self) -> bool { match *self { // unsuffixed variants LitKind::Str(..) | LitKind::ByteStr(..) | LitKind::Byte(..) | LitKind::Char(..) | LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::FloatUnsuffixed(..) | LitKind::Bool(..) => true, // suffixed variants LitKind::Int(_, LitIntType::Signed(..)) | LitKind::Int(_, LitIntType::Unsigned(..)) | LitKind::Float(..) => false, } } /// Returns true if this literal has a suffix. pub fn is_suffixed(&self) -> bool { !self.is_unsuffixed() } } // NB: If you change this, you'll probably want to change the corresponding // type structure in middle/ty.rs as well. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MutTy { pub ty: P<Ty>, pub mutbl: Mutability, } /// Represents a method's signature in a trait declaration, /// or in an implementation. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MethodSig { pub header: FnHeader, pub decl: P<FnDecl>, } /// Represents an item declaration within a trait declaration, /// possibly including a default implementation. A trait item is /// either required (meaning it doesn't have an implementation, just a /// signature) or provided (meaning it has a default implementation). 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct TraitItem { pub id: NodeId, pub ident: Ident, pub attrs: Vec<Attribute>, pub generics: Generics, pub node: TraitItemKind, pub span: Span, /// See `Item::tokens` for what this is pub tokens: Option<TokenStream>, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum TraitItemKind { Const(P<Ty>, Option<P<Expr>>), Method(MethodSig, Option<P<Block>>), Type(GenericBounds, Option<P<Ty>>), Macro(Mac), } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ImplItem { pub id: NodeId, pub ident: Ident, pub vis: Visibility, pub defaultness: Defaultness, pub attrs: Vec<Attribute>, pub generics: Generics, pub node: ImplItemKind, pub span: Span, /// See `Item::tokens` for what this is pub tokens: Option<TokenStream>, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum ImplItemKind { Const(P<Ty>, P<Expr>), Method(MethodSig, P<Block>), Type(P<Ty>), Existential(GenericBounds), Macro(Mac), } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Copy)] pub enum IntTy { Isize, I8, I16, I32, I64, I128, } impl fmt::Debug for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.ty_to_string()) } } impl IntTy { pub fn ty_to_string(&self) -> &'static str { match *self { IntTy::Isize => "isize", IntTy::I8 => "i8", IntTy::I16 => "i16", IntTy::I32 => "i32", IntTy::I64 => "i64", IntTy::I128 => "i128", } } pub fn val_to_string(&self, val: i128) -> String { // cast to a u128 so we can correctly print INT128_MIN. All integral types // are parsed as u128, so we wouldn't want to print an extra negative // sign. 
format!("{}{}", val as u128, self.ty_to_string()) } pub fn bit_width(&self) -> Option<usize> { Some(match *self { IntTy::Isize => return None, IntTy::I8 => 8, IntTy::I16 => 16, IntTy::I32 => 32, IntTy::I64 => 64, IntTy::I128 => 128, }) } } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Copy)] pub enum UintTy { Usize, U8, U16, U32, U64, U128, } impl UintTy { pub fn ty_to_string(&self) -> &'static str { match *self { UintTy::Usize => "usize", UintTy::U8 => "u8", UintTy::U16 => "u16", UintTy::U32 => "u32", UintTy::U64 => "u64", UintTy::U128 => "u128", } } pub fn val_to_string(&self, val: u128) -> String { format!("{}{}", val, self.ty_to_string()) } pub fn bit_width(&self) -> Option<usize> { Some(match *self { UintTy::Usize => return None, UintTy::U8 => 8, UintTy::U16 => 16, UintTy::U32 => 32, UintTy::U64 => 64, UintTy::U128 => 128, }) } } impl fmt::Debug for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.ty_to_string()) } } // Bind a type to an associated type: `A=Foo`. 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct TypeBinding { pub id: NodeId, pub ident: Ident, pub ty: P<Ty>, pub span: Span, } #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Ty { pub id: NodeId, pub node: TyKind, pub span: Span, } impl fmt::Debug for Ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "type({})", pprust::ty_to_string(self)) } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct BareFnTy { pub unsafety: Unsafety, pub abi: Abi, pub generic_params: Vec<GenericParam>, pub decl: P<FnDecl> } /// The different kinds of types recognized by the compiler #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum TyKind { /// A variable-length slice (`[T]`) Slice(P<Ty>), /// A fixed length array (`[T; n]`) Array(P<Ty>, AnonConst), /// A raw pointer (`*const T` or `*mut T`) Ptr(MutTy), /// A reference (`&'a T` or `&'a mut T`) Rptr(Option<Lifetime>, MutTy), /// A bare function (e.g. `fn(usize) -> bool`) BareFn(P<BareFnTy>), /// The never type (`!`) Never, /// A tuple (`(A, B, C, D,...)`) Tup(Vec<P<Ty>> ), /// A path (`module::module::...::Type`), optionally /// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`. /// /// Type parameters are stored in the Path itself Path(Option<QSelf>, Path), /// A trait object type `Bound1 + Bound2 + Bound3` /// where `Bound` is a trait or a lifetime. TraitObject(GenericBounds, TraitObjectSyntax), /// An `impl Bound1 + Bound2 + Bound3` type /// where `Bound` is a trait or a lifetime. /// /// The `NodeId` exists to prevent lowering from having to /// generate `NodeId`s on the fly, which would complicate /// the generation of `existential type` items significantly ImplTrait(NodeId, GenericBounds), /// No-op; kept solely so that we can pretty-print faithfully Paren(P<Ty>), /// Unused for now Typeof(AnonConst), /// TyKind::Infer means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. 
Infer, /// Inferred type of a `self` or `&self` argument in a method. ImplicitSelf, // A macro in the type position. Mac(Mac), /// Placeholder for a kind that has failed to be defined. Err, } impl TyKind { pub fn is_implicit_self(&self) -> bool { if let TyKind::ImplicitSelf = *self { true } else { false } } crate fn is_unit(&self) -> bool { if let TyKind::Tup(ref tys) = *self { tys.is_empty() } else { false } } } /// Syntax used to declare a trait object. #[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum TraitObjectSyntax { Dyn, None, } /// Inline assembly dialect. /// /// E.g. `"intel"` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")` #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum AsmDialect { Att, Intel, } /// Inline assembly. /// /// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct InlineAsmOutput { pub constraint: Symbol, pub expr: P<Expr>, pub is_rw: bool, pub is_indirect: bool, } /// Inline assembly. /// /// E.g. `asm!("NOP");` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct InlineAsm { pub asm: Symbol, pub asm_str_style: StrStyle, pub outputs: Vec<InlineAsmOutput>, pub inputs: Vec<(Symbol, P<Expr>)>, pub clobbers: Vec<Symbol>, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, pub ctxt: SyntaxContext, } /// An argument in a function header. /// /// E.g. `bar: usize` as in `fn foo(bar: usize)` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arg { pub ty: P<Ty>, pub pat: P<Pat>, pub id: NodeId, } /// Alternative representation for `Arg`s describing `self` parameter of methods. /// /// E.g. 
`&mut self` as in `fn foo(&mut self)` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum SelfKind { /// `self`, `mut self` Value(Mutability), /// `&'lt self`, `&'lt mut self` Region(Option<Lifetime>, Mutability), /// `self: TYPE`, `mut self: TYPE` Explicit(P<Ty>, Mutability), } pub type ExplicitSelf = Spanned<SelfKind>; impl Arg { pub fn to_self(&self) -> Option<ExplicitSelf> { if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node { if ident.name == keywords::SelfValue.name() { return match self.ty.node { TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))), TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node.is_implicit_self() => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } _ => Some(respan(self.pat.span.to(self.ty.span), SelfKind::Explicit(self.ty.clone(), mutbl))), } } } None } pub fn is_self(&self) -> bool { if let PatKind::Ident(_, ident, _) = self.pat.node { ident.name == keywords::SelfValue.name() } else { false } } pub fn from_self(eself: ExplicitSelf, eself_ident: Ident) -> Arg { let span = eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, node: TyKind::ImplicitSelf, span, }); let arg = |mutbl, ty| Arg { pat: P(Pat { id: DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None), span, }), ty, id: DUMMY_NODE_ID, }; match eself.node { SelfKind::Explicit(ty, mutbl) => arg(mutbl, ty), SelfKind::Value(mutbl) => arg(mutbl, infer_ty), SelfKind::Region(lt, mutbl) => arg(Mutability::Immutable, P(Ty { id: DUMMY_NODE_ID, node: TyKind::Rptr(lt, MutTy { ty: infer_ty, mutbl: mutbl }), span, })), } } } /// Header (not the body) of a function declaration. /// /// E.g. 
`fn foo(bar: baz)` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FnDecl { pub inputs: Vec<Arg>, pub output: FunctionRetTy, pub variadic: bool } impl FnDecl { pub fn get_self(&self) -> Option<ExplicitSelf> { self.inputs.get(0).and_then(Arg::to_self) } pub fn has_self(&self) -> bool { self.inputs.get(0).map(Arg::is_self).unwrap_or(false) } } /// Is the trait definition an auto trait? #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum IsAuto { Yes, No } #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum Unsafety { Unsafe, Normal, } #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)] pub enum IsAsync { Async { closure_id: NodeId, return_impl_trait_id: NodeId, }, NotAsync, } impl IsAsync { pub fn is_async(self) -> bool { if let IsAsync::Async { .. } = self { true } else { false } } /// In case this is an `Async` return the `NodeId` for the generated impl Trait item pub fn opt_return_id(self) -> Option<NodeId> { match self { IsAsync::Async { return_impl_trait_id, .. 
} => Some(return_impl_trait_id), IsAsync::NotAsync => None, } } } #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum Constness { Const, NotConst, } #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub enum Defaultness { Default, Final, } impl fmt::Display for Unsafety { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(match *self { Unsafety::Normal => "normal", Unsafety::Unsafe => "unsafe", }, f) } } #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum ImplPolarity { /// `impl Trait for Type` Positive, /// `impl !Trait for Type` Negative, } impl fmt::Debug for ImplPolarity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ImplPolarity::Positive => "positive".fmt(f), ImplPolarity::Negative => "negative".fmt(f), } } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum FunctionRetTy { /// Return type is not specified. /// /// Functions default to `()` and /// closures default to inference. Span points to where return /// type would be inserted. Default(Span), /// Everything else Ty(P<Ty>), } impl FunctionRetTy { pub fn span(&self) -> Span { match *self { FunctionRetTy::Default(span) => span, FunctionRetTy::Ty(ref ty) => ty.span, } } } /// Module declaration. /// /// E.g. `mod foo;` or `mod foo { .. }` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token until `}`. /// For `mod foo;`, the inner span ranges from the first token /// to the last token in the external file. pub inner: Span, pub items: Vec<P<Item>>, } /// Foreign module declaration. /// /// E.g. `extern { .. }` or `extern C { .. 
}` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ForeignMod { pub abi: Abi, pub items: Vec<ForeignItem>, } /// Global inline assembly /// /// aka module-level assembly or file-scoped assembly #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub struct GlobalAsm { pub asm: Symbol, pub ctxt: SyntaxContext, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct EnumDef { pub variants: Vec<Variant>, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Variant_ { pub ident: Ident, pub attrs: Vec<Attribute>, pub data: VariantData, /// Explicit discriminant, e.g. `Foo = 1` pub disr_expr: Option<AnonConst>, } pub type Variant = Spanned<Variant_>; /// Part of `use` item to the right of its prefix. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum UseTreeKind { /// `use prefix` or `use prefix as rename` /// /// The extra `NodeId`s are for HIR lowering, when additional statements are created for each /// namespace. Simple(Option<Ident>, NodeId, NodeId), /// `use prefix::{...}` Nested(Vec<(UseTree, NodeId)>), /// `use prefix::*` Glob, } /// A tree of paths sharing common prefixes. /// Used in `use` items both at top-level and inside of braces in import groups. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct UseTree { pub prefix: Path, pub kind: UseTreeKind, pub span: Span, } impl UseTree { pub fn ident(&self) -> Ident { match self.kind { UseTreeKind::Simple(Some(rename), ..) => rename, UseTreeKind::Simple(None, ..) => self.prefix.segments.last().expect("empty prefix in a simple import").ident, _ => panic!("`UseTree::ident` can only be used on a simple import"), } } } /// Distinguishes between Attributes that decorate items and Attributes that /// are contained as statements within items. These two cases need to be /// distinguished for pretty-printing. 
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum AttrStyle { Outer, Inner, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, PartialOrd, Ord, Copy)] pub struct AttrId(pub usize); impl Idx for AttrId { fn new(idx: usize) -> Self { AttrId(idx) } fn index(self) -> usize { self.0 } } /// Meta-data associated with an item /// Doc-comments are promoted to attributes that have is_sugared_doc = true #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Attribute { pub id: AttrId, pub style: AttrStyle, pub path: Path, pub tokens: TokenStream, pub is_sugared_doc: bool, pub span: Span, } /// TraitRef's appear in impls. /// /// resolve maps each TraitRef's ref_id to its defining trait; that's all /// that the ref_id is for. The impl_id maps to the "self type" of this impl. /// If this impl is an ItemKind::Impl, the impl_id is redundant (it could be the /// same as the impl's node id). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct TraitRef { pub path: Path, pub ref_id: NodeId, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct PolyTraitRef { /// The `'a` in `<'a> Foo<&'a T>` pub bound_generic_params: Vec<GenericParam>, /// The `Foo<&'a T>` in `<'a> Foo<&'a T>` pub trait_ref: TraitRef, pub span: Span, } impl PolyTraitRef { pub fn new(generic_params: Vec<GenericParam>, path: Path, span: Span) -> Self { PolyTraitRef { bound_generic_params: generic_params, trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, span, } } } #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)] pub enum CrateSugar { /// Source is `pub(crate)` PubCrate, /// Source is (just) `crate` JustCrate, } pub type Visibility = Spanned<VisibilityKind>; #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum VisibilityKind { Public, Crate(CrateSugar), Restricted { path: P<Path>, id: NodeId }, Inherited, } impl VisibilityKind { pub fn is_pub(&self) -> bool { if let 
VisibilityKind::Public = *self { true } else { false } } } /// Field of a struct. /// /// E.g. `bar: usize` as in `struct Foo { bar: usize }` #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct StructField { pub span: Span, pub ident: Option<Ident>, pub vis: Visibility, pub id: NodeId, pub ty: P<Ty>, pub attrs: Vec<Attribute>, } /// Fields and Ids of enum variants and structs /// /// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all /// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants). /// One shared Id can be successfully used for these two purposes. /// Id of the whole enum lives in `Item`. /// /// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually /// used for `Struct`-structs (but still presents). Structures don't have an analogue of "Id of /// the variant itself" from enum variants. /// Id of the whole struct lives in `Item`. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum VariantData { /// Struct variant. /// /// E.g. `Bar { .. }` as in `enum Foo { Bar { .. } }` Struct(Vec<StructField>, NodeId), /// Tuple variant. /// /// E.g. `Bar(..)` as in `enum Foo { Bar(..) }` Tuple(Vec<StructField>, NodeId), /// Unit variant. /// /// E.g. `Bar = ..` as in `enum Foo { Bar = .. }` Unit(NodeId), } impl VariantData { pub fn fields(&self) -> &[StructField] { match *self { VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => fields, _ => &[], } } pub fn id(&self) -> NodeId { match *self { VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id } } pub fn is_struct(&self) -> bool { if let VariantData::Struct(..) = *self { true } else { false } } pub fn is_tuple(&self) -> bool { if let VariantData::Tuple(..) = *self { true } else { false } } pub fn is_unit(&self) -> bool { if let VariantData::Unit(..) 
= *self { true } else { false } } } /// An item /// /// The name might be a dummy name in case of anonymous items #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Item { pub ident: Ident, pub attrs: Vec<Attribute>, pub id: NodeId, pub node: ItemKind, pub vis: Visibility, pub span: Span, /// Original tokens this item was parsed from. This isn't necessarily /// available for all items, although over time more and more items should /// have this be `Some`. Right now this is primarily used for procedural /// macros, notably custom attributes. /// /// Note that the tokens here do not include the outer attributes, but will /// include inner attributes. pub tokens: Option<TokenStream>, } /// A function header /// /// All the information between the visibility & the name of the function is /// included in this struct (e.g. `async unsafe fn` or `const extern "C" fn`) #[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)] pub struct FnHeader { pub unsafety: Unsafety, pub asyncness: IsAsync, pub constness: Spanned<Constness>, pub abi: Abi, } impl Default for FnHeader { fn default() -> FnHeader { FnHeader { unsafety: Unsafety::Normal, asyncness: IsAsync::NotAsync, constness: dummy_spanned(Constness::NotConst), abi: Abi::Rust, } } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum ItemKind { /// An `extern crate` item, with optional *original* crate name if the crate was renamed. /// /// E.g. `extern crate foo` or `extern crate foo_bar as foo` ExternCrate(Option<Name>), /// A use declaration (`use` or `pub use`) item. /// /// E.g. `use foo;`, `use foo::bar;` or `use foo::bar as FooBar;` Use(P<UseTree>), /// A static item (`static` or `pub static`). /// /// E.g. `static FOO: i32 = 42;` or `static FOO: &'static str = "bar";` Static(P<Ty>, Mutability, P<Expr>), /// A constant item (`const` or `pub const`). /// /// E.g. `const FOO: i32 = 42;` Const(P<Ty>, P<Expr>), /// A function declaration (`fn` or `pub fn`). /// /// E.g. 
`fn foo(bar: usize) -> usize { .. }` Fn(P<FnDecl>, FnHeader, Generics, P<Block>), /// A module declaration (`mod` or `pub mod`). /// /// E.g. `mod foo;` or `mod foo { .. }` Mod(Mod), /// An external module (`extern` or `pub extern`). /// /// E.g. `extern {}` or `extern "C" {}` ForeignMod(ForeignMod), /// Module-level inline assembly (from `global_asm!()`) GlobalAsm(P<GlobalAsm>), /// A type alias (`type` or `pub type`). /// /// E.g. `type Foo = Bar<u8>;` Ty(P<Ty>, Generics), /// An existential type declaration (`existential type`). /// /// E.g. `existential type Foo: Bar + Boo;` Existential(GenericBounds, Generics), /// An enum definition (`enum` or `pub enum`). /// /// E.g. `enum Foo<A, B> { C<A>, D<B> }` Enum(EnumDef, Generics), /// A struct definition (`struct` or `pub struct`). /// /// E.g. `struct Foo<A> { x: A }` Struct(VariantData, Generics), /// A union definition (`union` or `pub union`). /// /// E.g. `union Foo<A, B> { x: A, y: B }` Union(VariantData, Generics), /// A Trait declaration (`trait` or `pub trait`). /// /// E.g. `trait Foo { .. }`, `trait Foo<T> { .. }` or `auto trait Foo {}` Trait(IsAuto, Unsafety, Generics, GenericBounds, Vec<TraitItem>), /// Trait alias /// /// E.g. `trait Foo = Bar + Quux;` TraitAlias(Generics, GenericBounds), /// An implementation. /// /// E.g. `impl<A> Foo<A> { .. }` or `impl<A> Trait for Foo<A> { .. }` Impl(Unsafety, ImplPolarity, Defaultness, Generics, Option<TraitRef>, // (optional) trait this impl implements P<Ty>, // self Vec<ImplItem>), /// A macro invocation. /// /// E.g. `macro_rules! foo { .. }` or `foo!(..)` Mac(Mac), /// A macro definition. MacroDef(MacroDef), } impl ItemKind { pub fn descriptive_variant(&self) -> &str { match *self { ItemKind::ExternCrate(..) => "extern crate", ItemKind::Use(..) => "use", ItemKind::Static(..) => "static item", ItemKind::Const(..) => "constant item", ItemKind::Fn(..) => "function", ItemKind::Mod(..) => "module", ItemKind::ForeignMod(..) 
=> "foreign module", ItemKind::GlobalAsm(..) => "global asm", ItemKind::Ty(..) => "type alias", ItemKind::Existential(..) => "existential type", ItemKind::Enum(..) => "enum", ItemKind::Struct(..) => "struct", ItemKind::Union(..) => "union", ItemKind::Trait(..) => "trait", ItemKind::TraitAlias(..) => "trait alias", ItemKind::Mac(..) | ItemKind::MacroDef(..) | ItemKind::Impl(..) => "item" } } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ForeignItem { pub ident: Ident, pub attrs: Vec<Attribute>, pub node: ForeignItemKind, pub id: NodeId, pub span: Span, pub vis: Visibility, } /// An item within an `extern` block #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum ForeignItemKind { /// A foreign function Fn(P<FnDecl>, Generics), /// A foreign static item (`static ext: u8`), with optional mutability /// (the boolean is true when mutable) Static(P<Ty>, bool), /// A foreign type Ty, /// A macro invocation Macro(Mac), } impl ForeignItemKind { pub fn descriptive_variant(&self) -> &str { match *self { ForeignItemKind::Fn(..) => "foreign function", ForeignItemKind::Static(..) => "foreign static item", ForeignItemKind::Ty => "foreign type", ForeignItemKind::Macro(..) => "macro in foreign module", } } } #[cfg(test)] mod tests { use serialize; use super::*; // are ASTs encodable? #[test] fn check_asts_encodable() { fn assert_encodable<T: serialize::Encodable>() {} assert_encodable::<Crate>(); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/feature_gate.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Feature gating //! //! This module implements the gating necessary for preventing certain compiler //! features from being used by default. This module will crawl a pre-expanded //! AST to ensure that there are no features which are used that are not //! enabled. //! //! Features are enabled in programs via the crate-level attributes of //! `#![feature(...)]` with a comma-separated list of features. //! //! For the purpose of future feature-tracking, once code for detection of feature //! gate usage is added, *do not remove it again* even once the feature //! becomes stable. use self::AttributeType::*; use self::AttributeGate::*; use rustc_data_structures::fx::FxHashMap; use rustc_target::spec::abi::Abi; use ast::{self, NodeId, PatKind, RangeEnd}; use attr; use source_map::Spanned; use edition::{ALL_EDITIONS, Edition}; use syntax_pos::{Span, DUMMY_SP}; use errors::{DiagnosticBuilder, Handler}; use visit::{self, FnKind, Visitor}; use parse::ParseSess; use symbol::{keywords, Symbol}; use std::{env, path}; macro_rules! set { ($field: ident) => {{ fn f(features: &mut Features, _: Span) { features.$field = true; } f as fn(&mut Features, Span) }} } macro_rules! declare_features { ($((active, $feature: ident, $ver: expr, $issue: expr, $edition: expr),)+) => { /// Represents active features that are currently being implemented or /// currently being considered for addition/removal. 
const ACTIVE_FEATURES: &'static [(&'static str, &'static str, Option<u32>, Option<Edition>, fn(&mut Features, Span))] = &[$((stringify!($feature), $ver, $issue, $edition, set!($feature))),+]; /// A set of features to be used by later passes. #[derive(Clone)] pub struct Features { /// `#![feature]` attrs for language features, for error reporting pub declared_lang_features: Vec<(Symbol, Span, Option<Symbol>)>, /// `#![feature]` attrs for non-language (library) features pub declared_lib_features: Vec<(Symbol, Span)>, $(pub $feature: bool),+ } impl Features { pub fn new() -> Features { Features { declared_lang_features: Vec::new(), declared_lib_features: Vec::new(), $($feature: false),+ } } pub fn walk_feature_fields<F>(&self, mut f: F) where F: FnMut(&str, bool) { $(f(stringify!($feature), self.$feature);)+ } } }; ($((removed, $feature: ident, $ver: expr, $issue: expr, None, $reason: expr),)+) => { /// Represents unstable features which have since been removed (it was once Active) const REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, $reason)),+ ]; }; ($((stable_removed, $feature: ident, $ver: expr, $issue: expr, None),)+) => { /// Represents stable features which have since been removed (it was once Accepted) const STABLE_REMOVED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, None)),+ ]; }; ($((accepted, $feature: ident, $ver: expr, $issue: expr, None),)+) => { /// Those language feature has since been Accepted (it was once Active) const ACCEPTED_FEATURES: &[(&str, &str, Option<u32>, Option<&str>)] = &[ $((stringify!($feature), $ver, $issue, None)),+ ]; } } // If you change this, please modify src/doc/unstable-book as well. // // Don't ever remove anything from this list; set them to 'Removed'. // // The version numbers here correspond to the version in which the current status // was set. 
This is most important for knowing when a particular feature became // stable (active). // // NB: tools/tidy/src/features.rs parses this information directly out of the // source, so take care when modifying it. declare_features! ( (active, asm, "1.0.0", Some(29722), None), (active, concat_idents, "1.0.0", Some(29599), None), (active, link_args, "1.0.0", Some(29596), None), (active, log_syntax, "1.0.0", Some(29598), None), (active, non_ascii_idents, "1.0.0", Some(28979), None), (active, plugin_registrar, "1.0.0", Some(29597), None), (active, thread_local, "1.0.0", Some(29594), None), (active, trace_macros, "1.0.0", Some(29598), None), // rustc internal, for now (active, intrinsics, "1.0.0", None, None), (active, lang_items, "1.0.0", None, None), (active, format_args_nl, "1.29.0", None, None), (active, link_llvm_intrinsics, "1.0.0", Some(29602), None), (active, linkage, "1.0.0", Some(29603), None), (active, quote, "1.0.0", Some(29601), None), // rustc internal (active, rustc_diagnostic_macros, "1.0.0", None, None), (active, rustc_const_unstable, "1.0.0", None, None), (active, box_syntax, "1.0.0", Some(49733), None), (active, unboxed_closures, "1.0.0", Some(29625), None), (active, fundamental, "1.0.0", Some(29635), None), (active, main, "1.0.0", Some(29634), None), (active, needs_allocator, "1.4.0", Some(27389), None), (active, on_unimplemented, "1.0.0", Some(29628), None), (active, plugin, "1.0.0", Some(29597), None), (active, simd_ffi, "1.0.0", Some(27731), None), (active, start, "1.0.0", Some(29633), None), (active, structural_match, "1.8.0", Some(31434), None), (active, panic_runtime, "1.10.0", Some(32837), None), (active, needs_panic_runtime, "1.10.0", Some(32837), None), // OIBIT specific features (active, optin_builtin_traits, "1.0.0", Some(13231), None), // Allows use of #[staged_api] // // rustc internal (active, staged_api, "1.0.0", None, None), // Allows using #![no_core] (active, no_core, "1.3.0", Some(29639), None), // Allows using `box` in patterns; RFC 
469 (active, box_patterns, "1.0.0", Some(29641), None), // Allows using the unsafe_destructor_blind_to_params attribute; // RFC 1238 (active, dropck_parametricity, "1.3.0", Some(28498), None), // Allows using the may_dangle attribute; RFC 1327 (active, dropck_eyepatch, "1.10.0", Some(34761), None), // Allows the use of custom attributes; RFC 572 (active, custom_attribute, "1.0.0", Some(29642), None), // Allows the use of #[derive(Anything)] as sugar for // #[derive_Anything]. (active, custom_derive, "1.0.0", Some(29644), None), // Allows the use of rustc_* attributes; RFC 572 (active, rustc_attrs, "1.0.0", Some(29642), None), // Allows the use of non lexical lifetimes; RFC 2094 (active, nll, "1.0.0", Some(43234), None), // Allows the use of #[allow_internal_unstable]. This is an // attribute on macro_rules! and can't use the attribute handling // below (it has to be checked before expansion possibly makes // macros disappear). // // rustc internal (active, allow_internal_unstable, "1.0.0", None, None), // Allows the use of #[allow_internal_unsafe]. This is an // attribute on macro_rules! and can't use the attribute handling // below (it has to be checked before expansion possibly makes // macros disappear). // // rustc internal (active, allow_internal_unsafe, "1.0.0", None, None), // #23121. Array patterns have some hazards yet. (active, slice_patterns, "1.0.0", Some(23121), None), // Allows the definition of `const fn` functions. (active, const_fn, "1.2.0", Some(24111), None), // Allows let bindings and destructuring in `const fn` functions and constants. 
(active, const_let, "1.22.1", Some(48821), None), // Allows accessing fields of unions inside const fn (active, const_fn_union, "1.27.0", Some(51909), None), // Allows casting raw pointers to `usize` during const eval (active, const_raw_ptr_to_usize_cast, "1.27.0", Some(51910), None), // Allows dereferencing raw pointers during const eval (active, const_raw_ptr_deref, "1.27.0", Some(51911), None), // Allows comparing raw pointers during const eval (active, const_compare_raw_pointers, "1.27.0", Some(53020), None), // Allows panicking during const eval (produces compile-time errors) (active, const_panic, "1.30.0", Some(51999), None), // Allows using #[prelude_import] on glob `use` items. // // rustc internal (active, prelude_import, "1.2.0", None, None), // Allows default type parameters to influence type inference. (active, default_type_parameter_fallback, "1.3.0", Some(27336), None), // Allows associated type defaults (active, associated_type_defaults, "1.2.0", Some(29661), None), // Allows `repr(simd)`, and importing the various simd intrinsics (active, repr_simd, "1.4.0", Some(27731), None), // Allows `extern "platform-intrinsic" { ... }` (active, platform_intrinsics, "1.4.0", Some(27731), None), // Allows `#[unwind(..)]` // rustc internal for rust runtime (active, unwind_attributes, "1.4.0", None, None), // Allows the use of `#[naked]` on functions. (active, naked_functions, "1.9.0", Some(32408), None), // Allows `#[no_debug]` (active, no_debug, "1.5.0", Some(29721), None), // Allows `#[omit_gdb_pretty_printer_section]` // // rustc internal (active, omit_gdb_pretty_printer_section, "1.5.0", None, None), // Allows cfg(target_vendor = "..."). 
(active, cfg_target_vendor, "1.5.0", Some(29718), None), // Allow attributes on expressions and non-item statements (active, stmt_expr_attributes, "1.6.0", Some(15701), None), // allow using type ascription in expressions (active, type_ascription, "1.6.0", Some(23416), None), // Allows cfg(target_thread_local) (active, cfg_target_thread_local, "1.7.0", Some(29594), None), // rustc internal (active, abi_vectorcall, "1.7.0", None, None), // X..Y patterns (active, exclusive_range_pattern, "1.11.0", Some(37854), None), // impl specialization (RFC 1210) (active, specialization, "1.7.0", Some(31844), None), // Allows cfg(target_has_atomic = "..."). (active, cfg_target_has_atomic, "1.9.0", Some(32976), None), // The `!` type. Does not imply exhaustive_patterns (below) any more. (active, never_type, "1.13.0", Some(35121), None), // Allows exhaustive pattern matching on types that contain uninhabited types (active, exhaustive_patterns, "1.13.0", Some(51085), None), // Allows all literals in attribute lists and values of key-value pairs (active, attr_literals, "1.13.0", Some(34981), None), // Allows untagged unions `union U { ... }` (active, untagged_unions, "1.13.0", Some(32836), None), // Used to identify the `compiler_builtins` crate // rustc internal (active, compiler_builtins, "1.13.0", None, None), // Allows #[link(..., cfg(..))] (active, link_cfg, "1.14.0", Some(37406), None), // `extern "ptx-*" fn()` (active, abi_ptx, "1.15.0", Some(38788), None), // The `repr(i128)` annotation for enums (active, repr128, "1.16.0", Some(35118), None), // The `unadjusted` ABI. Perma unstable. // rustc internal (active, abi_unadjusted, "1.16.0", None, None), // Declarative macros 2.0 (`macro`). 
(active, decl_macro, "1.17.0", Some(39412), None), // Allows #[link(kind="static-nobundle"...)] (active, static_nobundle, "1.16.0", Some(37403), None), // `extern "msp430-interrupt" fn()` (active, abi_msp430_interrupt, "1.16.0", Some(38487), None), // Used to identify crates that contain sanitizer runtimes // rustc internal (active, sanitizer_runtime, "1.17.0", None, None), // Used to identify crates that contain the profiler runtime // // rustc internal (active, profiler_runtime, "1.18.0", None, None), // `extern "x86-interrupt" fn()` (active, abi_x86_interrupt, "1.17.0", Some(40180), None), // Allows the `catch {...}` expression (active, catch_expr, "1.17.0", Some(31436), None), // Used to preserve symbols (see llvm.used) (active, used, "1.18.0", Some(40289), None), // Allows module-level inline assembly by way of global_asm!() (active, global_asm, "1.18.0", Some(35119), None), // Allows overlapping impls of marker traits (active, overlapping_marker_traits, "1.18.0", Some(29864), None), // rustc internal (active, abi_thiscall, "1.19.0", None, None), // Allows a test to fail without failing the whole suite (active, allow_fail, "1.19.0", Some(42219), None), // Allows unsized tuple coercion. 
(active, unsized_tuple_coercion, "1.20.0", Some(42877), None), // Generators (active, generators, "1.21.0", Some(43122), None), // Trait aliases (active, trait_alias, "1.24.0", Some(41517), None), // rustc internal (active, allocator_internals, "1.20.0", None, None), // #[doc(cfg(...))] (active, doc_cfg, "1.21.0", Some(43781), None), // #[doc(masked)] (active, doc_masked, "1.21.0", Some(44027), None), // #[doc(spotlight)] (active, doc_spotlight, "1.22.0", Some(45040), None), // #[doc(include="some-file")] (active, external_doc, "1.22.0", Some(44732), None), // Future-proofing enums/structs with #[non_exhaustive] attribute (RFC 2008) (active, non_exhaustive, "1.22.0", Some(44109), None), // `crate` as visibility modifier, synonymous to `pub(crate)` (active, crate_visibility_modifier, "1.23.0", Some(45388), Some(Edition::Edition2018)), // extern types (active, extern_types, "1.23.0", Some(43467), None), // Allows trait methods with arbitrary self types (active, arbitrary_self_types, "1.23.0", Some(44874), None), // `crate` in paths (active, crate_in_paths, "1.23.0", Some(45477), Some(Edition::Edition2018)), // In-band lifetime bindings (e.g. 
`fn foo(x: &'a u8) -> &'a u8`) (active, in_band_lifetimes, "1.23.0", Some(44524), None), // Generic associated types (RFC 1598) (active, generic_associated_types, "1.23.0", Some(44265), None), // Resolve absolute paths as paths from other crates (active, extern_absolute_paths, "1.24.0", Some(44660), Some(Edition::Edition2018)), // `foo.rs` as an alternative to `foo/mod.rs` (active, non_modrs_mods, "1.24.0", Some(44660), Some(Edition::Edition2018)), // `extern` in paths (active, extern_in_paths, "1.23.0", Some(44660), None), // Use `?` as the Kleene "at most one" operator (active, macro_at_most_once_rep, "1.25.0", Some(48075), None), // Infer outlives requirements; RFC 2093 (active, infer_outlives_requirements, "1.26.0", Some(44493), None), // Infer static outlives requirements; RFC 2093 (active, infer_static_outlives_requirements, "1.26.0", Some(44493), None), // Multiple patterns with `|` in `if let` and `while let` (active, if_while_or_patterns, "1.26.0", Some(48215), None), // Parentheses in patterns (active, pattern_parentheses, "1.26.0", Some(51087), None), // Allows `#[repr(packed)]` attribute on structs (active, repr_packed, "1.26.0", Some(33158), None), // `use path as _;` and `extern crate c as _;` (active, underscore_imports, "1.26.0", Some(48216), None), // Allows macro invocations in `extern {}` blocks (active, macros_in_extern, "1.27.0", Some(49476), None), // `existential type` (active, existential_type, "1.28.0", Some(34511), None), // unstable #[target_feature] directives (active, arm_target_feature, "1.27.0", Some(44839), None), (active, aarch64_target_feature, "1.27.0", Some(44839), None), (active, hexagon_target_feature, "1.27.0", Some(44839), None), (active, powerpc_target_feature, "1.27.0", Some(44839), None), (active, mips_target_feature, "1.27.0", Some(44839), None), (active, avx512_target_feature, "1.27.0", Some(44839), None), (active, mmx_target_feature, "1.27.0", Some(44839), None), (active, sse4a_target_feature, "1.27.0", Some(44839), 
None), (active, tbm_target_feature, "1.27.0", Some(44839), None), (active, wasm_target_feature, "1.30.0", Some(44839), None), // Allows macro invocations of the form `#[foo::bar]` (active, proc_macro_path_invoc, "1.27.0", Some(38356), None), // Allows macro invocations on modules expressions and statements and // procedural macros to expand to non-items. (active, proc_macro_mod, "1.27.0", Some(38356), None), (active, proc_macro_expr, "1.27.0", Some(38356), None), (active, proc_macro_non_items, "1.27.0", Some(38356), None), (active, proc_macro_gen, "1.27.0", Some(38356), None), // #[doc(alias = "...")] (active, doc_alias, "1.27.0", Some(50146), None), // Access to crate names passed via `--extern` through prelude (active, extern_prelude, "1.27.0", Some(44660), Some(Edition::Edition2018)), // Scoped attributes (active, tool_attributes, "1.25.0", Some(44690), None), // Scoped lints (active, tool_lints, "1.28.0", Some(44690), None), // Allows irrefutable patterns in if-let and while-let statements (RFC 2086) (active, irrefutable_let_patterns, "1.27.0", Some(44495), None), // Allows use of the :literal macro fragment specifier (RFC 1576) (active, macro_literal_matcher, "1.27.0", Some(35625), None), // inconsistent bounds in where clauses (active, trivial_bounds, "1.28.0", Some(48214), None), // 'a: { break 'a; } (active, label_break_value, "1.28.0", Some(48594), None), // Integer match exhaustiveness checking (active, exhaustive_integer_patterns, "1.30.0", Some(50907), None), // #[panic_implementation] (active, panic_implementation, "1.28.0", Some(44489), None), // #[doc(keyword = "...")] (active, doc_keyword, "1.28.0", Some(51315), None), // Allows async and await syntax (active, async_await, "1.28.0", Some(50547), None), // #[alloc_error_handler] (active, alloc_error_handler, "1.29.0", Some(51540), None), (active, abi_amdgpu_kernel, "1.29.0", Some(51575), None), // impl<I:Iterator> Iterator for &mut Iterator // impl Debug for Foo<'_> (active, 
impl_header_lifetime_elision, "1.30.0", Some(15872), Some(Edition::Edition2018)), // Support for arbitrary delimited token streams in non-macro attributes (active, unrestricted_attribute_tokens, "1.30.0", Some(44690), None), // Allows `use x::y;` to resolve through `self::x`, not just `::x` (active, uniform_paths, "1.30.0", Some(53130), None), // Allows `Self` in type definitions (active, self_in_typedefs, "1.30.0", Some(49303), None), // unsized rvalues at arguments and parameters (active, unsized_locals, "1.30.0", Some(48055), None), ); declare_features! ( (removed, import_shadowing, "1.0.0", None, None, None), (removed, managed_boxes, "1.0.0", None, None, None), // Allows use of unary negate on unsigned integers, e.g. -e for e: u8 (removed, negate_unsigned, "1.0.0", Some(29645), None, None), (removed, reflect, "1.0.0", Some(27749), None, None), // A way to temporarily opt out of opt in copy. This will *never* be accepted. (removed, opt_out_copy, "1.0.0", None, None, None), (removed, quad_precision_float, "1.0.0", None, None, None), (removed, struct_inherit, "1.0.0", None, None, None), (removed, test_removed_feature, "1.0.0", None, None, None), (removed, visible_private_types, "1.0.0", None, None, None), (removed, unsafe_no_drop_flag, "1.0.0", None, None, None), // Allows using items which are missing stability attributes // rustc internal (removed, unmarked_api, "1.0.0", None, None, None), (removed, pushpop_unsafe, "1.2.0", None, None, None), (removed, allocator, "1.0.0", None, None, None), (removed, simd, "1.0.0", Some(27731), None, Some("removed in favor of `#[repr(simd)]`")), (removed, advanced_slice_patterns, "1.0.0", Some(23121), None, Some("merged into `#![feature(slice_patterns)]`")), (removed, macro_reexport, "1.0.0", Some(29638), None, Some("subsumed by `pub use`")), ); declare_features! ( (stable_removed, no_stack_check, "1.0.0", None, None), ); declare_features! 
( (accepted, associated_types, "1.0.0", None, None), // allow overloading augmented assignment operations like `a += b` (accepted, augmented_assignments, "1.8.0", Some(28235), None), // allow empty structs and enum variants with braces (accepted, braced_empty_structs, "1.8.0", Some(29720), None), // Allows indexing into constant arrays. (accepted, const_indexing, "1.26.0", Some(29947), None), (accepted, default_type_params, "1.0.0", None, None), (accepted, globs, "1.0.0", None, None), (accepted, if_let, "1.0.0", None, None), // A temporary feature gate used to enable parser extensions needed // to bootstrap fix for #5723. (accepted, issue_5723_bootstrap, "1.0.0", None, None), (accepted, macro_rules, "1.0.0", None, None), // Allows using #![no_std] (accepted, no_std, "1.6.0", None, None), (accepted, slicing_syntax, "1.0.0", None, None), (accepted, struct_variant, "1.0.0", None, None), // These are used to test this portion of the compiler, they don't actually // mean anything (accepted, test_accepted_feature, "1.0.0", None, None), (accepted, tuple_indexing, "1.0.0", None, None), // Allows macros to appear in the type position. (accepted, type_macros, "1.13.0", Some(27245), None), (accepted, while_let, "1.0.0", None, None), // Allows `#[deprecated]` attribute (accepted, deprecated, "1.9.0", Some(29935), None), // `expr?` (accepted, question_mark, "1.13.0", Some(31436), None), // Allows `..` in tuple (struct) patterns (accepted, dotdot_in_tuple_patterns, "1.14.0", Some(33627), None), (accepted, item_like_imports, "1.15.0", Some(35120), None), // Allows using `Self` and associated types in struct expressions and patterns. (accepted, more_struct_aliases, "1.16.0", Some(37544), None), // elide `'static` lifetimes in `static`s and `const`s (accepted, static_in_const, "1.17.0", Some(35897), None), // Allows field shorthands (`x` meaning `x: x`) in struct literal expressions. 
(accepted, field_init_shorthand, "1.17.0", Some(37340), None), // Allows the definition recursive static items. (accepted, static_recursion, "1.17.0", Some(29719), None), // pub(restricted) visibilities (RFC 1422) (accepted, pub_restricted, "1.18.0", Some(32409), None), // The #![windows_subsystem] attribute (accepted, windows_subsystem, "1.18.0", Some(37499), None), // Allows `break {expr}` with a value inside `loop`s. (accepted, loop_break_value, "1.19.0", Some(37339), None), // Permits numeric fields in struct expressions and patterns. (accepted, relaxed_adts, "1.19.0", Some(35626), None), // Coerces non capturing closures to function pointers (accepted, closure_to_fn_coercion, "1.19.0", Some(39817), None), // Allows attributes on struct literal fields. (accepted, struct_field_attributes, "1.20.0", Some(38814), None), // Allows the definition of associated constants in `trait` or `impl` // blocks. (accepted, associated_consts, "1.20.0", Some(29646), None), // Usage of the `compile_error!` macro (accepted, compile_error, "1.20.0", Some(40872), None), // See rust-lang/rfcs#1414. Allows code like `let x: &'static u32 = &42` to work. 
(accepted, rvalue_static_promotion, "1.21.0", Some(38865), None), // Allow Drop types in constants (RFC 1440) (accepted, drop_types_in_const, "1.22.0", Some(33156), None), // Allows the sysV64 ABI to be specified on all platforms // instead of just the platforms on which it is the C ABI (accepted, abi_sysv64, "1.24.0", Some(36167), None), // Allows `repr(align(16))` struct attribute (RFC 1358) (accepted, repr_align, "1.25.0", Some(33626), None), // allow '|' at beginning of match arms (RFC 1925) (accepted, match_beginning_vert, "1.25.0", Some(44101), None), // Nested groups in `use` (RFC 2128) (accepted, use_nested_groups, "1.25.0", Some(44494), None), // a..=b and ..=b (accepted, inclusive_range_syntax, "1.26.0", Some(28237), None), // allow `..=` in patterns (RFC 1192) (accepted, dotdoteq_in_patterns, "1.26.0", Some(28237), None), // Termination trait in main (RFC 1937) (accepted, termination_trait, "1.26.0", Some(43301), None), // Copy/Clone closures (RFC 2132) (accepted, clone_closures, "1.26.0", Some(44490), None), (accepted, copy_closures, "1.26.0", Some(44490), None), // Allows `impl Trait` in function arguments. (accepted, universal_impl_trait, "1.26.0", Some(34511), None), // Allows `impl Trait` in function return types. (accepted, conservative_impl_trait, "1.26.0", Some(34511), None), // The `i128` type (accepted, i128_type, "1.26.0", Some(35118), None), // Default match binding modes (RFC 2005) (accepted, match_default_bindings, "1.26.0", Some(42640), None), // allow `'_` placeholder lifetimes (accepted, underscore_lifetimes, "1.26.0", Some(44524), None), // Allows attributes on lifetime/type formal parameters in generics (RFC 1327) (accepted, generic_param_attrs, "1.27.0", Some(48848), None), // Allows cfg(target_feature = "..."). 
(accepted, cfg_target_feature, "1.27.0", Some(29717), None), // Allows #[target_feature(...)] (accepted, target_feature, "1.27.0", None, None), // Trait object syntax with `dyn` prefix (accepted, dyn_trait, "1.27.0", Some(44662), None), // allow `#[must_use]` on functions; and, must-use operators (RFC 1940) (accepted, fn_must_use, "1.27.0", Some(43302), None), // Allows use of the :lifetime macro fragment specifier (accepted, macro_lifetime_matcher, "1.27.0", Some(34303), None), // Termination trait in tests (RFC 1937) (accepted, termination_trait_test, "1.27.0", Some(48854), None), // The #[global_allocator] attribute (accepted, global_allocator, "1.28.0", Some(27389), None), // Allows `#[repr(transparent)]` attribute on newtype structs (accepted, repr_transparent, "1.28.0", Some(43036), None), // Defining procedural macros in `proc-macro` crates (accepted, proc_macro, "1.29.0", Some(38356), None), // Allows use of the :vis macro fragment specifier (accepted, macro_vis_matcher, "1.29.0", Some(41022), None), // Allows importing and reexporting macros with `use`, // enables macro modularization in general. (accepted, use_extern_macros, "1.30.0", Some(35896), None), // Allows keywords to be escaped for use as identifiers (accepted, raw_identifiers, "1.30.0", Some(48589), None), ); // If you change this, please modify src/doc/unstable-book as well. You must // move that documentation into the relevant place in the other docs, and // remove the chapter on the flag. #[derive(Copy, Clone, PartialEq, Debug)] pub enum AttributeType { /// Normal, builtin attribute that is consumed /// by the compiler before the unused_attribute check Normal, /// Builtin attribute that may not be consumed by the compiler /// before the unused_attribute check. 
These attributes /// will be ignored by the unused_attribute lint Whitelisted, /// Builtin attribute that is only allowed at the crate level CrateLevel, } pub enum AttributeGate { /// Is gated by a given feature gate, reason /// and function to check if enabled Gated(Stability, &'static str, &'static str, fn(&Features) -> bool), /// Ungated attribute, can be used on all release channels Ungated, } impl AttributeGate { fn is_deprecated(&self) -> bool { match *self { Gated(Stability::Deprecated(_), ..) => true, _ => false, } } } #[derive(Copy, Clone, Debug)] pub enum Stability { Unstable, // Argument is tracking issue link. Deprecated(&'static str), } // fn() is not Debug impl ::std::fmt::Debug for AttributeGate { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { match *self { Gated(ref stab, name, expl, _) => write!(fmt, "Gated({:?}, {}, {})", stab, name, expl), Ungated => write!(fmt, "Ungated") } } } macro_rules! cfg_fn { ($field: ident) => {{ fn f(features: &Features) -> bool { features.$field } f as fn(&Features) -> bool }} } pub fn deprecated_attributes() -> Vec<&'static (&'static str, AttributeType, AttributeGate)> { BUILTIN_ATTRIBUTES.iter().filter(|a| a.2.is_deprecated()).collect() } pub fn is_builtin_attr_name(name: ast::Name) -> bool { BUILTIN_ATTRIBUTES.iter().any(|&(builtin_name, _, _)| name == builtin_name) } pub fn is_builtin_attr(attr: &ast::Attribute) -> bool { BUILTIN_ATTRIBUTES.iter().any(|&(builtin_name, _, _)| attr.path == builtin_name) } // Attributes that have a special meaning to rustc or rustdoc pub const BUILTIN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGate)] = &[ // Normal attributes ("warn", Normal, Ungated), ("allow", Normal, Ungated), ("forbid", Normal, Ungated), ("deny", Normal, Ungated), ("macro_use", Normal, Ungated), ("macro_export", Normal, Ungated), ("plugin_registrar", Normal, Ungated), ("cfg", Normal, Ungated), ("cfg_attr", Normal, Ungated), ("main", Normal, Ungated), ("start", Normal, 
Ungated), ("test", Normal, Ungated), ("bench", Normal, Ungated), ("repr", Normal, Ungated), ("path", Normal, Ungated), ("abi", Normal, Ungated), ("automatically_derived", Normal, Ungated), ("no_mangle", Normal, Ungated), ("no_link", Normal, Ungated), ("derive", Normal, Ungated), ("should_panic", Normal, Ungated), ("ignore", Normal, Ungated), ("no_implicit_prelude", Normal, Ungated), ("reexport_test_harness_main", Normal, Ungated), ("link_args", Normal, Gated(Stability::Unstable, "link_args", "the `link_args` attribute is experimental and not \ portable across platforms, it is recommended to \ use `#[link(name = \"foo\")] instead", cfg_fn!(link_args))), ("macro_escape", Normal, Ungated), // RFC #1445. ("structural_match", Whitelisted, Gated(Stability::Unstable, "structural_match", "the semantics of constant patterns is \ not yet settled", cfg_fn!(structural_match))), // RFC #2008 ("non_exhaustive", Whitelisted, Gated(Stability::Unstable, "non_exhaustive", "non exhaustive is an experimental feature", cfg_fn!(non_exhaustive))), ("plugin", CrateLevel, Gated(Stability::Unstable, "plugin", "compiler plugins are experimental \ and possibly buggy", cfg_fn!(plugin))), ("no_std", CrateLevel, Ungated), ("no_core", CrateLevel, Gated(Stability::Unstable, "no_core", "no_core is experimental", cfg_fn!(no_core))), ("lang", Normal, Gated(Stability::Unstable, "lang_items", "language items are subject to change", cfg_fn!(lang_items))), ("linkage", Whitelisted, Gated(Stability::Unstable, "linkage", "the `linkage` attribute is experimental \ and not portable across platforms", cfg_fn!(linkage))), ("thread_local", Whitelisted, Gated(Stability::Unstable, "thread_local", "`#[thread_local]` is an experimental feature, and does \ not currently handle destructors.", cfg_fn!(thread_local))), ("rustc_on_unimplemented", Normal, Gated(Stability::Unstable, "on_unimplemented", "the `#[rustc_on_unimplemented]` attribute \ is an experimental feature", cfg_fn!(on_unimplemented))), 
("rustc_const_unstable", Normal, Gated(Stability::Unstable, "rustc_const_unstable", "the `#[rustc_const_unstable]` attribute \ is an internal feature", cfg_fn!(rustc_const_unstable))), ("global_allocator", Normal, Ungated), ("default_lib_allocator", Whitelisted, Gated(Stability::Unstable, "allocator_internals", "the `#[default_lib_allocator]` \ attribute is an experimental feature", cfg_fn!(allocator_internals))), ("needs_allocator", Normal, Gated(Stability::Unstable, "allocator_internals", "the `#[needs_allocator]` \ attribute is an experimental \ feature", cfg_fn!(allocator_internals))), ("panic_runtime", Whitelisted, Gated(Stability::Unstable, "panic_runtime", "the `#[panic_runtime]` attribute is \ an experimental feature", cfg_fn!(panic_runtime))), ("needs_panic_runtime", Whitelisted, Gated(Stability::Unstable, "needs_panic_runtime", "the `#[needs_panic_runtime]` \ attribute is an experimental \ feature", cfg_fn!(needs_panic_runtime))), ("rustc_outlives", Normal, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_outlives]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_variance", Normal, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_variance]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_regions", Normal, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_regions]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_error", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_error]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_if_this_changed", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_then_this_would_need", Whitelisted, 
Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_dirty", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_dirty]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_clean", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_clean]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_partition_reused", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_partition_codegened", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_synthetic", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_symbol_name", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), ("rustc_item_path", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), ("rustc_mir", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_mir]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_inherit_overflow_checks", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "the `#[rustc_inherit_overflow_checks]` \ attribute is just used to control \ overflow checking behavior of several \ libcore functions that are inlined \ across crates and will never be stable", cfg_fn!(rustc_attrs))), ("rustc_dump_program_clauses", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", 
"the `#[rustc_dump_program_clauses]` \ attribute is just used for rustc unit \ tests and will never be stable", cfg_fn!(rustc_attrs))), // RFC #2094 ("nll", Whitelisted, Gated(Stability::Unstable, "nll", "Non lexical lifetimes", cfg_fn!(nll))), ("compiler_builtins", Whitelisted, Gated(Stability::Unstable, "compiler_builtins", "the `#[compiler_builtins]` attribute is used to \ identify the `compiler_builtins` crate which \ contains compiler-rt intrinsics and will never be \ stable", cfg_fn!(compiler_builtins))), ("sanitizer_runtime", Whitelisted, Gated(Stability::Unstable, "sanitizer_runtime", "the `#[sanitizer_runtime]` attribute is used to \ identify crates that contain the runtime of a \ sanitizer and will never be stable", cfg_fn!(sanitizer_runtime))), ("profiler_runtime", Whitelisted, Gated(Stability::Unstable, "profiler_runtime", "the `#[profiler_runtime]` attribute is used to \ identify the `profiler_builtins` crate which \ contains the profiler runtime and will never be \ stable", cfg_fn!(profiler_runtime))), ("allow_internal_unstable", Normal, Gated(Stability::Unstable, "allow_internal_unstable", EXPLAIN_ALLOW_INTERNAL_UNSTABLE, cfg_fn!(allow_internal_unstable))), ("allow_internal_unsafe", Normal, Gated(Stability::Unstable, "allow_internal_unsafe", EXPLAIN_ALLOW_INTERNAL_UNSAFE, cfg_fn!(allow_internal_unsafe))), ("fundamental", Whitelisted, Gated(Stability::Unstable, "fundamental", "the `#[fundamental]` attribute \ is an experimental feature", cfg_fn!(fundamental))), ("proc_macro_derive", Normal, Ungated), ("rustc_copy_clone_marker", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "internal implementation detail", cfg_fn!(rustc_attrs))), // FIXME: #14408 whitelist docs since rustdoc looks at them ("doc", Whitelisted, Ungated), // FIXME: #14406 these are processed in codegen, which happens after the // lint pass ("cold", Whitelisted, Ungated), ("naked", Whitelisted, Gated(Stability::Unstable, "naked_functions", "the `#[naked]` attribute \ is an 
experimental feature", cfg_fn!(naked_functions))), ("target_feature", Whitelisted, Ungated), ("export_name", Whitelisted, Ungated), ("inline", Whitelisted, Ungated), ("link", Whitelisted, Ungated), ("link_name", Whitelisted, Ungated), ("link_section", Whitelisted, Ungated), ("no_builtins", Whitelisted, Ungated), ("no_mangle", Whitelisted, Ungated), ("no_debug", Whitelisted, Gated( Stability::Deprecated("https://github.com/rust-lang/rust/issues/29721"), "no_debug", "the `#[no_debug]` attribute was an experimental feature that has been \ deprecated due to lack of demand", cfg_fn!(no_debug))), ("omit_gdb_pretty_printer_section", Whitelisted, Gated(Stability::Unstable, "omit_gdb_pretty_printer_section", "the `#[omit_gdb_pretty_printer_section]` \ attribute is just used for the Rust test \ suite", cfg_fn!(omit_gdb_pretty_printer_section))), ("unsafe_destructor_blind_to_params", Normal, Gated(Stability::Deprecated("https://github.com/rust-lang/rust/issues/34761"), "dropck_parametricity", "unsafe_destructor_blind_to_params has been replaced by \ may_dangle and will be removed in the future", cfg_fn!(dropck_parametricity))), ("may_dangle", Normal, Gated(Stability::Unstable, "dropck_eyepatch", "may_dangle has unstable semantics and may be removed in the future", cfg_fn!(dropck_eyepatch))), ("unwind", Whitelisted, Gated(Stability::Unstable, "unwind_attributes", "#[unwind] is experimental", cfg_fn!(unwind_attributes))), ("used", Whitelisted, Gated( Stability::Unstable, "used", "the `#[used]` attribute is an experimental feature", cfg_fn!(used))), // used in resolve ("prelude_import", Whitelisted, Gated(Stability::Unstable, "prelude_import", "`#[prelude_import]` is for use by rustc only", cfg_fn!(prelude_import))), // FIXME: #14407 these are only looked at on-demand so we can't // guarantee they'll have already been checked ("rustc_deprecated", Whitelisted, Ungated), ("must_use", Whitelisted, Ungated), ("stable", Whitelisted, Ungated), ("unstable", Whitelisted, Ungated), 
("deprecated", Normal, Ungated), ("rustc_paren_sugar", Normal, Gated(Stability::Unstable, "unboxed_closures", "unboxed_closures are still evolving", cfg_fn!(unboxed_closures))), ("windows_subsystem", Whitelisted, Ungated), ("proc_macro_attribute", Normal, Ungated), ("proc_macro", Normal, Ungated), ("rustc_derive_registrar", Normal, Gated(Stability::Unstable, "rustc_derive_registrar", "used internally by rustc", cfg_fn!(rustc_attrs))), ("allow_fail", Normal, Gated(Stability::Unstable, "allow_fail", "allow_fail attribute is currently unstable", cfg_fn!(allow_fail))), ("rustc_std_internal_symbol", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "this is an internal attribute that will \ never be stable", cfg_fn!(rustc_attrs))), // whitelists "identity-like" conversion methods to suggest on type mismatch ("rustc_conversion_suggestion", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "this is an internal attribute that will \ never be stable", cfg_fn!(rustc_attrs))), ("rustc_args_required_const", Whitelisted, Gated(Stability::Unstable, "rustc_attrs", "never will be stable", cfg_fn!(rustc_attrs))), // RFC #2093 ("infer_outlives_requirements", Normal, Gated(Stability::Unstable, "infer_outlives_requirements", "infer outlives requirements is an experimental feature", cfg_fn!(infer_outlives_requirements))), // RFC #2093 ("infer_static_outlives_requirements", Normal, Gated(Stability::Unstable, "infer_static_outlives_requirements", "infer 'static lifetime requirements", cfg_fn!(infer_static_outlives_requirements))), // RFC 2070 ("panic_implementation", Normal, Gated(Stability::Unstable, "panic_implementation", "#[panic_implementation] is an unstable feature", cfg_fn!(panic_implementation))), ("alloc_error_handler", Normal, Gated(Stability::Unstable, "alloc_error_handler", "#[alloc_error_handler] is an unstable feature", cfg_fn!(alloc_error_handler))), // Crate level attributes ("crate_name", CrateLevel, Ungated), ("crate_type", CrateLevel, Ungated), ("crate_id", 
CrateLevel, Ungated), ("feature", CrateLevel, Ungated), ("no_start", CrateLevel, Ungated), ("no_main", CrateLevel, Ungated), ("no_builtins", CrateLevel, Ungated), ("recursion_limit", CrateLevel, Ungated), ("type_length_limit", CrateLevel, Ungated), ]; // cfg(...)'s that are feature gated const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[ // (name in cfg, feature, function to check if the feature is enabled) ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)), ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)), ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)), ]; #[derive(Debug)] pub struct GatedCfg { span: Span, index: usize, } impl GatedCfg { pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> { let name = cfg.name().as_str(); GATED_CFGS.iter() .position(|info| info.0 == name) .map(|idx| { GatedCfg { span: cfg.span, index: idx } }) } pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; if !has_feature(features) && !self.span.allows_unstable() { let explain = format!("`cfg({})` is experimental and subject to change", cfg); emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } } } struct Context<'a> { features: &'a Features, parse_sess: &'a ParseSess, plugin_attributes: &'a [(String, AttributeType)], } macro_rules! gate_feature_fn { ($cx: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr, $level: expr) => {{ let (cx, has_feature, span, name, explain, level) = ($cx, $has_feature, $span, $name, $explain, $level); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !span.allows_unstable() { leveled_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain, level) .emit(); } }} } macro_rules! 
gate_feature { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => { gate_feature_fn!($cx, |x:&Features| x.$feature, $span, stringify!($feature), $explain, GateStrength::Hard) }; ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => { gate_feature_fn!($cx, |x:&Features| x.$feature, $span, stringify!($feature), $explain, $level) }; } impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); let name = attr.name().as_str(); for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { if name == n { if let Gated(_, name, desc, ref has_feature) = *gateage { gate_feature_fn!(self, has_feature, attr.span, name, desc, GateStrength::Hard); } else if name == "doc" { if let Some(content) = attr.meta_item_list() { if content.iter().any(|c| c.check_name("include")) { gate_feature!(self, external_doc, attr.span, "#[doc(include = \"...\")] is experimental" ); } } } debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage); return; } } for &(ref n, ref ty) in self.plugin_attributes { if attr.path == &**n { // Plugins can't gate attributes, so we don't check for it // unlike the code above; we only use this loop to // short-circuit to avoid the checks below debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty); return; } } if name.starts_with("rustc_") { gate_feature!(self, rustc_attrs, attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); } else if name.starts_with("derive_") { gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); } else if !attr::is_known(attr) { // Only run the custom attribute lint during regular // feature gate checking. 
Macro gating runs // before the plugin attributes are registered // so we skip this then if !is_macro { let msg = format!("The attribute `{}` is currently unknown to the compiler and \ may have meaning added to it in the future", attr.path); gate_feature!(self, custom_attribute, attr.span, &msg); } } } } pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; cx.check_attribute(attr, true); } fn find_lang_feature_issue(feature: &str) -> Option<u32> { if let Some(info) = ACTIVE_FEATURES.iter().find(|t| t.0 == feature) { let issue = info.2; // FIXME (#28244): enforce that active features have issue numbers // assert!(issue.is_some()) issue } else { // search in Accepted, Removed, or Stable Removed features let found = ACCEPTED_FEATURES.iter().chain(REMOVED_FEATURES).chain(STABLE_REMOVED_FEATURES) .find(|t| t.0 == feature); match found { Some(&(_, _, issue, _)) => issue, None => panic!("Feature `{}` is not declared anywhere", feature), } } } pub enum GateIssue { Language, Library(Option<u32>) } #[derive(Debug, Copy, Clone, PartialEq)] pub enum GateStrength { /// A hard error. (Most feature gates should use this.) Hard, /// Only a warning. (Use this only as backwards-compatibility demands.) 
Soft, } pub fn emit_feature_err(sess: &ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str) { feature_err(sess, feature, span, issue, explain).emit(); } pub fn feature_err<'a>(sess: &'a ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str) -> DiagnosticBuilder<'a> { leveled_feature_err(sess, feature, span, issue, explain, GateStrength::Hard) } fn leveled_feature_err<'a>(sess: &'a ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str, level: GateStrength) -> DiagnosticBuilder<'a> { let diag = &sess.span_diagnostic; let issue = match issue { GateIssue::Language => find_lang_feature_issue(feature), GateIssue::Library(lib) => lib, }; let explanation = match issue { None | Some(0) => explain.to_owned(), Some(n) => format!("{} (see issue #{})", explain, n) }; let mut err = match level { GateStrength::Hard => { diag.struct_span_err_with_code(span, &explanation, stringify_error_code!(E0658)) } GateStrength::Soft => diag.struct_span_warn(span, &explanation), }; // #23973: do not suggest `#![feature(...)]` if we are in beta/stable if sess.unstable_features.is_nightly_build() { err.help(&format!("add #![feature({})] to the \ crate attributes to enable", feature)); } // If we're on stable and only emitting a "soft" warning, add a note to // clarify that the feature isn't "on" (rather than being on but // warning-worthy). 
if !sess.unstable_features.is_nightly_build() && level == GateStrength::Soft { err.help("a nightly build of the compiler is required to enable this feature"); } err } const EXPLAIN_BOX_SYNTAX: &'static str = "box expression syntax is experimental; you can call `Box::new` instead."; pub const EXPLAIN_STMT_ATTR_SYNTAX: &'static str = "attributes on expressions are experimental."; pub const EXPLAIN_ASM: &'static str = "inline assembly is not stable enough for use and is subject to change"; pub const EXPLAIN_GLOBAL_ASM: &'static str = "`global_asm!` is not stable enough for use and is subject to change"; pub const EXPLAIN_LOG_SYNTAX: &'static str = "`log_syntax!` is not stable enough for use and is subject to change"; pub const EXPLAIN_CONCAT_IDENTS: &'static str = "`concat_idents` is not stable enough for use and is subject to change"; pub const EXPLAIN_FORMAT_ARGS_NL: &'static str = "`format_args_nl` is only for internal language use and is subject to change"; pub const EXPLAIN_TRACE_MACROS: &'static str = "`trace_macros` is not stable enough for use and is subject to change"; pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &'static str = "allow_internal_unstable side-steps feature gating and stability checks"; pub const EXPLAIN_ALLOW_INTERNAL_UNSAFE: &'static str = "allow_internal_unsafe side-steps the unsafe_code lint"; pub const EXPLAIN_CUSTOM_DERIVE: &'static str = "`#[derive]` for custom traits is deprecated and will be removed in the future."; pub const EXPLAIN_DEPR_CUSTOM_DERIVE: &'static str = "`#[derive]` for custom traits is deprecated and will be removed in the future. 
\ Prefer using procedural macro custom derive."; pub const EXPLAIN_DERIVE_UNDERSCORE: &'static str = "attributes of the form `#[derive_*]` are reserved for the compiler"; pub const EXPLAIN_LITERAL_MATCHER: &'static str = ":literal fragment specifier is experimental and subject to change"; pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str = "unsized tuple coercion is not stable enough for use and is subject to change"; pub const EXPLAIN_MACRO_AT_MOST_ONCE_REP: &'static str = "using the `?` macro Kleene operator for \"at most one\" repetition is unstable"; struct PostExpansionVisitor<'a> { context: &'a Context<'a>, } macro_rules! gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ let (cx, span) = ($cx, $span); if !span.allows_unstable() { gate_feature!(cx.context, $feature, span, $explain) } }}; ($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {{ let (cx, span) = ($cx, $span); if !span.allows_unstable() { gate_feature!(cx.context, $feature, span, $explain, $level) } }} } impl<'a> PostExpansionVisitor<'a> { fn check_abi(&self, abi: Abi, span: Span) { match abi { Abi::RustIntrinsic => { gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change"); }, Abi::PlatformIntrinsic => { gate_feature_post!(&self, platform_intrinsics, span, "platform intrinsics are experimental and possibly buggy"); }, Abi::Vectorcall => { gate_feature_post!(&self, abi_vectorcall, span, "vectorcall is experimental and subject to change"); }, Abi::Thiscall => { gate_feature_post!(&self, abi_thiscall, span, "thiscall is experimental and subject to change"); }, Abi::RustCall => { gate_feature_post!(&self, unboxed_closures, span, "rust-call ABI is subject to change"); }, Abi::PtxKernel => { gate_feature_post!(&self, abi_ptx, span, "PTX ABIs are experimental and subject to change"); }, Abi::Unadjusted => { gate_feature_post!(&self, abi_unadjusted, span, "unadjusted ABI is an implementation detail and perma-unstable"); 
}, Abi::Msp430Interrupt => { gate_feature_post!(&self, abi_msp430_interrupt, span, "msp430-interrupt ABI is experimental and subject to change"); }, Abi::X86Interrupt => { gate_feature_post!(&self, abi_x86_interrupt, span, "x86-interrupt ABI is experimental and subject to change"); }, Abi::AmdGpuKernel => { gate_feature_post!(&self, abi_amdgpu_kernel, span, "amdgpu-kernel ABI is experimental and subject to change"); }, // Stable Abi::Cdecl | Abi::Stdcall | Abi::Fastcall | Abi::Aapcs | Abi::Win64 | Abi::SysV64 | Abi::Rust | Abi::C | Abi::System => {} } } } fn contains_novel_literal(item: &ast::MetaItem) -> bool { use ast::MetaItemKind::*; use ast::NestedMetaItemKind::*; match item.node { Word => false, NameValue(ref lit) => !lit.node.is_str(), List(ref list) => list.iter().any(|li| { match li.node { MetaItem(ref mi) => contains_novel_literal(mi), Literal(_) => true, } }), } } impl<'a> PostExpansionVisitor<'a> { fn whole_crate_feature_gates(&mut self, _krate: &ast::Crate) { for &(ident, span) in &*self.context.parse_sess.non_modrs_mods.borrow() { if !span.allows_unstable() { let cx = &self.context; let level = GateStrength::Hard; let has_feature = cx.features.non_modrs_mods; let name = "non_modrs_mods"; debug!("gate_feature(feature = {:?}, span = {:?}); has? 
{}", name, span, has_feature); if !has_feature && !span.allows_unstable() { leveled_feature_err( cx.parse_sess, name, span, GateIssue::Language, "mod statements in non-mod.rs files are unstable", level ) .help(&format!("on stable builds, rename this file to {}{}mod.rs", ident, path::MAIN_SEPARATOR)) .emit(); } } } } } impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { if !attr.span.allows_unstable() { // check for gated attributes self.context.check_attribute(attr, false); } if attr.check_name("doc") { if let Some(content) = attr.meta_item_list() { if content.len() == 1 && content[0].check_name("cfg") { gate_feature_post!(&self, doc_cfg, attr.span, "#[doc(cfg(...))] is experimental" ); } else if content.iter().any(|c| c.check_name("masked")) { gate_feature_post!(&self, doc_masked, attr.span, "#[doc(masked)] is experimental" ); } else if content.iter().any(|c| c.check_name("spotlight")) { gate_feature_post!(&self, doc_spotlight, attr.span, "#[doc(spotlight)] is experimental" ); } else if content.iter().any(|c| c.check_name("alias")) { gate_feature_post!(&self, doc_alias, attr.span, "#[doc(alias = \"...\")] is experimental" ); } else if content.iter().any(|c| c.check_name("keyword")) { gate_feature_post!(&self, doc_keyword, attr.span, "#[doc(keyword = \"...\")] is experimental" ); } } } if !self.context.features.unrestricted_attribute_tokens { // Unfortunately, `parse_meta` cannot be called speculatively because it can report // errors by itself, so we have to call it only if the feature is disabled. 
match attr.parse_meta(self.context.parse_sess) { Ok(meta) => { // allow attr_literals in #[repr(align(x))] and #[repr(packed(n))] let mut allow_attr_literal = false; if attr.path == "repr" { if let Some(content) = meta.meta_item_list() { allow_attr_literal = content.iter().any( |c| c.check_name("align") || c.check_name("packed")); } } if !allow_attr_literal && contains_novel_literal(&meta) { gate_feature_post!(&self, attr_literals, attr.span, "non-string literals in attributes, or string \ literals in top-level positions, are experimental"); } } Err(mut err) => { err.help("try enabling `#![feature(unrestricted_attribute_tokens)]`").emit() } } } } fn visit_name(&mut self, sp: Span, name: ast::Name) { if !name.as_str().is_ascii() { gate_feature_post!(&self, non_ascii_idents, self.context.parse_sess.source_map().def_span(sp), "non-ascii idents are not fully supported."); } } fn visit_use_tree(&mut self, use_tree: &'a ast::UseTree, id: NodeId, _nested: bool) { if let ast::UseTreeKind::Simple(Some(ident), ..) = use_tree.kind { if ident.name == "_" { gate_feature_post!(&self, underscore_imports, use_tree.span, "renaming imports with `_` is unstable"); } } visit::walk_use_tree(self, use_tree, id); } fn visit_item(&mut self, i: &'a ast::Item) { match i.node { ast::ItemKind::ExternCrate(_) => { if i.ident.name == "_" { gate_feature_post!(&self, underscore_imports, i.span, "renaming extern crates with `_` is unstable"); } } ast::ItemKind::ForeignMod(ref foreign_module) => { self.check_abi(foreign_module.abi, i.span); } ast::ItemKind::Fn(..) 
=> { if attr::contains_name(&i.attrs[..], "plugin_registrar") { gate_feature_post!(&self, plugin_registrar, i.span, "compiler plugins are experimental and possibly buggy"); } if attr::contains_name(&i.attrs[..], "start") { gate_feature_post!(&self, start, i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } if attr::contains_name(&i.attrs[..], "main") { gate_feature_post!(&self, main, i.span, "declaration of a nonstandard #[main] \ function may change over time, for now \ a top-level `fn main()` is required"); } } ast::ItemKind::Struct(..) => { if let Some(attr) = attr::find_by_name(&i.attrs[..], "repr") { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name("simd") { gate_feature_post!(&self, repr_simd, attr.span, "SIMD types are experimental and possibly buggy"); } if let Some((name, _)) = item.name_value_literal() { if name == "packed" { gate_feature_post!(&self, repr_packed, attr.span, "the `#[repr(packed(n))]` attribute \ is experimental"); } } } } } ast::ItemKind::TraitAlias(..) => { gate_feature_post!(&self, trait_alias, i.span, "trait aliases are not yet fully implemented"); } ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => { if polarity == ast::ImplPolarity::Negative { gate_feature_post!(&self, optin_builtin_traits, i.span, "negative trait bounds are not yet fully implemented; \ use marker types for now"); } if let ast::Defaultness::Default = defaultness { gate_feature_post!(&self, specialization, i.span, "specialization is unstable"); } } ast::ItemKind::Trait(ast::IsAuto::Yes, ..) => { gate_feature_post!(&self, optin_builtin_traits, i.span, "auto traits are experimental and possibly buggy"); } ast::ItemKind::MacroDef(ast::MacroDef { legacy: false, .. }) => { let msg = "`macro` is experimental"; gate_feature_post!(&self, decl_macro, i.span, msg); } ast::ItemKind::Existential(..) 
=> { gate_feature_post!( &self, existential_type, i.span, "existential types are unstable" ); } _ => {} } visit::walk_item(self, i); } fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) { match i.node { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => { let link_name = attr::first_attr_value_str_by_name(&i.attrs, "link_name"); let links_to_llvm = match link_name { Some(val) => val.as_str().starts_with("llvm."), _ => false }; if links_to_llvm { gate_feature_post!(&self, link_llvm_intrinsics, i.span, "linking to LLVM intrinsics is experimental"); } } ast::ForeignItemKind::Ty => { gate_feature_post!(&self, extern_types, i.span, "extern types are experimental"); } ast::ForeignItemKind::Macro(..) => {} } visit::walk_foreign_item(self, i) } fn visit_ty(&mut self, ty: &'a ast::Ty) { match ty.node { ast::TyKind::BareFn(ref bare_fn_ty) => { self.check_abi(bare_fn_ty.abi, ty.span); } ast::TyKind::Never => { gate_feature_post!(&self, never_type, ty.span, "The `!` type is experimental"); } _ => {} } visit::walk_ty(self, ty) } fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) { if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty { if let ast::TyKind::Never = output_ty.node { // Do nothing } else { self.visit_ty(output_ty) } } } fn visit_expr(&mut self, e: &'a ast::Expr) { match e.node { ast::ExprKind::Box(_) => { gate_feature_post!(&self, box_syntax, e.span, EXPLAIN_BOX_SYNTAX); } ast::ExprKind::Type(..) => { gate_feature_post!(&self, type_ascription, e.span, "type ascription is experimental"); } ast::ExprKind::ObsoleteInPlace(..) => { // these get a hard error in ast-validation } ast::ExprKind::Yield(..) => { gate_feature_post!(&self, generators, e.span, "yield syntax is experimental"); } ast::ExprKind::Catch(_) => { gate_feature_post!(&self, catch_expr, e.span, "`catch` expression is experimental"); } ast::ExprKind::IfLet(ref pats, ..) | ast::ExprKind::WhileLet(ref pats, ..) 
=> { if pats.len() > 1 { gate_feature_post!(&self, if_while_or_patterns, e.span, "multiple patterns in `if let` and `while let` are unstable"); } } ast::ExprKind::Block(_, opt_label) => { if let Some(label) = opt_label { gate_feature_post!(&self, label_break_value, label.ident.span, "labels on blocks are unstable"); } } ast::ExprKind::Closure(_, ast::IsAsync::Async { .. }, ..) => { gate_feature_post!(&self, async_await, e.span, "async closures are unstable"); } ast::ExprKind::Async(..) => { gate_feature_post!(&self, async_await, e.span, "async blocks are unstable"); } _ => {} } visit::walk_expr(self, e); } fn visit_arm(&mut self, arm: &'a ast::Arm) { visit::walk_arm(self, arm) } fn visit_pat(&mut self, pattern: &'a ast::Pat) { match pattern.node { PatKind::Slice(_, Some(ref subslice), _) => { gate_feature_post!(&self, slice_patterns, subslice.span, "syntax for subslices in slice patterns is not yet stabilized"); } PatKind::Box(..) => { gate_feature_post!(&self, box_patterns, pattern.span, "box pattern syntax is experimental"); } PatKind::Range(_, _, Spanned { node: RangeEnd::Excluded, .. }) => { gate_feature_post!(&self, exclusive_range_pattern, pattern.span, "exclusive range pattern syntax is experimental"); } PatKind::Paren(..) => { gate_feature_post!(&self, pattern_parentheses, pattern.span, "parentheses in patterns are unstable"); } _ => {} } visit::walk_pat(self, pattern) } fn visit_fn(&mut self, fn_kind: FnKind<'a>, fn_decl: &'a ast::FnDecl, span: Span, _node_id: NodeId) { match fn_kind { FnKind::ItemFn(_, header, _, _) => { // check for const fn and async fn declarations if header.asyncness.is_async() { gate_feature_post!(&self, async_await, span, "async fn is unstable"); } if header.constness.node == ast::Constness::Const { gate_feature_post!(&self, const_fn, span, "const fn is unstable"); } // stability of const fn methods are covered in // visit_trait_item and visit_impl_item below; this is // because default methods don't pass through this // point. 
self.check_abi(header.abi, span); } FnKind::Method(_, sig, _, _) => { self.check_abi(sig.header.abi, span); } _ => {} } visit::walk_fn(self, fn_kind, fn_decl, span); } fn visit_trait_item(&mut self, ti: &'a ast::TraitItem) { match ti.node { ast::TraitItemKind::Method(ref sig, ref block) => { if block.is_none() { self.check_abi(sig.header.abi, ti.span); } if sig.header.constness.node == ast::Constness::Const { gate_feature_post!(&self, const_fn, ti.span, "const fn is unstable"); } } ast::TraitItemKind::Type(_, ref default) => { // We use three if statements instead of something like match guards so that all // of these errors can be emitted if all cases apply. if default.is_some() { gate_feature_post!(&self, associated_type_defaults, ti.span, "associated type defaults are unstable"); } if !ti.generics.params.is_empty() { gate_feature_post!(&self, generic_associated_types, ti.span, "generic associated types are unstable"); } if !ti.generics.where_clause.predicates.is_empty() { gate_feature_post!(&self, generic_associated_types, ti.span, "where clauses on associated types are unstable"); } } _ => {} } visit::walk_trait_item(self, ti); } fn visit_impl_item(&mut self, ii: &'a ast::ImplItem) { if ii.defaultness == ast::Defaultness::Default { gate_feature_post!(&self, specialization, ii.span, "specialization is unstable"); } match ii.node { ast::ImplItemKind::Method(ref sig, _) => { if sig.header.constness.node == ast::Constness::Const { gate_feature_post!(&self, const_fn, ii.span, "const fn is unstable"); } } ast::ImplItemKind::Existential(..) 
=> { gate_feature_post!( &self, existential_type, ii.span, "existential types are unstable" ); } ast::ImplItemKind::Type(_) if !ii.generics.params.is_empty() => { gate_feature_post!(&self, generic_associated_types, ii.span, "generic associated types are unstable"); } _ => {} } visit::walk_impl_item(self, ii); } fn visit_path(&mut self, path: &'a ast::Path, _id: NodeId) { for segment in &path.segments { // Identifiers we are going to check could come from a legacy macro (e.g. `#[test]`). // For such macros identifiers must have empty context, because this context is // used during name resolution and produced names must be unhygienic for compatibility. // On the other hand, we need the actual non-empty context for feature gate checking // because it's hygienic even for legacy macros. As previously stated, such context // cannot be kept in identifiers, so it's kept in paths instead and we take it from // there while keeping location info from the ident span. let span = segment.ident.span.with_ctxt(path.span.ctxt()); if segment.ident.name == keywords::Crate.name() { gate_feature_post!(&self, crate_in_paths, span, "`crate` in paths is experimental"); } else if segment.ident.name == keywords::Extern.name() { gate_feature_post!(&self, extern_in_paths, span, "`extern` in paths is experimental"); } } visit::walk_path(self, path); } fn visit_vis(&mut self, vis: &'a ast::Visibility) { if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.node { gate_feature_post!(&self, crate_visibility_modifier, vis.span, "`crate` visibility modifier is experimental"); } visit::walk_vis(self, vis); } } pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], crate_edition: Edition) -> Features { fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) { let mut err = struct_span_err!(span_handler, span, E0557, "feature has been removed"); if let Some(reason) = reason { err.span_note(span, reason); } err.emit(); } // Some features are 
known to be incomplete and using them is likely to have // unanticipated results, such as compiler crashes. We warn the user about these // to alert them. let incomplete_features = ["generic_associated_types"]; let mut features = Features::new(); let mut edition_enabled_features = FxHashMap(); for &edition in ALL_EDITIONS { if edition <= crate_edition { // The `crate_edition` implies its respective umbrella feature-gate // (i.e. `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX). edition_enabled_features.insert(Symbol::intern(edition.feature_name()), edition); } } for &(name, .., f_edition, set) in ACTIVE_FEATURES { if let Some(f_edition) = f_edition { if f_edition <= crate_edition { set(&mut features, DUMMY_SP); edition_enabled_features.insert(Symbol::intern(name), crate_edition); } } } // Process the edition umbrella feature-gates first, to ensure // `edition_enabled_features` is completed before it's queried. for attr in krate_attrs { if !attr.check_name("feature") { continue } let list = match attr.meta_item_list() { Some(list) => list, None => continue, }; for mi in list { let name = if let Some(word) = mi.word() { word.name() } else { continue }; if incomplete_features.iter().any(|f| *f == name.as_str()) { span_handler.struct_span_warn( mi.span, &format!( "the feature `{}` is incomplete and may cause the compiler to crash", name ) ).emit(); } if let Some(edition) = ALL_EDITIONS.iter().find(|e| name == e.feature_name()) { if *edition <= crate_edition { continue; } for &(name, .., f_edition, set) in ACTIVE_FEATURES { if let Some(f_edition) = f_edition { if f_edition <= *edition { // FIXME(Manishearth) there is currently no way to set // lib features by edition set(&mut features, DUMMY_SP); edition_enabled_features.insert(Symbol::intern(name), *edition); } } } } } } for attr in krate_attrs { if !attr.check_name("feature") { continue } let list = match attr.meta_item_list() { Some(list) => list, None => { span_err!(span_handler, attr.span, E0555, 
"malformed feature attribute, expected #![feature(...)]"); continue } }; for mi in list { let name = if let Some(word) = mi.word() { word.name() } else { span_err!(span_handler, mi.span, E0556, "malformed feature, expected just one word"); continue }; if let Some(edition) = edition_enabled_features.get(&name) { struct_span_warn!( span_handler, mi.span, E0705, "the feature `{}` is included in the Rust {} edition", name, edition, ).emit(); continue; } if ALL_EDITIONS.iter().any(|e| name == e.feature_name()) { // Handled in the separate loop above. continue; } if let Some((.., set)) = ACTIVE_FEATURES.iter().find(|f| name == f.0) { set(&mut features, mi.span); features.declared_lang_features.push((name, mi.span, None)); continue } let removed = REMOVED_FEATURES.iter().find(|f| name == f.0); let stable_removed = STABLE_REMOVED_FEATURES.iter().find(|f| name == f.0); if let Some((.., reason)) = removed.or(stable_removed) { feature_removed(span_handler, mi.span, *reason); continue } if let Some((_, since, ..)) = ACCEPTED_FEATURES.iter().find(|f| name == f.0) { let since = Some(Symbol::intern(since)); features.declared_lang_features.push((name, mi.span, since)); continue } features.declared_lib_features.push((name, mi.span)); } } features } pub fn check_crate(krate: &ast::Crate, sess: &ParseSess, features: &Features, plugin_attributes: &[(String, AttributeType)], unstable: UnstableFeatures) { maybe_stage_features(&sess.span_diagnostic, krate, unstable); let ctx = Context { features, parse_sess: sess, plugin_attributes, }; let visitor = &mut PostExpansionVisitor { context: &ctx }; visitor.whole_crate_feature_gates(krate); visit::walk_crate(visitor, krate); } #[derive(Clone, Copy, Hash)] pub enum UnstableFeatures { /// Hard errors for unstable features are active, as on /// beta/stable channels. Disallow, /// Allow features to be activated, as on nightly. Allow, /// Errors are bypassed for bootstrapping. 
This is required any time /// during the build that feature-related lints are set to warn or above /// because the build turns on warnings-as-errors and uses lots of unstable /// features. As a result, this is always required for building Rust itself. Cheat } impl UnstableFeatures { pub fn from_environment() -> UnstableFeatures { // Whether this is a feature-staged build, i.e. on the beta or stable channel let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some(); // Whether we should enable unstable features for bootstrapping let bootstrap = env::var("RUSTC_BOOTSTRAP").is_ok(); match (disable_unstable_features, bootstrap) { (_, true) => UnstableFeatures::Cheat, (true, _) => UnstableFeatures::Disallow, (false, _) => UnstableFeatures::Allow } } pub fn is_nightly_build(&self) -> bool { match *self { UnstableFeatures::Allow | UnstableFeatures::Cheat => true, _ => false, } } } fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) { let allow_features = match unstable { UnstableFeatures::Allow => true, UnstableFeatures::Disallow => false, UnstableFeatures::Cheat => true }; if !allow_features { for attr in &krate.attrs { if attr.check_name("feature") { let release_channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)"); span_err!(span_handler, attr.span, E0554, "#![feature] may not be used on the {} release channel", release_channel); } } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/rc_vec.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fmt; use std::ops::{Deref, Range}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult}; use rustc_data_structures::sync::Lrc; #[derive(Clone)] pub struct RcVec<T> { data: Lrc<Vec<T>>, offset: u32, len: u32, } impl<T> RcVec<T> { pub fn new(mut vec: Vec<T>) -> Self { // By default, constructing RcVec from Vec gives it just enough capacity // to hold the initial elements. Callers that anticipate needing to // extend the vector may prefer RcVec::new_preserving_capacity. vec.shrink_to_fit(); Self::new_preserving_capacity(vec) } pub fn new_preserving_capacity(vec: Vec<T>) -> Self { RcVec { offset: 0, len: vec.len() as u32, data: Lrc::new(vec), } } pub fn sub_slice(&self, range: Range<usize>) -> Self { RcVec { data: self.data.clone(), offset: self.offset + range.start as u32, len: (range.end - range.start) as u32, } } /// If this RcVec has exactly one strong reference, returns ownership of the /// underlying vector. Otherwise returns self unmodified. pub fn try_unwrap(self) -> Result<Vec<T>, Self> { match Lrc::try_unwrap(self.data) { // If no other RcVec shares ownership of this data. Ok(mut vec) => { // Drop any elements after our view of the data. vec.truncate(self.offset as usize + self.len as usize); // Drop any elements before our view of the data. Do this after // the `truncate` so that elements past the end of our view do // not need to be copied around. vec.drain(..self.offset as usize); Ok(vec) } // If the data is shared. 
Err(data) => Err(RcVec { data, ..self }), } } } impl<T> Deref for RcVec<T> { type Target = [T]; fn deref(&self) -> &[T] { &self.data[self.offset as usize..(self.offset + self.len) as usize] } } impl<T: fmt::Debug> fmt::Debug for RcVec<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(self.deref(), f) } } impl<CTX, T> HashStable<CTX> for RcVec<T> where T: HashStable<CTX>, { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(hcx, hasher); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/node_count.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Simply gives a rough count of the number of nodes in an AST.

use visit::*;
use ast::*;
use syntax_pos::Span;

/// AST visitor that counts visited nodes. Walk it over a crate (or any
/// subtree) and read `count` afterwards for an approximate node total.
pub struct NodeCounter {
    /// Running total of nodes seen so far.
    pub count: usize,
}

impl NodeCounter {
    /// Creates a counter starting at zero.
    pub fn new() -> NodeCounter {
        NodeCounter {
            count: 0,
        }
    }
}

// Every override follows the same pattern: bump the count for the node
// itself, then delegate to the default `walk_*` function so children are
// visited (and counted) too. `visit_attribute` has no walk counterpart, so
// it only increments.
impl<'ast> Visitor<'ast> for NodeCounter {
    fn visit_ident(&mut self, ident: Ident) {
        self.count += 1;
        walk_ident(self, ident);
    }
    fn visit_mod(&mut self, m: &Mod, _s: Span, _a: &[Attribute], _n: NodeId) {
        self.count += 1;
        walk_mod(self, m)
    }
    fn visit_foreign_item(&mut self, i: &ForeignItem) {
        self.count += 1;
        walk_foreign_item(self, i)
    }
    fn visit_item(&mut self, i: &Item) {
        self.count += 1;
        walk_item(self, i)
    }
    fn visit_local(&mut self, l: &Local) {
        self.count += 1;
        walk_local(self, l)
    }
    fn visit_block(&mut self, b: &Block) {
        self.count += 1;
        walk_block(self, b)
    }
    fn visit_stmt(&mut self, s: &Stmt) {
        self.count += 1;
        walk_stmt(self, s)
    }
    fn visit_arm(&mut self, a: &Arm) {
        self.count += 1;
        walk_arm(self, a)
    }
    fn visit_pat(&mut self, p: &Pat) {
        self.count += 1;
        walk_pat(self, p)
    }
    fn visit_expr(&mut self, ex: &Expr) {
        self.count += 1;
        walk_expr(self, ex)
    }
    fn visit_ty(&mut self, t: &Ty) {
        self.count += 1;
        walk_ty(self, t)
    }
    fn visit_generic_param(&mut self, param: &GenericParam) {
        self.count += 1;
        walk_generic_param(self, param)
    }
    fn visit_generics(&mut self, g: &Generics) {
        self.count += 1;
        walk_generics(self, g)
    }
    fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, s: Span, _: NodeId) {
        self.count += 1;
        walk_fn(self, fk, fd, s)
    }
    fn visit_trait_item(&mut self, ti: &TraitItem) {
        self.count += 1;
        walk_trait_item(self, ti)
    }
    fn visit_impl_item(&mut self, ii: &ImplItem) {
        self.count += 1;
        walk_impl_item(self, ii)
    }
    fn visit_trait_ref(&mut self, t: &TraitRef) {
        self.count += 1;
        walk_trait_ref(self, t)
    }
    fn visit_param_bound(&mut self, bounds: &GenericBound) {
        self.count += 1;
        walk_param_bound(self, bounds)
    }
    fn visit_poly_trait_ref(&mut self, t: &PolyTraitRef, m: &TraitBoundModifier) {
        self.count += 1;
        walk_poly_trait_ref(self, t, m)
    }
    fn visit_variant_data(&mut self, s: &VariantData, _: Ident, _: &Generics, _: NodeId, _: Span) {
        self.count += 1;
        walk_struct_def(self, s)
    }
    fn visit_struct_field(&mut self, s: &StructField) {
        self.count += 1;
        walk_struct_field(self, s)
    }
    fn visit_enum_def(&mut self, enum_definition: &EnumDef,
                      generics: &Generics, item_id: NodeId, _: Span) {
        self.count += 1;
        walk_enum_def(self, enum_definition, generics, item_id)
    }
    fn visit_variant(&mut self, v: &Variant, g: &Generics, item_id: NodeId) {
        self.count += 1;
        walk_variant(self, v, g, item_id)
    }
    fn visit_lifetime(&mut self, lifetime: &Lifetime) {
        self.count += 1;
        walk_lifetime(self, lifetime)
    }
    fn visit_mac(&mut self, _mac: &Mac) {
        self.count += 1;
        walk_mac(self, _mac)
    }
    fn visit_path(&mut self, path: &Path, _id: NodeId) {
        self.count += 1;
        walk_path(self, path)
    }
    fn visit_use_tree(&mut self, use_tree: &UseTree, id: NodeId, _nested: bool) {
        self.count += 1;
        walk_use_tree(self, use_tree, id)
    }
    fn visit_generic_args(&mut self, path_span: Span, generic_args: &GenericArgs) {
        self.count += 1;
        walk_generic_args(self, path_span, generic_args)
    }
    fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) {
        self.count += 1;
        walk_assoc_type_binding(self, type_binding)
    }
    fn visit_attribute(&mut self, _attr: &Attribute) {
        self.count += 1;
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/rc_slice.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fmt; use std::ops::{Deref, Range}; use rustc_data_structures::sync::Lrc; use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult, HashStable}; #[derive(Clone)] pub struct RcSlice<T> { data: Lrc<Box<[T]>>, offset: u32, len: u32, } impl<T> RcSlice<T> { pub fn new(vec: Vec<T>) -> Self { RcSlice { offset: 0, len: vec.len() as u32, data: Lrc::new(vec.into_boxed_slice()), } } pub fn sub_slice(&self, range: Range<usize>) -> Self { RcSlice { data: self.data.clone(), offset: self.offset + range.start as u32, len: (range.end - range.start) as u32, } } } impl<T> Deref for RcSlice<T> { type Target = [T]; fn deref(&self) -> &[T] { &self.data[self.offset as usize .. (self.offset + self.len) as usize] } } impl<T: fmt::Debug> fmt::Debug for RcSlice<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(self.deref(), f) } } impl<CTX, T> HashStable<CTX> for RcSlice<T> where T: HashStable<CTX> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(hcx, hasher); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/parser_testing.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast::{self, Ident};
use source_map::FilePathMapping;
use parse::{ParseSess, PResult, source_file_to_stream};
use parse::{lexer, new_parser_from_source_str};
use parse::parser::Parser;
use ptr::P;
use tokenstream::TokenStream;

use std::iter::Peekable;
use std::path::PathBuf;

/// Lexes `source_str` into a token stream, under a made-up filename.
pub fn string_to_stream(source_str: String) -> TokenStream {
    let ps = ParseSess::new(FilePathMapping::empty());
    let source_file = ps.source_map()
        .new_source_file(PathBuf::from("bogofile").into(), source_str);
    source_file_to_stream(&ps, source_file, None)
}

/// Builds a parser over `source_str` (via its token stream), under a
/// made-up filename.
pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> {
    new_parser_from_source_str(ps, PathBuf::from("bogofile").into(), source_str)
}

/// Runs `f` on a parser over `s`, panicking (via `panictry!`) on parse
/// errors and aborting if any diagnostics were emitted.
fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T
where
    F: FnOnce(&mut Parser<'a>) -> PResult<'a, T>,
{
    let mut parser = string_to_parser(&ps, s);
    let result = panictry!(f(&mut parser));
    parser.abort_if_errors();
    result
}

/// Parses a string into a crate.
pub fn string_to_crate(source_str: String) -> ast::Crate {
    let ps = ParseSess::new(FilePathMapping::empty());
    with_error_checking_parse(source_str, &ps, |p| p.parse_crate_mod())
}

/// Parses a string into an expression.
pub fn string_to_expr(source_str: String) -> P<ast::Expr> {
    let ps = ParseSess::new(FilePathMapping::empty());
    with_error_checking_parse(source_str, &ps, |p| p.parse_expr())
}

/// Parses a string into an item.
pub fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
    let ps = ParseSess::new(FilePathMapping::empty());
    with_error_checking_parse(source_str, &ps, |p| p.parse_item())
}

/// Parses a string into a pattern. Uses "irrefutable"... which doesn't
/// (currently) affect parsing.
pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
    let ps = ParseSess::new(FilePathMapping::empty());
    with_error_checking_parse(source_str, &ps, |p| p.parse_pat())
}

/// Converts a vector of strings to a vector of `Ident`s.
pub fn strs_to_idents(ids: Vec<&str>) -> Vec<Ident> {
    ids.into_iter().map(Ident::from_str).collect()
}

/// Does the given string match the pattern? whitespace in the first string
/// may be deleted or replaced with other whitespace to match the pattern.
/// This function is relatively Unicode-ignorant; fortunately, the careful design
/// of UTF-8 mitigates this ignorance. It doesn't do NKF-normalization(?).
pub fn matches_codepattern(a : &str, b : &str) -> bool { let mut a_iter = a.chars().peekable(); let mut b_iter = b.chars().peekable(); loop { let (a, b) = match (a_iter.peek(), b_iter.peek()) { (None, None) => return true, (None, _) => return false, (Some(&a), None) => { if is_pattern_whitespace(a) { break // trailing whitespace check is out of loop for borrowck } else { return false } } (Some(&a), Some(&b)) => (a, b) }; if is_pattern_whitespace(a) && is_pattern_whitespace(b) { // skip whitespace for a and b scan_for_non_ws_or_end(&mut a_iter); scan_for_non_ws_or_end(&mut b_iter); } else if is_pattern_whitespace(a) { // skip whitespace for a scan_for_non_ws_or_end(&mut a_iter); } else if a == b { a_iter.next(); b_iter.next(); } else { return false } } // check if a has *only* trailing whitespace a_iter.all(is_pattern_whitespace) } /// Advances the given peekable `Iterator` until it reaches a non-whitespace character fn scan_for_non_ws_or_end<I: Iterator<Item= char>>(iter: &mut Peekable<I>) { while lexer::is_pattern_whitespace(iter.peek().cloned()) { iter.next(); } } pub fn is_pattern_whitespace(c: char) -> bool { lexer::is_pattern_whitespace(Some(c)) } #[cfg(test)] mod tests { use super::*; #[test] fn eqmodws() { assert_eq!(matches_codepattern("",""),true); assert_eq!(matches_codepattern("","a"),false); assert_eq!(matches_codepattern("a",""),false); assert_eq!(matches_codepattern("a","a"),true); assert_eq!(matches_codepattern("a b","a \n\t\r b"),true); assert_eq!(matches_codepattern("a b ","a \n\t\r b"),true); assert_eq!(matches_codepattern("a b","a \n\t\r b "),false); assert_eq!(matches_codepattern("a b","a b"),true); assert_eq!(matches_codepattern("ab","a b"),false); assert_eq!(matches_codepattern("a b","ab"),true); assert_eq!(matches_codepattern(" a b","ab"),true); } #[test] fn pattern_whitespace() { assert_eq!(matches_codepattern("","\x0C"), false); assert_eq!(matches_codepattern("a b ","a \u{0085}\n\t\r b"),true); assert_eq!(matches_codepattern("a b","a 
\u{0085}\n\t\r b "),false); } #[test] fn non_pattern_whitespace() { // These have the property 'White_Space' but not 'Pattern_White_Space' assert_eq!(matches_codepattern("a b","a\u{2002}b"), false); assert_eq!(matches_codepattern("a b","a\u{2002}b"), false); assert_eq!(matches_codepattern("\u{205F}a b","ab"), false); assert_eq!(matches_codepattern("a \u{3000}b","ab"), false); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/lev_distance.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::cmp; use symbol::Symbol; /// To find the Levenshtein distance between two strings pub fn lev_distance(a: &str, b: &str) -> usize { // cases which don't require further computation if a.is_empty() { return b.chars().count(); } else if b.is_empty() { return a.chars().count(); } let mut dcol: Vec<_> = (0..b.len() + 1).collect(); let mut t_last = 0; for (i, sc) in a.chars().enumerate() { let mut current = i; dcol[0] = current + 1; for (j, tc) in b.chars().enumerate() { let next = dcol[j + 1]; if sc == tc { dcol[j + 1] = current; } else { dcol[j + 1] = cmp::min(current, next); dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; } current = next; t_last = j; } } dcol[t_last + 1] } /// To find the best match for a given string from an iterator of names /// As a loose rule to avoid the obviously incorrect suggestions, it takes /// an optional limit for the maximum allowable edit distance, which defaults /// to one-third of the given word. /// Besides Levenshtein, we use case insensitive comparison to improve accuracy on an edge case with /// a lower(upper)case letters mismatch. 
pub fn find_best_match_for_name<'a, T>(iter_names: T, lookup: &str, dist: Option<usize>) -> Option<Symbol> where T: Iterator<Item = &'a Symbol> { let max_dist = dist.map_or_else(|| cmp::max(lookup.len(), 3) / 3, |d| d); let (case_insensitive_match, levenstein_match) = iter_names .filter_map(|&name| { let dist = lev_distance(lookup, &name.as_str()); if dist <= max_dist { Some((name, dist)) } else { None } }) // Here we are collecting the next structure: // (case_insensitive_match, (levenstein_match, levenstein_distance)) .fold((None, None), |result, (candidate, dist)| { ( if candidate.as_str().to_uppercase() == lookup.to_uppercase() { Some(candidate) } else { result.0 }, match result.1 { None => Some((candidate, dist)), Some((c, d)) => Some(if dist < d { (candidate, dist) } else { (c, d) }) } ) }); if let Some(candidate) = case_insensitive_match { Some(candidate) // exact case insensitive match has a higher priority } else { if let Some((candidate, _)) = levenstein_match { Some(candidate) } else { None } } } #[test] fn test_lev_distance() { use std::char::{from_u32, MAX}; // Test bytelength agnosticity for c in (0..MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { assert_eq!(lev_distance(&c[..], &c[..]), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; let b = "\nMary häd ä little lämb\n\nLittle lämb\n"; let c = "Mary häd ä little lämb\n\nLittle lämb\n"; assert_eq!(lev_distance(a, b), 1); assert_eq!(lev_distance(b, a), 1); assert_eq!(lev_distance(a, c), 2); assert_eq!(lev_distance(c, a), 2); assert_eq!(lev_distance(b, c), 1); assert_eq!(lev_distance(c, b), 1); }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/move_map.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::ptr;

use OneVector;

/// In-place map over owned containers: each element is moved out, passed to
/// the mapping function, and the results are written back, reusing the
/// original allocation whenever the output fits.
pub trait MoveMap<T>: Sized {
    /// One-to-one map; implemented on top of `move_flat_map`.
    fn move_map<F>(self, mut f: F) -> Self where F: FnMut(T) -> T {
        self.move_flat_map(|e| Some(f(e)))
    }

    /// Maps each element to zero or more elements.
    fn move_flat_map<F, I>(self, f: F) -> Self
        where F: FnMut(T) -> I,
              I: IntoIterator<Item=T>;
}

impl<T> MoveMap<T> for Vec<T> {
    fn move_flat_map<F, I>(mut self, mut f: F) -> Self
        where F: FnMut(T) -> I,
              I: IntoIterator<Item=T> {
        // `read_i` is the index of the next original element to consume;
        // `write_i` is the index where the next mapped element lands.
        // Invariant: write_i <= read_i, so writes never clobber unread data.
        let mut read_i = 0;
        let mut write_i = 0;
        // SAFETY: the length is set to 0 up front so a panic in `f` (or in
        // the iterator it returns) leaks the not-yet-moved elements instead
        // of double-dropping them. `ptr::read`/`ptr::write` only touch
        // indices < old_len within the existing allocation, and the true
        // length (`write_i`) is restored before returning.
        unsafe {
            let mut old_len = self.len();
            self.set_len(0); // make sure we just leak elements in case of panic

            while read_i < old_len {
                // move the read_i'th item out of the vector and map it
                // to an iterator
                let e = ptr::read(self.get_unchecked(read_i));
                let iter = f(e).into_iter();
                read_i += 1;

                for e in iter {
                    if write_i < read_i {
                        ptr::write(self.get_unchecked_mut(write_i), e);
                        write_i += 1;
                    } else {
                        // If this is reached we ran out of space
                        // in the middle of the vector.
                        // However, the vector is in a valid state here,
                        // so we just do a somewhat inefficient insert.
                        self.set_len(old_len);
                        self.insert(write_i, e);

                        old_len = self.len();
                        self.set_len(0);

                        read_i += 1;
                        write_i += 1;
                    }
                }
            }

            // write_i tracks the number of actually written new items.
            self.set_len(write_i);
        }

        self
    }
}

impl<T> MoveMap<T> for ::ptr::P<[T]> {
    // P<[T]> cannot be mutated in place, so round-trip through Vec.
    fn move_flat_map<F, I>(self, f: F) -> Self
        where F: FnMut(T) -> I,
              I: IntoIterator<Item=T> {
        ::ptr::P::from_vec(self.into_vec().move_flat_map(f))
    }
}

impl<T> MoveMap<T> for OneVector<T> {
    // Identical algorithm to the Vec<T> impl above; OneVector exposes the
    // same unsafe length/element primitives. See that impl for the safety
    // argument.
    fn move_flat_map<F, I>(mut self, mut f: F) -> Self
        where F: FnMut(T) -> I,
              I: IntoIterator<Item=T> {
        let mut read_i = 0;
        let mut write_i = 0;
        // SAFETY: same reasoning as the Vec<T> impl — length is zeroed so a
        // panic leaks rather than double-drops, raw accesses stay below
        // old_len, and the real length is restored before returning.
        unsafe {
            let mut old_len = self.len();
            self.set_len(0); // make sure we just leak elements in case of panic

            while read_i < old_len {
                // move the read_i'th item out of the vector and map it
                // to an iterator
                let e = ptr::read(self.get_unchecked(read_i));
                let iter = f(e).into_iter();
                read_i += 1;

                for e in iter {
                    if write_i < read_i {
                        ptr::write(self.get_unchecked_mut(write_i), e);
                        write_i += 1;
                    } else {
                        // If this is reached we ran out of space
                        // in the middle of the vector.
                        // However, the vector is in a valid state here,
                        // so we just do a somewhat inefficient insert.
                        self.set_len(old_len);
                        self.insert(write_i, e);

                        old_len = self.len();
                        self.set_len(0);

                        read_i += 1;
                        write_i += 1;
                    }
                }
            }

            // write_i tracks the number of actually written new items.
            self.set_len(write_i);
        }

        self
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/util/parser.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use parse::token::{Token, BinOpToken}; use symbol::keywords; use ast::{self, BinOpKind}; /// Associative operator with precedence. /// /// This is the enum which specifies operator precedence and fixity to the parser. #[derive(PartialEq, Debug)] pub enum AssocOp { /// `+` Add, /// `-` Subtract, /// `*` Multiply, /// `/` Divide, /// `%` Modulus, /// `&&` LAnd, /// `||` LOr, /// `^` BitXor, /// `&` BitAnd, /// `|` BitOr, /// `<<` ShiftLeft, /// `>>` ShiftRight, /// `==` Equal, /// `<` Less, /// `<=` LessEqual, /// `!=` NotEqual, /// `>` Greater, /// `>=` GreaterEqual, /// `=` Assign, /// `<-` ObsoleteInPlace, /// `?=` where ? 
is one of the BinOpToken AssignOp(BinOpToken), /// `as` As, /// `..` range DotDot, /// `..=` range DotDotEq, /// `:` Colon, } #[derive(PartialEq, Debug)] pub enum Fixity { /// The operator is left-associative Left, /// The operator is right-associative Right, /// The operator is not associative None } impl AssocOp { /// Create a new AssocOP from a token pub fn from_token(t: &Token) -> Option<AssocOp> { use self::AssocOp::*; match *t { Token::BinOpEq(k) => Some(AssignOp(k)), Token::LArrow => Some(ObsoleteInPlace), Token::Eq => Some(Assign), Token::BinOp(BinOpToken::Star) => Some(Multiply), Token::BinOp(BinOpToken::Slash) => Some(Divide), Token::BinOp(BinOpToken::Percent) => Some(Modulus), Token::BinOp(BinOpToken::Plus) => Some(Add), Token::BinOp(BinOpToken::Minus) => Some(Subtract), Token::BinOp(BinOpToken::Shl) => Some(ShiftLeft), Token::BinOp(BinOpToken::Shr) => Some(ShiftRight), Token::BinOp(BinOpToken::And) => Some(BitAnd), Token::BinOp(BinOpToken::Caret) => Some(BitXor), Token::BinOp(BinOpToken::Or) => Some(BitOr), Token::Lt => Some(Less), Token::Le => Some(LessEqual), Token::Ge => Some(GreaterEqual), Token::Gt => Some(Greater), Token::EqEq => Some(Equal), Token::Ne => Some(NotEqual), Token::AndAnd => Some(LAnd), Token::OrOr => Some(LOr), Token::DotDot => Some(DotDot), Token::DotDotEq => Some(DotDotEq), // DotDotDot is no longer supported, but we need some way to display the error Token::DotDotDot => Some(DotDotEq), Token::Colon => Some(Colon), _ if t.is_keyword(keywords::As) => Some(As), _ => None } } /// Create a new AssocOp from ast::BinOpKind. 
pub fn from_ast_binop(op: BinOpKind) -> Self { use self::AssocOp::*; match op { BinOpKind::Lt => Less, BinOpKind::Gt => Greater, BinOpKind::Le => LessEqual, BinOpKind::Ge => GreaterEqual, BinOpKind::Eq => Equal, BinOpKind::Ne => NotEqual, BinOpKind::Mul => Multiply, BinOpKind::Div => Divide, BinOpKind::Rem => Modulus, BinOpKind::Add => Add, BinOpKind::Sub => Subtract, BinOpKind::Shl => ShiftLeft, BinOpKind::Shr => ShiftRight, BinOpKind::BitAnd => BitAnd, BinOpKind::BitXor => BitXor, BinOpKind::BitOr => BitOr, BinOpKind::And => LAnd, BinOpKind::Or => LOr } } /// Gets the precedence of this operator pub fn precedence(&self) -> usize { use self::AssocOp::*; match *self { As | Colon => 14, Multiply | Divide | Modulus => 13, Add | Subtract => 12, ShiftLeft | ShiftRight => 11, BitAnd => 10, BitXor => 9, BitOr => 8, Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => 7, LAnd => 6, LOr => 5, DotDot | DotDotEq => 4, ObsoleteInPlace => 3, Assign | AssignOp(_) => 2, } } /// Gets the fixity of this operator pub fn fixity(&self) -> Fixity { use self::AssocOp::*; // NOTE: it is a bug to have an operators that has same precedence but different fixities! 
match *self { ObsoleteInPlace | Assign | AssignOp(_) => Fixity::Right, As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | LAnd | LOr | Colon => Fixity::Left, DotDot | DotDotEq => Fixity::None } } pub fn is_comparison(&self) -> bool { use self::AssocOp::*; match *self { Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual => true, ObsoleteInPlace | Assign | AssignOp(_) | As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq | Colon => false } } pub fn is_assign_like(&self) -> bool { use self::AssocOp::*; match *self { Assign | AssignOp(_) | ObsoleteInPlace => true, Less | Greater | LessEqual | GreaterEqual | Equal | NotEqual | As | Multiply | Divide | Modulus | Add | Subtract | ShiftLeft | ShiftRight | BitAnd | BitXor | BitOr | LAnd | LOr | DotDot | DotDotEq | Colon => false } } pub fn to_ast_binop(&self) -> Option<BinOpKind> { use self::AssocOp::*; match *self { Less => Some(BinOpKind::Lt), Greater => Some(BinOpKind::Gt), LessEqual => Some(BinOpKind::Le), GreaterEqual => Some(BinOpKind::Ge), Equal => Some(BinOpKind::Eq), NotEqual => Some(BinOpKind::Ne), Multiply => Some(BinOpKind::Mul), Divide => Some(BinOpKind::Div), Modulus => Some(BinOpKind::Rem), Add => Some(BinOpKind::Add), Subtract => Some(BinOpKind::Sub), ShiftLeft => Some(BinOpKind::Shl), ShiftRight => Some(BinOpKind::Shr), BitAnd => Some(BinOpKind::BitAnd), BitXor => Some(BinOpKind::BitXor), BitOr => Some(BinOpKind::BitOr), LAnd => Some(BinOpKind::And), LOr => Some(BinOpKind::Or), ObsoleteInPlace | Assign | AssignOp(_) | As | DotDot | DotDotEq | Colon => None } } } pub const PREC_RESET: i8 = -100; pub const PREC_CLOSURE: i8 = -40; pub const PREC_JUMP: i8 = -30; pub const PREC_RANGE: i8 = -10; // The range 2 ... 14 is reserved for AssocOp binary operator precedences. 
pub const PREC_PREFIX: i8 = 50; pub const PREC_POSTFIX: i8 = 60; pub const PREC_PAREN: i8 = 99; pub const PREC_FORCE_PAREN: i8 = 100; #[derive(Debug, Clone, Copy)] pub enum ExprPrecedence { Closure, Break, Continue, Ret, Yield, Range, Binary(BinOpKind), ObsoleteInPlace, Cast, Type, Assign, AssignOp, Box, AddrOf, Unary, Call, MethodCall, Field, Index, Try, InlineAsm, Mac, Array, Repeat, Tup, Lit, Path, Paren, If, IfLet, While, WhileLet, ForLoop, Loop, Match, Block, Catch, Struct, Async, } impl ExprPrecedence { pub fn order(self) -> i8 { match self { ExprPrecedence::Closure => PREC_CLOSURE, ExprPrecedence::Break | ExprPrecedence::Continue | ExprPrecedence::Ret | ExprPrecedence::Yield => PREC_JUMP, // `Range` claims to have higher precedence than `Assign`, but `x .. x = x` fails to // parse, instead of parsing as `(x .. x) = x`. Giving `Range` a lower precedence // ensures that `pprust` will add parentheses in the right places to get the desired // parse. ExprPrecedence::Range => PREC_RANGE, // Binop-like expr kinds, handled by `AssocOp`. 
ExprPrecedence::Binary(op) => AssocOp::from_ast_binop(op).precedence() as i8, ExprPrecedence::ObsoleteInPlace => AssocOp::ObsoleteInPlace.precedence() as i8, ExprPrecedence::Cast => AssocOp::As.precedence() as i8, ExprPrecedence::Type => AssocOp::Colon.precedence() as i8, ExprPrecedence::Assign | ExprPrecedence::AssignOp => AssocOp::Assign.precedence() as i8, // Unary, prefix ExprPrecedence::Box | ExprPrecedence::AddrOf | ExprPrecedence::Unary => PREC_PREFIX, // Unary, postfix ExprPrecedence::Call | ExprPrecedence::MethodCall | ExprPrecedence::Field | ExprPrecedence::Index | ExprPrecedence::Try | ExprPrecedence::InlineAsm | ExprPrecedence::Mac => PREC_POSTFIX, // Never need parens ExprPrecedence::Array | ExprPrecedence::Repeat | ExprPrecedence::Tup | ExprPrecedence::Lit | ExprPrecedence::Path | ExprPrecedence::Paren | ExprPrecedence::If | ExprPrecedence::IfLet | ExprPrecedence::While | ExprPrecedence::WhileLet | ExprPrecedence::ForLoop | ExprPrecedence::Loop | ExprPrecedence::Match | ExprPrecedence::Block | ExprPrecedence::Catch | ExprPrecedence::Async | ExprPrecedence::Struct => PREC_PAREN, } } } /// Expressions that syntactically contain an "exterior" struct literal i.e. not surrounded by any /// parens or other delimiters, e.g. `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and /// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not. pub fn contains_exterior_struct_lit(value: &ast::Expr) -> bool { match value.node { ast::ExprKind::Struct(..) 
=> true, ast::ExprKind::Assign(ref lhs, ref rhs) | ast::ExprKind::AssignOp(_, ref lhs, ref rhs) | ast::ExprKind::Binary(_, ref lhs, ref rhs) => { // X { y: 1 } + X { y: 2 } contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs) } ast::ExprKind::Unary(_, ref x) | ast::ExprKind::Cast(ref x, _) | ast::ExprKind::Type(ref x, _) | ast::ExprKind::Field(ref x, _) | ast::ExprKind::Index(ref x, _) => { // &X { y: 1 }, X { y: 1 }.y contains_exterior_struct_lit(&x) } ast::ExprKind::MethodCall(.., ref exprs) => { // X { y: 1 }.bar(...) contains_exterior_struct_lit(&exprs[0]) } _ => false, } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/derive.rs
// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use attr::HasAttrs; use ast; use source_map::{hygiene, ExpnInfo, ExpnFormat}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; use std::collections::HashSet; pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> { let mut result = Vec::new(); attrs.retain(|attr| { if attr.path != "derive" { return true; } match attr.parse_list(cx.parse_sess, |parser| parser.parse_path_allowing_meta(PathStyle::Mod)) { Ok(ref traits) if traits.is_empty() => { cx.span_warn(attr.span, "empty trait list in `derive`"); false } Ok(traits) => { result.extend(traits); true } Err(mut e) => { e.emit(); false } } }); result } pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T where T: HasAttrs, { let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned()); for (i, path) in traits.iter().enumerate() { if i > 0 { pretty_name.push_str(", "); } pretty_name.push_str(&path.to_string()); names.insert(unwrap_or!(path.segments.get(0), continue).ident.name); } pretty_name.push(')'); cx.current_expansion.mark.set_expn_info(ExpnInfo { call_site: span, def_site: None, format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)), allow_internal_unstable: true, allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); let span = span.with_ctxt(cx.backtrace()); item.map_attrs(|mut attrs| { if names.contains(&Symbol::intern("Eq")) && 
names.contains(&Symbol::intern("PartialEq")) { let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } if names.contains(&Symbol::intern("Copy")) { let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); } attrs }) }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/quote.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty}; use source_map::respan; use syntax_pos::Span; use ext::base::ExtCtxt; use ext::base; use ext::build::AstBuilder; use parse::parser::{Parser, PathStyle}; use parse::token; use ptr::P; use tokenstream::{TokenStream, TokenTree}; /// Quasiquoting works via token trees. /// /// This is registered as a set of expression syntax extension called quote! /// that lifts its argument token-tree to an AST representing the /// construction of the same token tree, with `token::SubstNt` interpreted /// as antiquotes (splices). pub mod rt { use ast; use source_map::Spanned; use ext::base::ExtCtxt; use parse::{self, classify}; use parse::token::{self, Token}; use ptr::P; use symbol::Symbol; use ThinVec; use tokenstream::{self, TokenTree, TokenStream}; pub use parse::new_parser_from_tts; pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName}; pub use source_map::{dummy_spanned}; pub trait ToTokens { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>; } impl ToTokens for TokenTree { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { vec![self.clone()] } } impl<T: ToTokens> ToTokens for Vec<T> { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { self.iter().flat_map(|t| t.to_tokens(cx)).collect() } } impl<T: ToTokens> ToTokens for Spanned<T> { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { // FIXME: use the span? 
self.node.to_tokens(cx) } } impl<T: ToTokens> ToTokens for Option<T> { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { match *self { Some(ref t) => t.to_tokens(cx), None => Vec::new(), } } } impl ToTokens for ast::Ident { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { vec![TokenTree::Token(self.span, Token::from_ast_ident(*self))] } } impl ToTokens for ast::Path { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtPath(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Ty { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtTy(P(self.clone())); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Block { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtBlock(P(self.clone())); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Generics { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtGenerics(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::WhereClause { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtWhereClause(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P<ast::Item> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtItem(self.clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::ImplItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtImplItem(self.clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P<ast::ImplItem> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtImplItem((**self).clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::TraitItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = 
token::NtTraitItem(self.clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Stmt { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtStmt(self.clone()); let mut tts = vec![TokenTree::Token(self.span, Token::interpolated(nt))]; // Some statements require a trailing semicolon. if classify::stmt_ends_with_semi(&self.node) { tts.push(TokenTree::Token(self.span, token::Semi)); } tts } } impl ToTokens for P<ast::Expr> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtExpr(self.clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P<ast::Pat> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtPat(self.clone()); vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Arm { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtArm(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Arg { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtArg(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P<ast::Block> { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtBlock(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Lifetime { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { vec![TokenTree::Token(self.ident.span, token::Lifetime(self.ident))] } } macro_rules! impl_to_tokens_slice { ($t: ty, $sep: expr) => { impl ToTokens for [$t] { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let mut v = vec![]; for (i, x) in self.iter().enumerate() { if i > 0 { v.extend_from_slice(&$sep); } v.extend(x.to_tokens(cx)); } v } } }; } impl_to_tokens_slice! { ast::Ty, [TokenTree::Token(DUMMY_SP, token::Comma)] } impl_to_tokens_slice! { P<ast::Item>, [] } impl_to_tokens_slice! 
{ ast::Arg, [TokenTree::Token(DUMMY_SP, token::Comma)] } impl ToTokens for ast::MetaItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let nt = token::NtMeta(self.clone()); vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Attribute { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { let mut r = vec![]; // FIXME: The spans could be better r.push(TokenTree::Token(self.span, token::Pound)); if self.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } let mut inner = Vec::new(); for (i, segment) in self.path.segments.iter().enumerate() { if i > 0 { inner.push(TokenTree::Token(self.span, token::Colon).into()); } inner.push(TokenTree::Token( self.span, token::Token::from_ast_ident(segment.ident) ).into()); } inner.push(self.tokens.clone()); r.push(TokenTree::Delimited(self.span, tokenstream::Delimited { delim: token::Bracket, tts: TokenStream::concat(inner).into() })); r } } impl ToTokens for str { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let lit = ast::LitKind::Str(Symbol::intern(self), ast::StrStyle::Cooked); dummy_spanned(lit).to_tokens(cx) } } impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited { delim: token::Paren, tts: TokenStream::empty().into(), })] } } impl ToTokens for ast::Lit { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { // FIXME: This is wrong P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(self.clone())), span: DUMMY_SP, attrs: ThinVec::new(), }).to_tokens(cx) } } impl ToTokens for bool { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { dummy_spanned(ast::LitKind::Bool(*self)).to_tokens(cx) } } impl ToTokens for char { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { dummy_spanned(ast::LitKind::Char(*self)).to_tokens(cx) } } macro_rules! 
impl_to_tokens_int { (signed, $t:ty, $tag:expr) => ( impl ToTokens for $t { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let val = if *self < 0 { -self } else { *self }; let lit = ast::LitKind::Int(val as u128, ast::LitIntType::Signed($tag)); let lit = P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(dummy_spanned(lit))), span: DUMMY_SP, attrs: ThinVec::new(), }); if *self >= 0 { return lit.to_tokens(cx); } P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Unary(ast::UnOp::Neg, lit), span: DUMMY_SP, attrs: ThinVec::new(), }).to_tokens(cx) } } ); (unsigned, $t:ty, $tag:expr) => ( impl ToTokens for $t { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let lit = ast::LitKind::Int(*self as u128, ast::LitIntType::Unsigned($tag)); dummy_spanned(lit).to_tokens(cx) } } ); } impl_to_tokens_int! { signed, isize, ast::IntTy::Isize } impl_to_tokens_int! { signed, i8, ast::IntTy::I8 } impl_to_tokens_int! { signed, i16, ast::IntTy::I16 } impl_to_tokens_int! { signed, i32, ast::IntTy::I32 } impl_to_tokens_int! { signed, i64, ast::IntTy::I64 } impl_to_tokens_int! { unsigned, usize, ast::UintTy::Usize } impl_to_tokens_int! { unsigned, u8, ast::UintTy::U8 } impl_to_tokens_int! { unsigned, u16, ast::UintTy::U16 } impl_to_tokens_int! { unsigned, u32, ast::UintTy::U32 } impl_to_tokens_int! 
{ unsigned, u64, ast::UintTy::U64 } pub trait ExtParseUtils { fn parse_item(&self, s: String) -> P<ast::Item>; fn parse_expr(&self, s: String) -> P<ast::Expr>; fn parse_stmt(&self, s: String) -> ast::Stmt; fn parse_tts(&self, s: String) -> Vec<TokenTree>; } impl<'a> ExtParseUtils for ExtCtxt<'a> { fn parse_item(&self, s: String) -> P<ast::Item> { panictry!(parse::parse_item_from_source_str( FileName::QuoteExpansion, s, self.parse_sess())).expect("parse error") } fn parse_stmt(&self, s: String) -> ast::Stmt { panictry!(parse::parse_stmt_from_source_str( FileName::QuoteExpansion, s, self.parse_sess())).expect("parse error") } fn parse_expr(&self, s: String) -> P<ast::Expr> { panictry!(parse::parse_expr_from_source_str( FileName::QuoteExpansion, s, self.parse_sess())) } fn parse_tts(&self, s: String) -> Vec<TokenTree> { let source_name = FileName::QuoteExpansion; parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None) .into_trees().collect() } } } // Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`. pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> { use tokenstream::Delimited; let mut results = Vec::new(); let mut result = Vec::new(); for tree in tts { match tree { TokenTree::Token(_, token::OpenDelim(..)) => { results.push(::std::mem::replace(&mut result, Vec::new())); } TokenTree::Token(span, token::CloseDelim(delim)) => { let tree = TokenTree::Delimited(span, Delimited { delim, tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(), }); result = results.pop().unwrap(); result.push(tree); } tree => result.push(tree), } } result } // These panicking parsing functions are used by the quote_*!() syntax extensions, // but shouldn't be used otherwise. 
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> { panictry!(parser.parse_expr()) } pub fn parse_item_panic(parser: &mut Parser) -> Option<P<Item>> { panictry!(parser.parse_item()) } pub fn parse_pat_panic(parser: &mut Parser) -> P<Pat> { panictry!(parser.parse_pat()) } pub fn parse_arm_panic(parser: &mut Parser) -> Arm { panictry!(parser.parse_arm()) } pub fn parse_ty_panic(parser: &mut Parser) -> P<Ty> { panictry!(parser.parse_ty()) } pub fn parse_stmt_panic(parser: &mut Parser) -> Option<Stmt> { panictry!(parser.parse_stmt()) } pub fn parse_attribute_panic(parser: &mut Parser, permit_inner: bool) -> ast::Attribute { panictry!(parser.parse_attribute(permit_inner)) } pub fn parse_arg_panic(parser: &mut Parser) -> Arg { panictry!(parser.parse_arg()) } pub fn parse_block_panic(parser: &mut Parser) -> P<Block> { panictry!(parser.parse_block()) } pub fn parse_meta_item_panic(parser: &mut Parser) -> ast::MetaItem { panictry!(parser.parse_meta_item()) } pub fn parse_path_panic(parser: &mut Parser, mode: PathStyle) -> ast::Path { panictry!(parser.parse_path(mode)) } pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'cx> { let (cx_expr, expr) = expand_tts(cx, sp, tts); let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]); base::MacEager::expr(expanded) } pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_item<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'cx> { let expanded = expand_parse_call(cx, sp, "parse_pat_panic", 
vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_arm(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_ty(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_attr(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_attribute_panic", vec![cx.expr_bool(sp, true)], tts); base::MacEager::expr(expanded) } pub fn expand_quote_arg(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_arg_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_block(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_block_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_meta_item(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let expanded = expand_parse_call(cx, sp, "parse_meta_item_panic", vec![], tts); base::MacEager::expr(expanded) } pub fn expand_quote_path(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<dyn base::MacResult+'static> { let mode = mk_parser_path(cx, sp, &["PathStyle", "Type"]); let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec![mode], tts); base::MacEager::expr(expanded) } fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> { strs.iter().map(|s| ast::Ident::from_str(s)).collect() } fn 
id_ext(s: &str) -> ast::Ident { ast::Ident::from_str(s) } // Lift an ident to the expr that evaluates to that ident. fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { let e_str = cx.expr_str(sp, ident.name); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), vec![e_str]) } // Lift a name to the expr that evaluates to that name fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { let e_str = cx.expr_str(sp, ident.name); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("name_of"), vec![e_str]) } fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { let idents = vec![id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)]; cx.expr_path(cx.path_global(sp, idents)) } fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { let idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)]; cx.expr_path(cx.path_global(sp, idents)) } fn mk_parser_path(cx: &ExtCtxt, sp: Span, names: &[&str]) -> P<ast::Expr> { let mut idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("parser")]; idents.extend(names.iter().cloned().map(id_ext)); cx.expr_path(cx.path_global(sp, idents)) } fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> { let name = match bop { token::Plus => "Plus", token::Minus => "Minus", token::Star => "Star", token::Slash => "Slash", token::Percent => "Percent", token::Caret => "Caret", token::And => "And", token::Or => "Or", token::Shl => "Shl", token::Shr => "Shr" }; mk_token_path(cx, sp, name) } fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> { let name = match delim { token::Paren => "Paren", token::Bracket => "Bracket", token::Brace => "Brace", token::NoDelim => "NoDelim", }; mk_token_path(cx, sp, name) } #[allow(non_upper_case_globals)] fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { macro_rules! 
mk_lit { ($name: expr, $suffix: expr, $content: expr $(, $count: expr)*) => {{ let name = mk_name(cx, sp, ast::Ident::with_empty_ctxt($content)); let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![ name $(, cx.expr_u16(sp, $count))* ]); let suffix = match $suffix { Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))), None => cx.expr_none(sp) }; cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix]) }} } let name = match *tok { token::BinOp(binop) => { return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]); } token::BinOpEq(binop) => { return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"), vec![mk_binop(cx, sp, binop)]); } token::OpenDelim(delim) => { return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"), vec![mk_delim(cx, sp, delim)]); } token::CloseDelim(delim) => { return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"), vec![mk_delim(cx, sp, delim)]); } token::Literal(token::Byte(i), suf) => return mk_lit!("Byte", suf, i), token::Literal(token::Char(i), suf) => return mk_lit!("Char", suf, i), token::Literal(token::Integer(i), suf) => return mk_lit!("Integer", suf, i), token::Literal(token::Float(i), suf) => return mk_lit!("Float", suf, i), token::Literal(token::Str_(i), suf) => return mk_lit!("Str_", suf, i), token::Literal(token::StrRaw(i, n), suf) => return mk_lit!("StrRaw", suf, i, n), token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i), token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n), token::Ident(ident, is_raw) => { return cx.expr_call(sp, mk_token_path(cx, sp, "Ident"), vec![mk_ident(cx, sp, ident), cx.expr_bool(sp, is_raw)]); } token::Lifetime(ident) => { return cx.expr_call(sp, mk_token_path(cx, sp, "Lifetime"), vec![mk_ident(cx, sp, ident)]); } token::DocComment(ident) => { return cx.expr_call(sp, mk_token_path(cx, sp, "DocComment"), vec![mk_name(cx, sp, 
ast::Ident::with_empty_ctxt(ident))]); } token::Interpolated(_) => { cx.span_err(sp, "quote! with interpolated token"); // Use dummy name. "Interpolated" } token::Eq => "Eq", token::Lt => "Lt", token::Le => "Le", token::EqEq => "EqEq", token::Ne => "Ne", token::Ge => "Ge", token::Gt => "Gt", token::AndAnd => "AndAnd", token::OrOr => "OrOr", token::Not => "Not", token::Tilde => "Tilde", token::At => "At", token::Dot => "Dot", token::DotDot => "DotDot", token::DotEq => "DotEq", token::DotDotDot => "DotDotDot", token::DotDotEq => "DotDotEq", token::Comma => "Comma", token::Semi => "Semi", token::Colon => "Colon", token::ModSep => "ModSep", token::RArrow => "RArrow", token::LArrow => "LArrow", token::FatArrow => "FatArrow", token::Pound => "Pound", token::Dollar => "Dollar", token::Question => "Question", token::SingleQuote => "SingleQuote", token::Eof => "Eof", token::Whitespace | token::Comment | token::Shebang(_) => { panic!("unhandled token in quote!"); } }; mk_token_path(cx, sp, name) } fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> { match *tt { TokenTree::Token(sp, token::Ident(ident, _)) if quoted => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = cx.expr_method_call(sp, cx.expr_ident(sp, ident), id_ext("to_tokens"), vec![cx.expr_ident(sp, id_ext("ext_cx"))]); let e_to_toks = cx.expr_method_call(sp, e_to_toks, id_ext("into_iter"), vec![]); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("extend"), vec![e_to_toks]); vec![cx.stmt_expr(e_push)] } TokenTree::Token(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, mk_tt_path(cx, sp, "Token"), vec![e_sp, expr_mk_token(cx, sp, tok)]); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("push"), vec![e_tok]); vec![cx.stmt_expr(e_push)] }, TokenTree::Delimited(span, ref delimed) => { let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false); 
stmts.extend(statements_mk_tts(cx, delimed.stream())); stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false)); stmts } } } fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) -> (P<ast::Expr>, Vec<TokenTree>) { let mut p = cx.new_parser_from_tts(tts); let cx_expr = panictry!(p.parse_expr()); if !p.eat(&token::Comma) { let _ = p.diagnostic().fatal("expected token `,`"); } let tts = panictry!(p.parse_all_token_trees()); p.abort_if_errors(); (cx_expr, tts) } fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> { // We also bind a single value, sp, to ext_cx.call_site() // // This causes every span in a token-tree quote to be attributed to the // call site of the extension using the quote. We can't really do much // better since the source of the quote may well be in a library that // was not even parsed by this compilation run, that the user has no // source code for (eg. in libsyntax, which they're just _using_). // // The old quasiquoter had an elaborate mechanism for denoting input // file locations from which quotes originated; unfortunately this // relied on feeding the source string of the quote back into the // compiler (which we don't really want to do) and, in any case, only // pushed the problem a very small step further back: an error // resulting from a parse of the resulting quote is still attributed to // the site the string literal occurred, which was in a source file // _other_ than the one the user has control over. For example, an // error in a quote from the protocol compiler, invoked in user code // using macro_rules! for example, will be attributed to the macro_rules.rs // file in libsyntax, which the user might not even have source to (unless // they happen to have a compiler on hand). Over all, the phase distinction // just makes quotes "hard to attribute". 
Possibly this could be fixed // by recreating some of the original qq machinery in the tt regime // (pushing fake SourceFiles onto the parser to account for original sites // of quotes, for example) but at this point it seems not likely to be // worth the hassle. let e_sp = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("call_site"), Vec::new()); let stmt_let_sp = cx.stmt_let(sp, false, id_ext("_sp"), e_sp); let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); vec![stmt_let_sp, stmt_let_tt] } fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> { let mut ss = Vec::new(); let mut quoted = false; for tt in tts.into_trees() { quoted = match tt { TokenTree::Token(_, token::Dollar) if !quoted => true, _ => { ss.extend(statements_mk_tt(cx, &tt, quoted)); false } } } ss } fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); vector.extend(statements_mk_tts(cx, tts.iter().cloned().collect())); vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); let block = cx.expr_block(cx.block(sp, vector)); let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")]; (cx_expr, cx.expr_call_global(sp, unflatten, vec![block])) } fn expand_wrapper(cx: &ExtCtxt, sp: Span, cx_expr: P<ast::Expr>, expr: P<ast::Expr>, imports: &[&[&str]]) -> P<ast::Expr> { // Explicitly borrow to avoid moving from the invoker (#16992) let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr)); let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow); let mut stmts = imports.iter().map(|path| { // make item: `use ...;` let path = path.iter().map(|s| s.to_string()).collect(); let use_item = cx.item_use_glob( sp, respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited), ids_ext(path), ); cx.stmt_item(sp, use_item) 
}).chain(Some(stmt_let_ext_cx)).collect::<Vec<_>>(); stmts.push(cx.stmt_expr(expr)); cx.expr_block(cx.block(sp, stmts)) } fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, arg_exprs: Vec<P<ast::Expr>> , tts: &[TokenTree]) -> P<ast::Expr> { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let parse_sess_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("parse_sess"), Vec::new()); let new_parser_call = cx.expr_call(sp, cx.expr_ident(sp, id_ext("new_parser_from_tts")), vec![parse_sess_call(), tts_expr]); let path = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext(parse_method)]; let mut args = vec![cx.expr_mut_addr_of(sp, new_parser_call)]; args.extend(arg_exprs); let expr = cx.expr_call_global(sp, path, args); if parse_method == "parse_attribute" { expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"], &["syntax", "parse", "attr"]]) } else { expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/source_util.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast; use syntax_pos::{self, Pos, Span, FileName}; use ext::base::*; use ext::base; use ext::build::AstBuilder; use parse::{token, DirectoryOwnership}; use parse; use print::pprust; use ptr::P; use OneVector; use symbol::Symbol; use tokenstream; use std::fs::File; use std::io::prelude::*; use std::path::PathBuf; use rustc_data_structures::sync::Lrc; // These macros all relate to the file system; they either return // the column/row/filename of the expression, or they include // a given file into the current one. /// line!(): expands to the current line number pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "line!"); let topmost = cx.expansion_cause().unwrap_or(sp); let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32)) } /* column!(): expands to the current column number */ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "column!"); let topmost = cx.expansion_cause().unwrap_or(sp); let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1)) } /* __rust_unstable_column!(): expands to the current column number */ pub fn expand_column_gated(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { if sp.allows_unstable() { 
expand_column(cx, sp, tts) } else { cx.span_fatal(sp, "the __rust_unstable_column macro is unstable"); } } /// file!(): expands to the current filename */ /// The source_file (`loc.file`) contains a bunch more information we could spit /// out if we wanted. pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "file!"); let topmost = cx.expansion_cause().unwrap_or(sp); let loc = cx.source_map().lookup_char_pos(topmost.lo()); base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string()))) } pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { let s = pprust::tts_to_string(tts); base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "module_path!"); let mod_path = &cx.current_expansion.module.mod_path; let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::"); base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string))) } /// include! : parse the given file as an expr /// This is generally a bad idea because it's going to behave /// unhygienically. 
pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'cx> { let file = match get_single_str_from_tts(cx, sp, tts, "include!") { Some(f) => f, None => return DummyResult::expr(sp), }; // The file will be added to the code map by the parser let path = res_rel_file(cx, sp, file); let directory_ownership = DirectoryOwnership::Owned { relative: None }; let p = parse::new_sub_parser_from_file(cx.parse_sess(), &path, directory_ownership, None, sp); struct ExpandResult<'a> { p: parse::parser::Parser<'a>, } impl<'a> base::MacResult for ExpandResult<'a> { fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> { Some(panictry!(self.p.parse_expr())) } fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<OneVector<P<ast::Item>>> { let mut ret = OneVector::new(); while self.p.token != token::Eof { match panictry!(self.p.parse_item()) { Some(item) => ret.push(item), None => self.p.diagnostic().span_fatal(self.p.span, &format!("expected item, found `{}`", self.p.this_token_to_string())) .raise() } } Some(ret) } } Box::new(ExpandResult { p: p }) } // include_str! : read the given file, insert it as a literal string expr pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { Some(f) => f, None => return DummyResult::expr(sp) }; let file = res_rel_file(cx, sp, file); let mut bytes = Vec::new(); match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) { Ok(..) 
=> {} Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); return DummyResult::expr(sp); } }; match String::from_utf8(bytes) { Ok(src) => { let interned_src = Symbol::intern(&src); // Add this input file to the code map to make it available as // dependency information cx.source_map().new_source_file(file.into(), src); base::MacEager::expr(cx.expr_str(sp, interned_src)) } Err(_) => { cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); DummyResult::expr(sp) } } } pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { Some(f) => f, None => return DummyResult::expr(sp) }; let file = res_rel_file(cx, sp, file); let mut bytes = Vec::new(); match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) { Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); DummyResult::expr(sp) } Ok(..) => { // Add this input file to the code map to make it available as // dependency information, but don't enter it's contents cx.source_map().new_source_file(file.into(), "".to_string()); base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) } } } // resolve a file-system path to an absolute file-system path (if it // isn't already) fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf { let arg = PathBuf::from(arg); // Relative paths are resolved relative to the file in which they are found // after macro expansion (that is, they are unhygienic). if !arg.is_absolute() { let callsite = sp.source_callsite(); let mut path = match cx.source_map().span_to_unmapped_path(callsite) { FileName::Real(path) => path, other => panic!("cannot resolve relative path in non-file source `{}`", other), }; path.pop(); path.push(arg); path } else { arg } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/build.rs
// NOTE(review): collapsed dump of the head of libsyntax/ext/build.rs (rustc
// ~1.30 era): license header, `use` imports, a transitional `mod syntax`
// re-export shim for qquote, and the start of the `AstBuilder` trait — a
// convenience builder API for synthesizing AST nodes (paths, types,
// statements, expressions, patterns, items) inside syntax extensions.
// Declarations run together across the original line breaks; text kept
// verbatim, comments only added between lines.
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use rustc_target::spec::abi::Abi; use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind}; use attr; use syntax_pos::{Pos, Span, DUMMY_SP}; use source_map::{dummy_spanned, respan, Spanned}; use ext::base::ExtCtxt; use ptr::P; use symbol::{Symbol, keywords}; use ThinVec; // Transitional re-exports so qquote can find the paths it is looking for mod syntax { pub use ext; pub use parse; } pub trait AstBuilder { // paths fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path; fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path; fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path; fn path_all(&self, sp: Span, global: bool, idents: Vec<ast::Ident>, args: Vec<ast::GenericArg>, bindings: Vec<ast::TypeBinding>) -> ast::Path; fn qpath(&self, self_type: P<ast::Ty>, trait_path: ast::Path, ident: ast::Ident) -> (ast::QSelf, ast::Path); fn qpath_all(&self, self_type: P<ast::Ty>, trait_path: ast::Path, ident: ast::Ident, args: Vec<ast::GenericArg>, bindings: Vec<ast::TypeBinding>) -> (ast::QSelf, ast::Path); // types fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy; fn ty(&self, span: Span, ty: ast::TyKind) -> P<ast::Ty>; fn ty_path(&self, path: ast::Path) -> P<ast::Ty>; fn ty_ident(&self, span: Span, idents: ast::Ident) -> P<ast::Ty>; fn ty_rptr(&self, span: Span, ty: P<ast::Ty>, lifetime: Option<ast::Lifetime>, mutbl: ast::Mutability) -> P<ast::Ty>; fn ty_ptr(&self, span: Span, ty: P<ast::Ty>, mutbl: ast::Mutability) -> P<ast::Ty>; fn
// NOTE(review): the dangling `fn` above is completed by `ty_option` below —
// line-wrap artifact of the dump, not missing code.
ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty>; fn ty_infer(&self, sp: Span) -> P<ast::Ty>; fn typaram(&self, span: Span, id: ast::Ident, attrs: Vec<ast::Attribute>, bounds: ast::GenericBounds, default: Option<P<ast::Ty>>) -> ast::GenericParam; fn trait_ref(&self, path: ast::Path) -> ast::TraitRef; fn poly_trait_ref(&self, span: Span, path: ast::Path) -> ast::PolyTraitRef; fn trait_bound(&self, path: ast::Path) -> ast::GenericBound; fn lifetime(&self, span: Span, ident: ast::Ident) -> ast::Lifetime; fn lifetime_def(&self, span: Span, ident: ast::Ident, attrs: Vec<ast::Attribute>, bounds: ast::GenericBounds) -> ast::GenericParam; // statements fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt; fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt; fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P<ast::Expr>) -> ast::Stmt; fn stmt_let_typed(&self, sp: Span, mutbl: bool, ident: ast::Ident, typ: P<ast::Ty>, ex: P<ast::Expr>) -> ast::Stmt; fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt; fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt; // blocks fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block>; fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block>; // expressions fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr>; fn expr_path(&self, path: ast::Path) -> P<ast::Expr>; fn expr_qpath(&self, span: Span, qself: ast::QSelf, path: ast::Path) -> P<ast::Expr>; fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr>; fn expr_self(&self, span: Span) -> P<ast::Expr>; fn expr_binary(&self, sp: Span, op: ast::BinOpKind, lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr>; fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>; fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P<ast::Expr>) -> P<ast::Expr>; fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>; fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>; fn expr_field_access(&self, span:
// NOTE(review): signature of `expr_field_access` continues below.
Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>; fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr>; fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>; fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>; fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>, args: Vec<P<ast::Expr>> ) -> P<ast::Expr>; fn expr_method_call(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident, args: Vec<P<ast::Expr>> ) -> P<ast::Expr>; fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr>; fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr>; fn field_imm(&self, span: Span, name: Ident, e: P<ast::Expr>) -> ast::Field; fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field>) -> P<ast::Expr>; fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: Vec<ast::Field>) -> P<ast::Expr>; fn expr_lit(&self, sp: Span, lit: ast::LitKind) -> P<ast::Expr>; fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr>; fn expr_isize(&self, sp: Span, i: isize) -> P<ast::Expr>; fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>; fn expr_u16(&self, sp: Span, u: u16) -> P<ast::Expr>; fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr>; fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>; fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>; fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr>; fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>; fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr>; fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr>; fn expr_none(&self, sp: Span) -> P<ast::Expr>; fn expr_break(&self, sp: Span) -> P<ast::Expr>; fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>; fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr>; fn expr_unreachable(&self, span: Span) -> P<ast::Expr>; fn
// NOTE(review): `fn` above is completed by `expr_ok` below; the trait's
// pattern-building declarations follow.
expr_ok(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>; fn expr_err(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>; fn expr_try(&self, span: Span, head: P<ast::Expr>) -> P<ast::Expr>; fn pat(&self, span: Span, pat: PatKind) -> P<ast::Pat>; fn pat_wild(&self, span: Span) -> P<ast::Pat>; fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat>; fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat>; fn pat_ident_binding_mode(&self, span: Span, ident: ast::Ident, bm: ast::BindingMode) -> P<ast::Pat>; fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat>; fn pat_tuple_struct(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>>) -> P<ast::Pat>; fn pat_struct(&self, span: Span, path: ast::Path, field_pats: Vec<Spanned<ast::FieldPat>>) -> P<ast::Pat>; fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat>; fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>; fn pat_none(&self, span: Span) -> P<ast::Pat>; fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>; fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>; fn arm(&self, span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm; fn arm_unreachable(&self, span: Span) -> ast::Arm; fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm> ) -> P<ast::Expr>; fn expr_if(&self, span: Span, cond: P<ast::Expr>, then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr>; fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr>; fn lambda_fn_decl(&self, span: Span, fn_decl: P<ast::FnDecl>, body: P<ast::Expr>, fn_decl_span: Span) -> P<ast::Expr>; fn lambda(&self, span: Span, ids: Vec<ast::Ident>, body: P<ast::Expr>) -> P<ast::Expr>; fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr>; fn lambda1(&self, span: Span, body: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>; fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident>, blk: Vec<ast::Stmt>) -> P<ast::Expr>; fn
// NOTE(review): item-building declarations of the trait follow.
lambda_stmts_0(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Expr>; fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>, ident: ast::Ident) -> P<ast::Expr>; // items fn item(&self, span: Span, name: Ident, attrs: Vec<ast::Attribute> , node: ast::ItemKind) -> P<ast::Item>; fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg; // FIXME unused self fn fn_decl(&self, inputs: Vec<ast::Arg> , output: ast::FunctionRetTy) -> P<ast::FnDecl>; fn item_fn_poly(&self, span: Span, name: Ident, inputs: Vec<ast::Arg> , output: P<ast::Ty>, generics: Generics, body: P<ast::Block>) -> P<ast::Item>; fn item_fn(&self, span: Span, name: Ident, inputs: Vec<ast::Arg> , output: P<ast::Ty>, body: P<ast::Block>) -> P<ast::Item>; fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant; fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, generics: Generics) -> P<ast::Item>; fn item_enum(&self, span: Span, name: Ident, enum_def: ast::EnumDef) -> P<ast::Item>; fn item_struct_poly(&self, span: Span, name: Ident, struct_def: ast::VariantData, generics: Generics) -> P<ast::Item>; fn item_struct(&self, span: Span, name: Ident, struct_def: ast::VariantData) -> P<ast::Item>; fn item_mod(&self, span: Span, inner_span: Span, name: Ident, attrs: Vec<ast::Attribute>, items: Vec<P<ast::Item>>) -> P<ast::Item>; fn item_extern_crate(&self, span: Span, name: Ident) -> P<ast::Item>; fn item_static(&self, span: Span, name: Ident, ty: P<ast::Ty>, mutbl: ast::Mutability, expr: P<ast::Expr>) -> P<ast::Item>; fn item_const(&self, span: Span, name: Ident, ty: P<ast::Ty>, expr: P<ast::Expr>) -> P<ast::Item>; fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>, generics: Generics) -> P<ast::Item>; fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item>; fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute; fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem; fn
// NOTE(review): the trait closes on the next line and the
// `impl<'a> AstBuilder for ExtCtxt<'a>` begins; `path_all` builds a
// (possibly global) path from idents + generic args, dropping the
// angle-bracket segment when both args and bindings are empty.
meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem; fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem> ) -> ast::MetaItem; fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind) -> ast::MetaItem; fn item_use(&self, sp: Span, vis: ast::Visibility, vp: P<ast::UseTree>) -> P<ast::Item>; fn item_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> P<ast::Item>; fn item_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: Option<ast::Ident>, path: ast::Path) -> P<ast::Item>; fn item_use_list(&self, sp: Span, vis: ast::Visibility, path: Vec<ast::Ident>, imports: &[ast::Ident]) -> P<ast::Item>; fn item_use_glob(&self, sp: Span, vis: ast::Visibility, path: Vec<ast::Ident>) -> P<ast::Item>; } impl<'a> AstBuilder for ExtCtxt<'a> { fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path { self.path_all(span, false, strs, vec![], vec![]) } fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path { self.path(span, vec![id]) } fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path { self.path_all(span, true, strs, vec![], vec![]) } fn path_all(&self, span: Span, global: bool, mut idents: Vec<ast::Ident> , args: Vec<ast::GenericArg>, bindings: Vec<ast::TypeBinding> ) -> ast::Path { let last_ident = idents.pop().unwrap(); let mut segments: Vec<ast::PathSegment> = vec![]; segments.extend(idents.into_iter().map(|ident| { ast::PathSegment::from_ident(ident.with_span_pos(span)) })); let args = if !args.is_empty() || !bindings.is_empty() { ast::AngleBracketedArgs { args, bindings, span }.into() } else { None }; segments.push(ast::PathSegment { ident: last_ident.with_span_pos(span), args }); let mut path = ast::Path { span, segments }; if global { if let Some(seg) = path.make_root() { path.segments.insert(0, seg); } } path } /// Constructs a qualified path. /// /// Constructs a path like `<self_type as trait_path>::ident`.
fn qpath(&self, self_type: P<ast::Ty>, trait_path: ast::Path, ident: ast::Ident) -> (ast::QSelf, ast::Path) { self.qpath_all(self_type, trait_path, ident, vec![], vec![]) } /// Constructs a qualified path. /// /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`. fn qpath_all(&self, self_type: P<ast::Ty>, trait_path: ast::Path, ident: ast::Ident, args: Vec<ast::GenericArg>, bindings: Vec<ast::TypeBinding>) -> (ast::QSelf, ast::Path) { let mut path = trait_path; let args = if !args.is_empty() || !bindings.is_empty() { ast::AngleBracketedArgs { args, bindings, span: ident.span }.into() } else { None }; path.segments.push(ast::PathSegment { ident, args }); (ast::QSelf { ty: self_type, path_span: path.span, position: path.segments.len() - 1 }, path) } fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy { ast::MutTy { ty, mutbl, } } fn ty(&self, span: Span, ty: ast::TyKind) -> P<ast::Ty> { P(ast::Ty { id: ast::DUMMY_NODE_ID, span, node: ty }) } fn ty_path(&self, path: ast::Path) -> P<ast::Ty> { self.ty(path.span, ast::TyKind::Path(None, path)) } // Might need to take bounds as an argument in the future, if you ever want // to generate a bounded existential trait type. 
// NOTE(review): impl methods for types, generic params, lifetimes and let
// statements. `ty_option(T)` builds the global path `::std::option::Option<T>`
// (root crate name depends on `std_path`, defined elsewhere in this file);
// `typaram`/`lifetime_def` synthesize GenericParams with dummy ids; the
// `stmt_let*` builders choose a by-value (optionally `mut`) binding pattern.
// Kept verbatim; `stmt_expr` at the end of this line is completed on the next.
fn ty_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Ty> { self.ty_path(self.path_ident(span, ident)) } fn ty_rptr(&self, span: Span, ty: P<ast::Ty>, lifetime: Option<ast::Lifetime>, mutbl: ast::Mutability) -> P<ast::Ty> { self.ty(span, ast::TyKind::Rptr(lifetime, self.ty_mt(ty, mutbl))) } fn ty_ptr(&self, span: Span, ty: P<ast::Ty>, mutbl: ast::Mutability) -> P<ast::Ty> { self.ty(span, ast::TyKind::Ptr(self.ty_mt(ty, mutbl))) } fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty> { self.ty_path( self.path_all(DUMMY_SP, true, self.std_path(&["option", "Option"]), vec![ast::GenericArg::Type(ty)], Vec::new())) } fn ty_infer(&self, span: Span) -> P<ast::Ty> { self.ty(span, ast::TyKind::Infer) } fn typaram(&self, span: Span, ident: ast::Ident, attrs: Vec<ast::Attribute>, bounds: ast::GenericBounds, default: Option<P<ast::Ty>>) -> ast::GenericParam { ast::GenericParam { ident: ident.with_span_pos(span), id: ast::DUMMY_NODE_ID, attrs: attrs.into(), bounds, kind: ast::GenericParamKind::Type { default, } } } fn trait_ref(&self, path: ast::Path) -> ast::TraitRef { ast::TraitRef { path, ref_id: ast::DUMMY_NODE_ID, } } fn poly_trait_ref(&self, span: Span, path: ast::Path) -> ast::PolyTraitRef { ast::PolyTraitRef { bound_generic_params: Vec::new(), trait_ref: self.trait_ref(path), span, } } fn trait_bound(&self, path: ast::Path) -> ast::GenericBound { ast::GenericBound::Trait(self.poly_trait_ref(path.span, path), ast::TraitBoundModifier::None) } fn lifetime(&self, span: Span, ident: ast::Ident) -> ast::Lifetime { ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) } } fn lifetime_def(&self, span: Span, ident: ast::Ident, attrs: Vec<ast::Attribute>, bounds: ast::GenericBounds) -> ast::GenericParam { let lifetime = self.lifetime(span, ident); ast::GenericParam { ident: lifetime.ident, id: lifetime.id, attrs: attrs.into(), bounds, kind: ast::GenericParamKind::Lifetime, } } fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt { ast::Stmt { id:
// NOTE(review): `stmt_expr`'s struct literal continues below (wrap artifact).
ast::DUMMY_NODE_ID, span: expr.span, node: ast::StmtKind::Expr(expr), } } fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt { ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, node: ast::StmtKind::Semi(expr), } } fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P<ast::Expr>) -> ast::Stmt { let pat = if mutbl { let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable); self.pat_ident_binding_mode(sp, ident, binding_mode) } else { self.pat_ident(sp, ident) }; let local = P(ast::Local { pat, ty: None, init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, attrs: ThinVec::new(), }); ast::Stmt { id: ast::DUMMY_NODE_ID, node: ast::StmtKind::Local(local), span: sp, } } fn stmt_let_typed(&self, sp: Span, mutbl: bool, ident: ast::Ident, typ: P<ast::Ty>, ex: P<ast::Expr>) -> ast::Stmt { let pat = if mutbl { let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable); self.pat_ident_binding_mode(sp, ident, binding_mode) } else { self.pat_ident(sp, ident) }; let local = P(ast::Local { pat, ty: Some(typ), init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, attrs: ThinVec::new(), }); ast::Stmt { id: ast::DUMMY_NODE_ID, node: ast::StmtKind::Local(local), span: sp, } } // Generate `let _: Type;`, usually used for type assertions.
fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
    // `let _: ty;` — wildcard pattern, explicit type, no initializer.
    let decl = ast::Local {
        pat: self.pat_wild(span),
        ty: Some(ty),
        init: None,
        id: ast::DUMMY_NODE_ID,
        span,
        attrs: ThinVec::new(),
    };
    ast::Stmt {
        id: ast::DUMMY_NODE_ID,
        node: ast::StmtKind::Local(P(decl)),
        span,
    }
}

/// Wraps an item as a statement so it can appear inside a block.
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
    ast::Stmt {
        node: ast::StmtKind::Item(item),
        id: ast::DUMMY_NODE_ID,
        span: sp,
    }
}

/// Builds a block whose single statement is the given tail expression.
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
    let sp = expr.span;
    let tail = ast::Stmt {
        id: ast::DUMMY_NODE_ID,
        span: sp,
        node: ast::StmtKind::Expr(expr),
    };
    self.block(sp, vec![tail])
}

/// Builds a plain (non-`unsafe`) block from `stmts`.
fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
    P(ast::Block {
        rules: BlockCheckMode::Default,
        recovered: false,
        id: ast::DUMMY_NODE_ID,
        stmts,
        span,
    })
}

/// Wraps an `ExprKind` into a full expression node; synthesized
/// expressions carry no attributes and a dummy id.
fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr> {
    P(ast::Expr {
        node,
        span,
        id: ast::DUMMY_NODE_ID,
        attrs: ThinVec::new(),
    })
}

/// Builds a non-qualified path expression, reusing the path's span.
fn expr_path(&self, path: ast::Path) -> P<ast::Expr> {
    let span = path.span;
    self.expr(span, ast::ExprKind::Path(None, path))
}

/// Constructs a QPath expression.
// NOTE(review): expression-building impl methods. Highlights: integer literal
// builders cast through u128; `expr_isize` negates via a unary-neg node for
// negative values; `expr_fail` expands to `::std::rt::begin_panic(msg,
// &(file, line, col))` using source-map lookup; `expr_try` hand-expands the
// old `try!`-style match. Kept verbatim; only comments added between lines.
fn expr_qpath(&self, span: Span, qself: ast::QSelf, path: ast::Path) -> P<ast::Expr> { self.expr(span, ast::ExprKind::Path(Some(qself), path)) } fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr> { self.expr_path(self.path_ident(span, id)) } fn expr_self(&self, span: Span) -> P<ast::Expr> { self.expr_ident(span, keywords::SelfValue.ident()) } fn expr_binary(&self, sp: Span, op: ast::BinOpKind, lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Binary(Spanned { node: op, span: sp }, lhs, rhs)) } fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> { self.expr_unary(sp, UnOp::Deref, e) } fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P<ast::Expr>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Unary(op, e)) } fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Field(expr, ident.with_span_pos(sp))) } fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> { let ident = Ident::from_str(&idx.to_string()).with_span_pos(sp); self.expr(sp, ast::ExprKind::Field(expr, ident)) } fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Immutable, e)) } fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::AddrOf(ast::Mutability::Mutable, e)) } fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr> { self.expr(span, ast::ExprKind::Call(expr, args)) } fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr> { self.expr(span, ast::ExprKind::Call(self.expr_ident(span, id), args)) } fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> , args: Vec<P<ast::Expr>> ) -> P<ast::Expr> { let pathexpr = self.expr_path(self.path_global(sp, fn_path)); self.expr_call(sp, pathexpr, args) } fn expr_method_call(&self, span: Span, expr:
// NOTE(review): `expr_method_call` continues below — the receiver is inserted
// as args[0], matching `ExprKind::MethodCall`'s representation.
P<ast::Expr>, ident: ast::Ident, mut args: Vec<P<ast::Expr>> ) -> P<ast::Expr> { args.insert(0, expr); let segment = ast::PathSegment::from_ident(ident.with_span_pos(span)); self.expr(span, ast::ExprKind::MethodCall(segment, args)) } fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> { self.expr(b.span, ast::ExprKind::Block(b, None)) } fn field_imm(&self, span: Span, ident: Ident, e: P<ast::Expr>) -> ast::Field { ast::Field { ident: ident.with_span_pos(span), expr: e, span, is_shorthand: false, attrs: ThinVec::new(), } } fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field>) -> P<ast::Expr> { self.expr(span, ast::ExprKind::Struct(path, fields, None)) } fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: Vec<ast::Field>) -> P<ast::Expr> { self.expr_struct(span, self.path_ident(span, id), fields) } fn expr_lit(&self, sp: Span, lit: ast::LitKind) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Lit(P(respan(sp, lit)))) } fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> { self.expr_lit(span, ast::LitKind::Int(i as u128, ast::LitIntType::Unsigned(ast::UintTy::Usize))) } fn expr_isize(&self, sp: Span, i: isize) -> P<ast::Expr> { if i < 0 { let i = (-i) as u128; let lit_ty = ast::LitIntType::Signed(ast::IntTy::Isize); let lit = self.expr_lit(sp, ast::LitKind::Int(i, lit_ty)); self.expr_unary(sp, ast::UnOp::Neg, lit) } else { self.expr_lit(sp, ast::LitKind::Int(i as u128, ast::LitIntType::Signed(ast::IntTy::Isize))) } } fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> { self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U32))) } fn expr_u16(&self, sp: Span, u: u16) -> P<ast::Expr> { self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U16))) } fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr> { self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U8))) } fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
// NOTE(review): `expr_bool`'s body continues below (wrap artifact).
self.expr_lit(sp, ast::LitKind::Bool(value)) } fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Array(exprs)) } fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr> { self.expr_call_global(sp, self.std_path(&["vec", "Vec", "new"]), Vec::new()) } fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> { self.expr_addr_of(sp, self.expr_vec(sp, exprs)) } fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> { self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked)) } fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Cast(expr, ty)) } fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> { let some = self.std_path(&["option", "Option", "Some"]); self.expr_call_global(sp, some, vec![expr]) } fn expr_none(&self, sp: Span) -> P<ast::Expr> { let none = self.std_path(&["option", "Option", "None"]); let none = self.path_global(sp, none); self.expr_path(none) } fn expr_break(&self, sp: Span) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Break(None, None)) } fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> { self.expr(sp, ast::ExprKind::Tup(exprs)) } fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> { let loc = self.source_map().lookup_char_pos(span.lo()); let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name.to_string())); let expr_line = self.expr_u32(span, loc.line as u32); let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1); let expr_loc_tuple = self.expr_tuple(span, vec![expr_file, expr_line, expr_col]); let expr_loc_ptr = self.expr_addr_of(span, expr_loc_tuple); self.expr_call_global( span, self.std_path(&["rt", "begin_panic"]), vec![ self.expr_str(span, msg), expr_loc_ptr]) } fn expr_unreachable(&self, span: Span) -> P<ast::Expr> { self.expr_fail(span, Symbol::intern("internal error: entered unreachable code")) } fn expr_ok(&self, sp: Span, expr: P<ast::Expr>)
// NOTE(review): `expr_ok`'s return type and body continue below.
-> P<ast::Expr> { let ok = self.std_path(&["result", "Result", "Ok"]); self.expr_call_global(sp, ok, vec![expr]) } fn expr_err(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> { let err = self.std_path(&["result", "Result", "Err"]); self.expr_call_global(sp, err, vec![expr]) } fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> { let ok = self.std_path(&["result", "Result", "Ok"]); let ok_path = self.path_global(sp, ok); let err = self.std_path(&["result", "Result", "Err"]); let err_path = self.path_global(sp, err); let binding_variable = self.ident_of("__try_var"); let binding_pat = self.pat_ident(sp, binding_variable); let binding_expr = self.expr_ident(sp, binding_variable); // Ok(__try_var) pattern let ok_pat = self.pat_tuple_struct(sp, ok_path, vec![binding_pat.clone()]); // Err(__try_var) (pattern and expression resp.) let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]); let err_inner_expr = self.expr_call(sp, self.expr_path(err_path), vec![binding_expr.clone()]); // return Err(__try_var) let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr))); // Ok(__try_var) => __try_var let ok_arm = self.arm(sp, vec![ok_pat], binding_expr); // Err(__try_var) => return Err(__try_var) let err_arm = self.arm(sp, vec![err_pat], err_expr); // match head { Ok() => ..., Err() => ...
// NOTE(review): the leading `}` on the next line looks like the tail of the
// `// match head { ... }` comment above, split by a line-wrap artifact (it is
// NOT a code brace in upstream source) — verify against rustc's build.rs.
} self.expr_match(sp, head, vec![ok_arm, err_arm]) } fn pat(&self, span: Span, pat: PatKind) -> P<ast::Pat> { P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }) } fn pat_wild(&self, span: Span) -> P<ast::Pat> { self.pat(span, PatKind::Wild) } fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> { self.pat(span, PatKind::Lit(expr)) } fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat> { let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Immutable); self.pat_ident_binding_mode(span, ident, binding_mode) } fn pat_ident_binding_mode(&self, span: Span, ident: ast::Ident, bm: ast::BindingMode) -> P<ast::Pat> { let pat = PatKind::Ident(bm, ident.with_span_pos(span), None); self.pat(span, pat) } fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat> { self.pat(span, PatKind::Path(None, path)) } fn pat_tuple_struct(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>>) -> P<ast::Pat> { self.pat(span, PatKind::TupleStruct(path, subpats, None)) } fn pat_struct(&self, span: Span, path: ast::Path, field_pats: Vec<Spanned<ast::FieldPat>>) -> P<ast::Pat> { self.pat(span, PatKind::Struct(path, field_pats, false)) } fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat> { self.pat(span, PatKind::Tuple(pats, None)) } fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> { let some = self.std_path(&["option", "Option", "Some"]); let path = self.path_global(span, some); self.pat_tuple_struct(span, path, vec![pat]) } fn pat_none(&self, span: Span) -> P<ast::Pat> { let some = self.std_path(&["option", "Option", "None"]); let path = self.path_global(span, some); self.pat_path(span, path) } fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> { let some = self.std_path(&["result", "Result", "Ok"]); let path = self.path_global(span, some); self.pat_tuple_struct(span, path, vec![pat]) } fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> { let some = self.std_path(&["result",
// NOTE(review): `pat_err` continues below; arm/match/closure builders follow.
"Result", "Err"]); let path = self.path_global(span, some); self.pat_tuple_struct(span, path, vec![pat]) } fn arm(&self, _span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm { ast::Arm { attrs: vec![], pats, guard: None, body: expr, } } fn arm_unreachable(&self, span: Span) -> ast::Arm { self.arm(span, vec![self.pat_wild(span)], self.expr_unreachable(span)) } fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm>) -> P<Expr> { self.expr(span, ast::ExprKind::Match(arg, arms)) } fn expr_if(&self, span: Span, cond: P<ast::Expr>, then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr> { let els = els.map(|x| self.expr_block(self.block_expr(x))); self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els)) } fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr> { self.expr(span, ast::ExprKind::Loop(block, None)) } fn lambda_fn_decl(&self, span: Span, fn_decl: P<ast::FnDecl>, body: P<ast::Expr>, fn_decl_span: Span) // span of the `|...|` part -> P<ast::Expr> { self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, ast::IsAsync::NotAsync, ast::Movability::Movable, fn_decl, body, fn_decl_span)) } fn lambda(&self, span: Span, ids: Vec<ast::Ident>, body: P<ast::Expr>) -> P<ast::Expr> { let fn_decl = self.fn_decl( ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(), ast::FunctionRetTy::Default(span)); // FIXME -- We are using `span` as the span of the `|...|` // part of the lambda, but it probably (maybe?) corresponds to // the entire lambda body. Probably we should extend the API // here, but that's not entirely clear.
// NOTE(review): continuation of `lambda`'s body from the previous line (the
// `self.expr(...)` below is its tail expression), followed by the item-building
// impl methods (`item`, `item_fn*`, `variant`, `item_enum*`, `item_struct*`,
// `item_mod`, `item_static`/`item_const`, `item_ty*`, attribute/meta builders,
// and the `item_use*` family), closing the `impl AstBuilder for ExtCtxt`.
// All synthesized items get `DUMMY_NODE_ID` and inherited visibility.
// Kept verbatim; only comments added between lines.
self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, ast::IsAsync::NotAsync, ast::Movability::Movable, fn_decl, body, span)) } fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr> { self.lambda(span, Vec::new(), body) } fn lambda1(&self, span: Span, body: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> { self.lambda(span, vec![ident], body) } fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident>, stmts: Vec<ast::Stmt>) -> P<ast::Expr> { self.lambda(span, ids, self.expr_block(self.block(span, stmts))) } fn lambda_stmts_0(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Expr> { self.lambda0(span, self.expr_block(self.block(span, stmts))) } fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>, ident: ast::Ident) -> P<ast::Expr> { self.lambda1(span, self.expr_block(self.block(span, stmts)), ident) } fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg { let arg_pat = self.pat_ident(span, ident); ast::Arg { ty, pat: arg_pat, id: ast::DUMMY_NODE_ID } } // FIXME unused self fn fn_decl(&self, inputs: Vec<ast::Arg>, output: ast::FunctionRetTy) -> P<ast::FnDecl> { P(ast::FnDecl { inputs, output, variadic: false }) } fn item(&self, span: Span, name: Ident, attrs: Vec<ast::Attribute>, node: ast::ItemKind) -> P<ast::Item> { // FIXME: Would be nice if our generated code didn't violate // Rust coding conventions P(ast::Item { ident: name, attrs, id: ast::DUMMY_NODE_ID, node, vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited), span, tokens: None, }) } fn item_fn_poly(&self, span: Span, name: Ident, inputs: Vec<ast::Arg> , output: P<ast::Ty>, generics: Generics, body: P<ast::Block>) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Fn(self.fn_decl(inputs, ast::FunctionRetTy::Ty(output)), ast::FnHeader { unsafety: ast::Unsafety::Normal, asyncness: ast::IsAsync::NotAsync, constness: dummy_spanned(ast::Constness::NotConst), abi: Abi::Rust, }, generics, body)) } fn item_fn(&self, span: Span, name: Ident,
// NOTE(review): `item_fn`'s parameter list continues below (wrap artifact).
inputs: Vec<ast::Arg> , output: P<ast::Ty>, body: P<ast::Block> ) -> P<ast::Item> { self.item_fn_poly( span, name, inputs, output, Generics::default(), body) } fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant { let fields: Vec<_> = tys.into_iter().map(|ty| { ast::StructField { span: ty.span, ty, ident: None, vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited), attrs: Vec::new(), id: ast::DUMMY_NODE_ID, } }).collect(); let vdata = if fields.is_empty() { ast::VariantData::Unit(ast::DUMMY_NODE_ID) } else { ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID) }; respan(span, ast::Variant_ { ident, attrs: Vec::new(), data: vdata, disr_expr: None, }) } fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, generics: Generics) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Enum(enum_definition, generics)) } fn item_enum(&self, span: Span, name: Ident, enum_definition: ast::EnumDef) -> P<ast::Item> { self.item_enum_poly(span, name, enum_definition, Generics::default()) } fn item_struct(&self, span: Span, name: Ident, struct_def: ast::VariantData) -> P<ast::Item> { self.item_struct_poly( span, name, struct_def, Generics::default() ) } fn item_struct_poly(&self, span: Span, name: Ident, struct_def: ast::VariantData, generics: Generics) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Struct(struct_def, generics)) } fn item_mod(&self, span: Span, inner_span: Span, name: Ident, attrs: Vec<ast::Attribute>, items: Vec<P<ast::Item>>) -> P<ast::Item> { self.item( span, name, attrs, ast::ItemKind::Mod(ast::Mod { inner: inner_span, items, }) ) } fn item_extern_crate(&self, span: Span, name: Ident) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::ExternCrate(None)) } fn item_static(&self, span: Span, name: Ident, ty: P<ast::Ty>, mutbl: ast::Mutability, expr: P<ast::Expr>) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Static(ty, mutbl,
// NOTE(review): `item_static` continues below; meta/attr and use-item
// builders follow.
expr)) } fn item_const(&self, span: Span, name: Ident, ty: P<ast::Ty>, expr: P<ast::Expr>) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Const(ty, expr)) } fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>, generics: Generics) -> P<ast::Item> { self.item(span, name, Vec::new(), ast::ItemKind::Ty(ty, generics)) } fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item> { self.item_ty_poly(span, name, ty, Generics::default()) } fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute { attr::mk_spanned_attr_outer(sp, attr::mk_attr_id(), mi) } fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem { attr::mk_word_item(Ident::with_empty_ctxt(w).with_span_pos(sp)) } fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem { attr::mk_nested_word_item(Ident::with_empty_ctxt(w).with_span_pos(sp)) } fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>) -> ast::MetaItem { attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis) } fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind) -> ast::MetaItem { attr::mk_name_value_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), respan(sp, value)) } fn item_use(&self, sp: Span, vis: ast::Visibility, vp: P<ast::UseTree>) -> P<ast::Item> { P(ast::Item { id: ast::DUMMY_NODE_ID, ident: keywords::Invalid.ident(), attrs: vec![], node: ast::ItemKind::Use(vp), vis, span: sp, tokens: None, }) } fn item_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> P<ast::Item> { self.item_use_simple_(sp, vis, None, path) } fn item_use_simple_(&self, sp: Span, vis: ast::Visibility, rename: Option<ast::Ident>, path: ast::Path) -> P<ast::Item> { self.item_use(sp, vis, P(ast::UseTree { span: sp, prefix: path, kind: ast::UseTreeKind::Simple(rename, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID), })) } fn item_use_list(&self, sp: Span, vis: ast::Visibility, path: Vec<ast::Ident>, imports:
// NOTE(review): `item_use_list`'s signature continues below; the impl block
// closes at the end of this line.
&[ast::Ident]) -> P<ast::Item> { let imports = imports.iter().map(|id| { (ast::UseTree { span: sp, prefix: self.path(sp, vec![*id]), kind: ast::UseTreeKind::Simple(None, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID), }, ast::DUMMY_NODE_ID) }).collect(); self.item_use(sp, vis, P(ast::UseTree { span: sp, prefix: self.path(sp, path), kind: ast::UseTreeKind::Nested(imports), })) } fn item_use_glob(&self, sp: Span, vis: ast::Visibility, path: Vec<ast::Ident>) -> P<ast::Item> { self.item_use(sp, vis, P(ast::UseTree { span: sp, prefix: self.path(sp, path), kind: ast::UseTreeKind::Glob, })) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/expand.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::{self, Block, Ident, NodeId, PatKind, Path}; use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan}; use config::{is_test_or_bench, StripUnconfigured}; use errors::{Applicability, FatalError}; use ext::base::*; use ext::build::AstBuilder; use ext::derive::{add_derived_markers, collect_derives}; use ext::hygiene::{self, Mark, SyntaxContext}; use ext::placeholders::{placeholder, PlaceholderExpander}; use feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; use fold; use fold::*; use parse::{DirectoryOwnership, PResult, ParseSess}; use parse::token::{self, Token}; use parse::parser::Parser; use ptr::P; use OneVector; use symbol::Symbol; use symbol::keywords; use syntax_pos::{Span, DUMMY_SP, FileName}; use syntax_pos::hygiene::ExpnFormat; use tokenstream::{TokenStream, TokenTree}; use visit::{self, Visitor}; use std::collections::HashMap; use std::fs::File; use std::io::Read; use std::iter::FromIterator; use std::{iter, mem}; use std::rc::Rc; use std::path::PathBuf; macro_rules! ast_fragments { ( $($Kind:ident($AstTy:ty) { $kind_name:expr; // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro // repetition was removed from 2015 edition in #51587 because of ambiguities. 
$(one fn $fold_ast:ident; fn $visit_ast:ident;)* $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)* fn $make_ast:ident; })* ) => { /// A fragment of AST that can be produced by a single macro expansion. /// Can also serve as an input and intermediate result for macro expansion operations. pub enum AstFragment { OptExpr(Option<P<ast::Expr>>), $($Kind($AstTy),)* } /// "Discriminant" of an AST fragment. #[derive(Copy, Clone, PartialEq, Eq)] pub enum AstFragmentKind { OptExpr, $($Kind,)* } impl AstFragmentKind { pub fn name(self) -> &'static str { match self { AstFragmentKind::OptExpr => "expression", $(AstFragmentKind::$Kind => $kind_name,)* } } fn make_from<'a>(self, result: Box<dyn MacResult + 'a>) -> Option<AstFragment> { match self { AstFragmentKind::OptExpr => result.make_expr().map(Some).map(AstFragment::OptExpr), $(AstFragmentKind::$Kind => result.$make_ast().map(AstFragment::$Kind),)* } } } impl AstFragment { pub fn make_opt_expr(self) -> Option<P<ast::Expr>> { match self { AstFragment::OptExpr(expr) => expr, _ => panic!("AstFragment::make_* called on the wrong kind of fragment"), } } $(pub fn $make_ast(self) -> $AstTy { match self { AstFragment::$Kind(ast) => ast, _ => panic!("AstFragment::make_* called on the wrong kind of fragment"), } })* pub fn fold_with<F: Folder>(self, folder: &mut F) -> Self { match self { AstFragment::OptExpr(expr) => AstFragment::OptExpr(expr.and_then(|expr| folder.fold_opt_expr(expr))), $($(AstFragment::$Kind(ast) => AstFragment::$Kind(folder.$fold_ast(ast)),)*)* $($(AstFragment::$Kind(ast) => AstFragment::$Kind(ast.into_iter() .flat_map(|ast| folder.$fold_ast_elt(ast)) .collect()),)*)* } } pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) { match *self { AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr), AstFragment::OptExpr(None) => {} $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)*)* $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] 
{ visitor.$visit_ast_elt(ast_elt); })*)* } } } impl<'a, 'b> Folder for MacroExpander<'a, 'b> { fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> { self.expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr() } $($(fn $fold_ast(&mut self, ast: $AstTy) -> $AstTy { self.expand_fragment(AstFragment::$Kind(ast)).$make_ast() })*)* $($(fn $fold_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { self.expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast() })*)* } impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> { $(fn $make_ast(self: Box<::ext::tt::macro_rules::ParserAnyMacro<'a>>) -> Option<$AstTy> { Some(self.make(AstFragmentKind::$Kind).$make_ast()) })* } } } ast_fragments! { Expr(P<ast::Expr>) { "expression"; one fn fold_expr; fn visit_expr; fn make_expr; } Pat(P<ast::Pat>) { "pattern"; one fn fold_pat; fn visit_pat; fn make_pat; } Ty(P<ast::Ty>) { "type"; one fn fold_ty; fn visit_ty; fn make_ty; } Stmts(OneVector<ast::Stmt>) { "statement"; many fn fold_stmt; fn visit_stmt; fn make_stmts; } Items(OneVector<P<ast::Item>>) { "item"; many fn fold_item; fn visit_item; fn make_items; } TraitItems(OneVector<ast::TraitItem>) { "trait item"; many fn fold_trait_item; fn visit_trait_item; fn make_trait_items; } ImplItems(OneVector<ast::ImplItem>) { "impl item"; many fn fold_impl_item; fn visit_impl_item; fn make_impl_items; } ForeignItems(OneVector<ast::ForeignItem>) { "foreign item"; many fn fold_foreign_item; fn visit_foreign_item; fn make_foreign_items; } } impl AstFragmentKind { fn dummy(self, span: Span) -> Option<AstFragment> { self.make_from(DummyResult::any(span)) } fn expect_from_annotatables<I: IntoIterator<Item = Annotatable>>(self, items: I) -> AstFragment { let mut items = items.into_iter(); match self { AstFragmentKind::Items => AstFragment::Items(items.map(Annotatable::expect_item).collect()), AstFragmentKind::ImplItems => 
AstFragment::ImplItems(items.map(Annotatable::expect_impl_item).collect()),
            AstFragmentKind::TraitItems =>
                AstFragment::TraitItems(items.map(Annotatable::expect_trait_item).collect()),
            AstFragmentKind::ForeignItems =>
                AstFragment::ForeignItems(items.map(Annotatable::expect_foreign_item).collect()),
            AstFragmentKind::Stmts =>
                AstFragment::Stmts(items.map(Annotatable::expect_stmt).collect()),
            AstFragmentKind::Expr => AstFragment::Expr(
                items.next().expect("expected exactly one expression").expect_expr()
            ),
            AstFragmentKind::OptExpr =>
                AstFragment::OptExpr(items.next().map(Annotatable::expect_expr)),
            AstFragmentKind::Pat | AstFragmentKind::Ty =>
                panic!("patterns and types aren't annotatable"),
        }
    }
}

/// Builds the `ExpnFormat` recorded for a bang-macro invocation from the
/// macro's path: segments joined with `::`, skipping the crate-root and
/// `$crate` keyword segments.
fn macro_bang_format(path: &ast::Path) -> ExpnFormat {
    // We don't want to format a path using pretty-printing,
    // `format!("{}", path)`, because that tries to insert
    // line-breaks and is slow.
    let mut path_str = String::with_capacity(64);
    for (i, segment) in path.segments.iter().enumerate() {
        if i != 0 {
            path_str.push_str("::");
        }
        if segment.ident.name != keywords::CrateRoot.name() &&
            segment.ident.name != keywords::DollarCrate.name()
        {
            path_str.push_str(&segment.ident.as_str())
        }
    }
    MacroBang(Symbol::intern(&path_str))
}

/// A single pending macro invocation, together with the kind of AST
/// fragment its expansion must produce and the expansion state captured
/// when it was collected.
pub struct Invocation {
    pub kind: InvocationKind,
    // What the expansion is required to produce (expression, items, ...).
    fragment_kind: AstFragmentKind,
    pub expansion_data: ExpansionData,
}

/// The syntactic form of a macro invocation: a `mac!(...)` call, an
/// attribute, or a single `#[derive(...)]` trait application.
pub enum InvocationKind {
    Bang {
        mac: ast::Mac,
        ident: Option<Ident>,
        span: Span,
    },
    Attr {
        // NOTE(review): `span()` below returns DUMMY_SP for `attr: None`;
        // presumably the `None` case encodes a pending derive — confirm
        // against `expand_fragment`, which matches `Attr { attr: None, .. }`.
        attr: Option<ast::Attribute>,
        traits: Vec<Path>,
        item: Annotatable,
    },
    Derive {
        path: Path,
        item: Annotatable,
    },
}

impl Invocation {
    /// The span to attach to diagnostics about this invocation.
    pub fn span(&self) -> Span {
        match self.kind {
            InvocationKind::Bang { span, .. } => span,
            InvocationKind::Attr { attr: Some(ref attr), .. } => attr.span,
            InvocationKind::Attr { attr: None, .. } => DUMMY_SP,
            InvocationKind::Derive { ref path, .. } => path.span,
        }
    }
}

/// Drives iterative macro expansion over an AST fragment (see
/// `expand_crate` / `expand_fragment`).
pub struct MacroExpander<'a, 'b:'a> {
    pub cx: &'a mut ExtCtxt<'b>,
    monotonic: bool, // c.f.
`cx.monotonic_expander()` } impl<'a, 'b> MacroExpander<'a, 'b> { pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self { MacroExpander { cx: cx, monotonic: monotonic } } pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { let mut module = ModuleData { mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], directory: match self.cx.source_map().span_to_unmapped_path(krate.span) { FileName::Real(path) => path, other => PathBuf::from(other.to_string()), }, }; module.directory.pop(); self.cx.root_path = module.directory.clone(); self.cx.current_expansion.module = Rc::new(module); self.cx.current_expansion.crate_span = Some(krate.span); let orig_mod_span = krate.module.inner; let krate_item = AstFragment::Items(smallvec![P(ast::Item { attrs: krate.attrs, span: krate.span, node: ast::ItemKind::Mod(krate.module), ident: keywords::Invalid.ident(), id: ast::DUMMY_NODE_ID, vis: respan(krate.span.shrink_to_lo(), ast::VisibilityKind::Public), tokens: None, })]); match self.expand_fragment(krate_item).make_items().pop().map(P::into_inner) { Some(ast::Item { attrs, node: ast::ItemKind::Mod(module), .. }) => { krate.attrs = attrs; krate.module = module; }, None => { // Resolution failed so we return an empty expansion krate.attrs = vec![]; krate.module = ast::Mod { inner: orig_mod_span, items: vec![], }; }, _ => unreachable!(), }; self.cx.trace_macros_diag(); krate } // Fully expand all macro invocations in this AST fragment. fn expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { let orig_expansion_data = self.cx.current_expansion.clone(); self.cx.current_expansion.depth = 0; // Collect all macro invocations and replace them with placeholders. let (fragment_with_placeholders, mut invocations) = self.collect_invocations(input_fragment, &[]); // Optimization: if we resolve all imports now, // we'll be able to immediately resolve most of imported macros. 
self.resolve_imports(); // Resolve paths in all invocations and produce output expanded fragments for them, but // do not insert them into our input AST fragment yet, only store in `expanded_fragments`. // The output fragments also go through expansion recursively until no invocations are left. // Unresolved macros produce dummy outputs as a recovery measure. invocations.reverse(); let mut expanded_fragments = Vec::new(); let mut derives: HashMap<Mark, Vec<_>> = HashMap::new(); let mut undetermined_invocations = Vec::new(); let (mut progress, mut force) = (false, !self.monotonic); loop { let invoc = if let Some(invoc) = invocations.pop() { invoc } else { self.resolve_imports(); if undetermined_invocations.is_empty() { break } invocations = mem::replace(&mut undetermined_invocations, Vec::new()); force = !mem::replace(&mut progress, false); continue }; let scope = if self.monotonic { invoc.expansion_data.mark } else { orig_expansion_data.mark }; let ext = match self.cx.resolver.resolve_macro_invocation(&invoc, scope, force) { Ok(ext) => Some(ext), Err(Determinacy::Determined) => None, Err(Determinacy::Undetermined) => { undetermined_invocations.push(invoc); continue } }; progress = true; let ExpansionData { depth, mark, .. 
} = invoc.expansion_data;
            self.cx.current_expansion = invoc.expansion_data.clone();
            self.cx.current_expansion.mark = scope;
            // FIXME(jseyfried): Refactor out the following logic
            let (expanded_fragment, new_invocations) = if let Some(ext) = ext {
                if let Some(ext) = ext {
                    // A resolved macro: expand it (falling back to a dummy
                    // fragment on failure) and collect nested invocations.
                    let dummy = invoc.fragment_kind.dummy(invoc.span()).unwrap();
                    let fragment = self.expand_invoc(invoc, &*ext).unwrap_or(dummy);
                    self.collect_invocations(fragment, &[])
                } else if let InvocationKind::Attr { attr: None, traits, item } = invoc.kind {
                    if !item.derive_allowed() {
                        // `#[derive]` on an item that cannot be derived on:
                        // report it, suggesting an outer attribute when an
                        // inner one was written.
                        let attr = attr::find_by_name(item.attrs(), "derive")
                            .expect("`derive` attribute should exist");
                        let span = attr.span;
                        let mut err = self.cx.mut_span_err(span,
                                                           "`derive` may only be applied to \
                                                            structs, enums and unions");
                        if let ast::AttrStyle::Inner = attr.style {
                            let trait_list = traits.iter()
                                .map(|t| t.to_string()).collect::<Vec<_>>();
                            let suggestion = format!("#[derive({})]", trait_list.join(", "));
                            err.span_suggestion_with_applicability(
                                span, "try an outer attribute", suggestion,
                                // We don't know that the following item is an ADT
                                Applicability::MaybeIncorrect
                            );
                        }
                        err.emit();
                    }
                    // Strip the `derive` attribute itself before re-queueing
                    // the item once per derived trait.
                    let item = self.fully_configure(item)
                        .map_attrs(|mut attrs| {
                            attrs.retain(|a| a.path != "derive");
                            attrs
                        });
                    let item_with_markers =
                        add_derived_markers(&mut self.cx, item.span(), &traits, item.clone());
                    let derives = derives.entry(invoc.expansion_data.mark).or_default();

                    for path in &traits {
                        // Each derive gets a fresh expansion mark, recorded so
                        // placeholders can be resolved later.
                        let mark = Mark::fresh(self.cx.current_expansion.mark);
                        derives.push(mark);
                        let item = match self.cx.resolver.resolve_macro_path(
                                path, MacroKind::Derive, Mark::root(), &[], false) {
                            Ok(ext) => match *ext {
                                BuiltinDerive(..)
=> item_with_markers.clone(), _ => item.clone(), }, _ => item.clone(), }; invocations.push(Invocation { kind: InvocationKind::Derive { path: path.clone(), item: item }, fragment_kind: invoc.fragment_kind, expansion_data: ExpansionData { mark, ..invoc.expansion_data.clone() }, }); } let fragment = invoc.fragment_kind .expect_from_annotatables(::std::iter::once(item_with_markers)); self.collect_invocations(fragment, derives) } else { unreachable!() } } else { self.collect_invocations(invoc.fragment_kind.dummy(invoc.span()).unwrap(), &[]) }; if expanded_fragments.len() < depth { expanded_fragments.push(Vec::new()); } expanded_fragments[depth - 1].push((mark, expanded_fragment)); if !self.cx.ecfg.single_step { invocations.extend(new_invocations.into_iter().rev()); } } self.cx.current_expansion = orig_expansion_data; // Finally incorporate all the expanded macros into the input AST fragment. let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic); while let Some(expanded_fragments) = expanded_fragments.pop() { for (mark, expanded_fragment) in expanded_fragments.into_iter().rev() { let derives = derives.remove(&mark).unwrap_or_else(Vec::new); placeholder_expander.add(NodeId::placeholder_from_mark(mark), expanded_fragment, derives); } } fragment_with_placeholders.fold_with(&mut placeholder_expander) } fn resolve_imports(&mut self) { if self.monotonic { let err_count = self.cx.parse_sess.span_diagnostic.err_count(); self.cx.resolver.resolve_imports(); self.cx.resolve_err_count += self.cx.parse_sess.span_diagnostic.err_count() - err_count; } } /// Collect all macro invocations reachable at this time in this AST fragment, and replace /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s. /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and /// prepares data for resolving paths of macro invocations. 
fn collect_invocations(&mut self, fragment: AstFragment, derives: &[Mark])
                           -> (AstFragment, Vec<Invocation>) {
        // Fold the fragment with an InvocationCollector: cfg-stripping is
        // applied and every macro call is replaced by a placeholder while the
        // collector records the invocations it found.
        let (fragment_with_placeholders, invocations) = {
            let mut collector = InvocationCollector {
                cfg: StripUnconfigured {
                    should_test: self.cx.ecfg.should_test,
                    sess: self.cx.parse_sess,
                    features: self.cx.ecfg.features,
                },
                cx: self.cx,
                invocations: Vec::new(),
                monotonic: self.monotonic,
                tests_nameable: true,
            };
            (fragment.fold_with(&mut collector), collector.invocations)
        };

        if self.monotonic {
            // Let the resolver build its skeleton of the placeholder-filled
            // fragment, tracking how many new errors it reported meanwhile.
            let err_count = self.cx.parse_sess.span_diagnostic.err_count();
            let mark = self.cx.current_expansion.mark;
            self.cx.resolver.visit_ast_fragment_with_placeholders(mark,
                                                                  &fragment_with_placeholders,
                                                                  derives);
            self.cx.resolve_err_count += self.cx.parse_sess.span_diagnostic.err_count() - err_count;
        }

        (fragment_with_placeholders, invocations)
    }

    /// Re-run `cfg` stripping over an annotatable item, returning the single
    /// surviving element of each fold.
    fn fully_configure(&mut self, item: Annotatable) -> Annotatable {
        let mut cfg = StripUnconfigured {
            should_test: self.cx.ecfg.should_test,
            sess: self.cx.parse_sess,
            features: self.cx.ecfg.features,
        };
        // Since the item itself has already been configured by the InvocationCollector,
        // we know that fold result vector will contain exactly one element
        match item {
            Annotatable::Item(item) => {
                Annotatable::Item(cfg.fold_item(item).pop().unwrap())
            }
            Annotatable::TraitItem(item) => {
                Annotatable::TraitItem(item.map(|item| cfg.fold_trait_item(item).pop().unwrap()))
            }
            Annotatable::ImplItem(item) => {
                Annotatable::ImplItem(item.map(|item| cfg.fold_impl_item(item).pop().unwrap()))
            }
            Annotatable::ForeignItem(item) => {
                Annotatable::ForeignItem(
                    item.map(|item| cfg.fold_foreign_item(item).pop().unwrap())
                )
            }
            Annotatable::Stmt(stmt) => {
                Annotatable::Stmt(stmt.map(|stmt| cfg.fold_stmt(stmt).pop().unwrap()))
            }
            Annotatable::Expr(expr) => {
                Annotatable::Expr(cfg.fold_expr(expr))
            }
        }
    }

    /// Expand one collected invocation using the resolved extension `ext`
    /// (body continues past the end of this view).
    fn expand_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtension) -> Option<AstFragment> {
        if invoc.fragment_kind == AstFragmentKind::ForeignItems &&
!self.cx.ecfg.macros_in_extern_enabled() { if let SyntaxExtension::NonMacroAttr { .. } = *ext {} else { emit_feature_err(&self.cx.parse_sess, "macros_in_extern", invoc.span(), GateIssue::Language, "macro invocations in `extern {}` blocks are experimental"); } } let result = match invoc.kind { InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext)?, InvocationKind::Attr { .. } => self.expand_attr_invoc(invoc, ext)?, InvocationKind::Derive { .. } => self.expand_derive_invoc(invoc, ext)?, }; if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { let info = self.cx.current_expansion.mark.expn_info().unwrap(); let suggested_limit = self.cx.ecfg.recursion_limit * 2; let mut err = self.cx.struct_span_err(info.call_site, &format!("recursion limit reached while expanding the macro `{}`", info.format.name())); err.help(&format!( "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", suggested_limit)); err.emit(); self.cx.trace_macros_diag(); FatalError.raise(); } Some(result) } fn expand_attr_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtension) -> Option<AstFragment> { let (attr, item) = match invoc.kind { InvocationKind::Attr { attr, item, .. } => (attr?, item), _ => unreachable!(), }; if let NonMacroAttr { mark_used: false } = *ext {} else { // Macro attrs are always used when expanded, // non-macro attrs are considered used when the field says so. attr::mark_used(&attr); } invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: attr.span, def_site: None, format: MacroAttribute(Symbol::intern(&attr.path.to_string())), allow_internal_unstable: false, allow_internal_unsafe: false, local_inner_macros: false, edition: ext.edition(), }); match *ext { NonMacroAttr { .. 
} => { attr::mark_known(&attr); let item = item.map_attrs(|mut attrs| { attrs.push(attr); attrs }); Some(invoc.fragment_kind.expect_from_annotatables(iter::once(item))) } MultiModifier(ref mac) => { let meta = attr.parse_meta(self.cx.parse_sess) .map_err(|mut e| { e.emit(); }).ok()?; let item = mac.expand(self.cx, attr.span, &meta, item); Some(invoc.fragment_kind.expect_from_annotatables(item)) } MultiDecorator(ref mac) => { let mut items = Vec::new(); let meta = attr.parse_meta(self.cx.parse_sess) .expect("derive meta should already have been parsed"); mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item)); items.push(item); Some(invoc.fragment_kind.expect_from_annotatables(items)) } AttrProcMacro(ref mac, ..) => { self.gate_proc_macro_attr_item(attr.span, &item); let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()), Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()), Annotatable::Expr(expr) => token::NtExpr(expr), })).into(); let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span); let tok_result = mac.expand(self.cx, attr.span, input, item_tok); let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind, &attr.path, attr.span); self.gate_proc_macro_expansion(attr.span, &res); res } ProcMacroDerive(..) | BuiltinDerive(..) 
=> { self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path)); self.cx.trace_macros_diag(); invoc.fragment_kind.dummy(attr.span) } _ => { let msg = &format!("macro `{}` may not be used in attributes", attr.path); self.cx.span_err(attr.span, msg); self.cx.trace_macros_diag(); invoc.fragment_kind.dummy(attr.span) } } } fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream { let mut trees = tokens.trees(); match trees.next() { Some(TokenTree::Delimited(_, delim)) => { if trees.next().is_none() { return delim.tts.into() } } Some(TokenTree::Token(..)) => {} None => return TokenStream::empty(), } self.cx.span_err(span, "custom attribute invocations must be \ of the form #[foo] or #[foo(..)], the macro name must only be \ followed by a delimiter token"); TokenStream::empty() } fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) { let (kind, gate) = match *item { Annotatable::Item(ref item) => { match item.node { ItemKind::Mod(_) if self.cx.ecfg.proc_macro_mod() => return, ItemKind::Mod(_) => ("modules", "proc_macro_mod"), _ => return, } } Annotatable::TraitItem(_) => return, Annotatable::ImplItem(_) => return, Annotatable::ForeignItem(_) => return, Annotatable::Stmt(_) | Annotatable::Expr(_) if self.cx.ecfg.proc_macro_expr() => return, Annotatable::Stmt(_) => ("statements", "proc_macro_expr"), Annotatable::Expr(_) => ("expressions", "proc_macro_expr"), }; emit_feature_err( self.cx.parse_sess, gate, span, GateIssue::Language, &format!("custom attributes cannot be applied to {}", kind), ); } fn gate_proc_macro_expansion(&self, span: Span, fragment: &Option<AstFragment>) { if self.cx.ecfg.proc_macro_gen() { return } let fragment = match fragment { Some(fragment) => fragment, None => return, }; fragment.visit_with(&mut DisallowModules { span, parse_sess: self.cx.parse_sess, }); struct DisallowModules<'a> { span: Span, parse_sess: &'a ParseSess, } impl<'ast, 'a> Visitor<'ast> for DisallowModules<'a> { fn 
visit_item(&mut self, i: &'ast ast::Item) { let name = match i.node { ast::ItemKind::Mod(_) => Some("modules"), ast::ItemKind::MacroDef(_) => Some("macro definitions"), _ => None, }; if let Some(name) = name { emit_feature_err( self.parse_sess, "proc_macro_gen", self.span, GateIssue::Language, &format!("procedural macros cannot expand to {}", name), ); } visit::walk_item(self, i); } fn visit_mac(&mut self, _mac: &'ast ast::Mac) { // ... } } } /// Expand a macro invocation. Returns the resulting expanded AST fragment. fn expand_bang_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtension) -> Option<AstFragment> { let (mark, kind) = (invoc.expansion_data.mark, invoc.fragment_kind); let (mac, ident, span) = match invoc.kind { InvocationKind::Bang { mac, ident, span } => (mac, ident, span), _ => unreachable!(), }; let path = &mac.node.path; let ident = ident.unwrap_or_else(|| keywords::Invalid.ident()); let validate_and_set_expn_info = |this: &mut Self, // arg instead of capture def_site_span: Option<Span>, allow_internal_unstable, allow_internal_unsafe, local_inner_macros, // can't infer this type unstable_feature: Option<(Symbol, u32)>, edition| { // feature-gate the macro invocation if let Some((feature, issue)) = unstable_feature { let crate_span = this.cx.current_expansion.crate_span.unwrap(); // don't stability-check macros in the same crate // (the only time this is null is for syntax extensions registered as macros) if def_site_span.map_or(false, |def_span| !crate_span.contains(def_span)) && !span.allows_unstable() && this.cx.ecfg.features.map_or(true, |feats| { // macro features will count as lib features !feats.declared_lib_features.iter().any(|&(feat, _)| feat == feature) }) { let explain = format!("macro {}! 
is unstable", path); emit_feature_err(this.cx.parse_sess, &*feature.as_str(), span, GateIssue::Library(Some(issue)), &explain); this.cx.trace_macros_diag(); return Err(kind.dummy(span)); } } if ident.name != keywords::Invalid.name() { let msg = format!("macro {}! expects no ident argument, given '{}'", path, ident); this.cx.span_err(path.span, &msg); this.cx.trace_macros_diag(); return Err(kind.dummy(span)); } mark.set_expn_info(ExpnInfo { call_site: span, def_site: def_site_span, format: macro_bang_format(path), allow_internal_unstable, allow_internal_unsafe, local_inner_macros, edition, }); Ok(()) }; let opt_expanded = match *ext { DeclMacro { ref expander, def_info, edition, .. } => { if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s), false, false, false, None, edition) { dummy_span } else { kind.make_from(expander.expand(self.cx, span, mac.node.stream())) } } NormalTT { ref expander, def_info, allow_internal_unstable, allow_internal_unsafe, local_inner_macros, unstable_feature, edition, } => { if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s), allow_internal_unstable, allow_internal_unsafe, local_inner_macros, unstable_feature, edition) { dummy_span } else { kind.make_from(expander.expand(self.cx, span, mac.node.stream())) } } IdentTT(ref expander, tt_span, allow_internal_unstable) => { if ident.name == keywords::Invalid.name() { self.cx.span_err(path.span, &format!("macro {}! expects an ident argument", path)); self.cx.trace_macros_diag(); kind.dummy(span) } else { invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, def_site: tt_span, format: macro_bang_format(path), allow_internal_unstable, allow_internal_unsafe: false, local_inner_macros: false, edition: hygiene::default_edition(), }); let input: Vec<_> = mac.node.stream().into_trees().collect(); kind.make_from(expander.expand(self.cx, span, ident, input)) } } MultiDecorator(..) | MultiModifier(..) | AttrProcMacro(..) 
| SyntaxExtension::NonMacroAttr { .. } => { self.cx.span_err(path.span, &format!("`{}` can only be used in attributes", path)); self.cx.trace_macros_diag(); kind.dummy(span) } ProcMacroDerive(..) | BuiltinDerive(..) => { self.cx.span_err(path.span, &format!("`{}` is a derive mode", path)); self.cx.trace_macros_diag(); kind.dummy(span) } SyntaxExtension::ProcMacro { ref expander, allow_internal_unstable, edition } => { if ident.name != keywords::Invalid.name() { let msg = format!("macro {}! expects no ident argument, given '{}'", path, ident); self.cx.span_err(path.span, &msg); self.cx.trace_macros_diag(); kind.dummy(span) } else { self.gate_proc_macro_expansion_kind(span, kind); invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, // FIXME procedural macros do not have proper span info // yet, when they do, we should use it here. def_site: None, format: macro_bang_format(path), // FIXME probably want to follow macro_rules macros here. allow_internal_unstable, allow_internal_unsafe: false, local_inner_macros: false, edition, }); let tok_result = expander.expand(self.cx, span, mac.node.stream()); let result = self.parse_ast_fragment(tok_result, kind, path, span); self.gate_proc_macro_expansion(span, &result); result } } }; if opt_expanded.is_some() { opt_expanded } else { let msg = format!("non-{kind} macro in {kind} position: {name}", name = path.segments[0].ident.name, kind = kind.name()); self.cx.span_err(path.span, &msg); self.cx.trace_macros_diag(); kind.dummy(span) } } fn gate_proc_macro_expansion_kind(&self, span: Span, kind: AstFragmentKind) { let kind = match kind { AstFragmentKind::Expr => "expressions", AstFragmentKind::OptExpr => "expressions", AstFragmentKind::Pat => "patterns", AstFragmentKind::Ty => "types", AstFragmentKind::Stmts => "statements", AstFragmentKind::Items => return, AstFragmentKind::TraitItems => return, AstFragmentKind::ImplItems => return, AstFragmentKind::ForeignItems => return, }; if self.cx.ecfg.proc_macro_non_items() 
{ return } emit_feature_err( self.cx.parse_sess, "proc_macro_non_items", span, GateIssue::Language, &format!("procedural macros cannot be expanded to {}", kind), ); } /// Expand a derive invocation. Returns the resulting expanded AST fragment. fn expand_derive_invoc(&mut self, invoc: Invocation, ext: &SyntaxExtension) -> Option<AstFragment> { let (path, item) = match invoc.kind { InvocationKind::Derive { path, item } => (path, item), _ => unreachable!(), }; if !item.derive_allowed() { return None; } let pretty_name = Symbol::intern(&format!("derive({})", path)); let span = path.span; let attr = ast::Attribute { path, span, tokens: TokenStream::empty(), // irrelevant: id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, }; let mut expn_info = ExpnInfo { call_site: span, def_site: None, format: MacroAttribute(pretty_name), allow_internal_unstable: false, allow_internal_unsafe: false, local_inner_macros: false, edition: ext.edition(), }; match *ext { ProcMacroDerive(ref ext, ..) 
=> { invoc.expansion_data.mark.set_expn_info(expn_info); let span = span.with_ctxt(self.cx.backtrace()); let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this ident: Path::from_ident(keywords::Invalid.ident()), span: DUMMY_SP, node: ast::MetaItemKind::Word, }; let items = ext.expand(self.cx, span, &dummy, item); Some(invoc.fragment_kind.expect_from_annotatables(items)) } BuiltinDerive(func) => { expn_info.allow_internal_unstable = true; invoc.expansion_data.mark.set_expn_info(expn_info); let span = span.with_ctxt(self.cx.backtrace()); let mut items = Vec::new(); func(self.cx, span, &attr.meta()?, &item, &mut |a| items.push(a)); Some(invoc.fragment_kind.expect_from_annotatables(items)) } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); self.cx.span_err(span, msg); self.cx.trace_macros_diag(); invoc.fragment_kind.dummy(span) } } } fn parse_ast_fragment(&mut self, toks: TokenStream, kind: AstFragmentKind, path: &Path, span: Span) -> Option<AstFragment> { let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>()); match parser.parse_ast_fragment(kind, false) { Ok(fragment) => { parser.ensure_complete_parse(path, kind.name(), span); Some(fragment) } Err(mut err) => { err.set_span(span); err.emit(); self.cx.trace_macros_diag(); kind.dummy(span) } } } } impl<'a> Parser<'a> { pub fn parse_ast_fragment(&mut self, kind: AstFragmentKind, macro_legacy_warnings: bool) -> PResult<'a, AstFragment> { Ok(match kind { AstFragmentKind::Items => { let mut items = OneVector::new(); while let Some(item) = self.parse_item()? 
{ items.push(item); } AstFragment::Items(items) } AstFragmentKind::TraitItems => { let mut items = OneVector::new(); while self.token != token::Eof { items.push(self.parse_trait_item(&mut false)?); } AstFragment::TraitItems(items) } AstFragmentKind::ImplItems => { let mut items = OneVector::new(); while self.token != token::Eof { items.push(self.parse_impl_item(&mut false)?); } AstFragment::ImplItems(items) } AstFragmentKind::ForeignItems => { let mut items = OneVector::new(); while self.token != token::Eof { if let Some(item) = self.parse_foreign_item()? { items.push(item); } } AstFragment::ForeignItems(items) } AstFragmentKind::Stmts => { let mut stmts = OneVector::new(); while self.token != token::Eof && // won't make progress on a `}` self.token != token::CloseDelim(token::Brace) { if let Some(stmt) = self.parse_full_stmt(macro_legacy_warnings)? { stmts.push(stmt); } } AstFragment::Stmts(stmts) } AstFragmentKind::Expr => AstFragment::Expr(self.parse_expr()?), AstFragmentKind::OptExpr => { if self.token != token::Eof { AstFragment::OptExpr(Some(self.parse_expr()?)) } else { AstFragment::OptExpr(None) } }, AstFragmentKind::Ty => AstFragment::Ty(self.parse_ty()?), AstFragmentKind::Pat => AstFragment::Pat(self.parse_pat()?), }) } pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); // Avoid emitting backtrace info twice. 
let def_site_span = self.span.with_ctxt(SyntaxContext::empty()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", macro_path, kind_name); err.span_note(span, &msg).emit(); } } } struct InvocationCollector<'a, 'b: 'a> { cx: &'a mut ExtCtxt<'b>, cfg: StripUnconfigured<'a>, invocations: Vec<Invocation>, monotonic: bool, /// Test functions need to be nameable. Tests inside functions or in other /// unnameable locations need to be ignored. `tests_nameable` tracks whether /// any test functions found in the current context would be nameable. tests_nameable: bool, } impl<'a, 'b> InvocationCollector<'a, 'b> { fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment { let mark = Mark::fresh(self.cx.current_expansion.mark); self.invocations.push(Invocation { kind, fragment_kind, expansion_data: ExpansionData { mark, depth: self.cx.current_expansion.depth + 1, ..self.cx.current_expansion.clone() }, }); placeholder(fragment_kind, NodeId::placeholder_from_mark(mark)) } /// Folds the item allowing tests to be expanded because they are still nameable. 
/// This should probably only be called with module items fn fold_nameable(&mut self, item: P<ast::Item>) -> OneVector<P<ast::Item>> { fold::noop_fold_item(item, self) } /// Folds the item but doesn't allow tests to occur within it fn fold_unnameable(&mut self, item: P<ast::Item>) -> OneVector<P<ast::Item>> { let was_nameable = mem::replace(&mut self.tests_nameable, false); let items = fold::noop_fold_item(item, self); self.tests_nameable = was_nameable; items } fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment { self.collect(kind, InvocationKind::Bang { mac: mac, ident: None, span: span }) } fn collect_attr(&mut self, attr: Option<ast::Attribute>, traits: Vec<Path>, item: Annotatable, kind: AstFragmentKind) -> AstFragment { self.collect(kind, InvocationKind::Attr { attr, traits, item }) } /// If `item` is an attr invocation, remove and return the macro attribute and derive traits. fn classify_item<T>(&mut self, mut item: T) -> (Option<ast::Attribute>, Vec<Path>, T) where T: HasAttrs, { let (mut attr, mut traits) = (None, Vec::new()); item = item.map_attrs(|mut attrs| { if let Some(legacy_attr_invoc) = self.cx.resolver.find_legacy_attr_invoc(&mut attrs, true) { attr = Some(legacy_attr_invoc); return attrs; } attr = find_attr_invoc(&mut attrs); traits = collect_derives(&mut self.cx, &mut attrs); attrs }); (attr, traits, item) } /// Alternative of `classify_item()` that ignores `#[derive]` so invocations fallthrough /// to the unused-attributes lint (making it an error on statements and expressions /// is a breaking change) fn classify_nonitem<T: HasAttrs>(&mut self, mut item: T) -> (Option<ast::Attribute>, T) { let mut attr = None; item = item.map_attrs(|mut attrs| { if let Some(legacy_attr_invoc) = self.cx.resolver.find_legacy_attr_invoc(&mut attrs, false) { attr = Some(legacy_attr_invoc); return attrs; } attr = find_attr_invoc(&mut attrs); attrs }); (attr, item) } fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> 
{ self.cfg.configure(node) }

    // Detect use of feature-gated or invalid attributes on macro invocations
    // since they will not be detected after macro expansion.
    fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
        // NOTE(review): `unwrap` assumes features are always populated by the
        // time expansion runs — confirm against ExpansionConfig construction.
        let features = self.cx.ecfg.features.unwrap();
        for attr in attrs.iter() {
            self.check_attribute_inner(attr, features);

            // macros are expanded before any lint passes so this warning has to be hardcoded
            if attr.path == "derive" {
                self.cx.struct_span_warn(attr.span, "`#[derive]` does nothing on macro invocations")
                    .note("this may become a hard error in a future release")
                    .emit();
            }
        }
    }

    // Single-attribute variant of `check_attributes`.
    fn check_attribute(&mut self, at: &ast::Attribute) {
        let features = self.cx.ecfg.features.unwrap();
        self.check_attribute_inner(at, features);
    }

    fn check_attribute_inner(&mut self, at: &ast::Attribute, features: &Features) {
        feature_gate::check_attribute(at, self.cx.parse_sess, features);
    }
}

/// Removes and returns the first attribute in `attrs` that is neither a known
/// attribute nor a built-in one — i.e. the first candidate for an
/// attribute-macro invocation. Returns `None` if every attribute is known.
pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
    attrs.iter()
         .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
         .map(|i| attrs.remove(i))
}

impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
        let mut expr = self.cfg.configure_expr(expr).into_inner();
        expr.node = self.cfg.configure_expr_kind(expr.node);

        // ignore derives so they remain unused
        let (attr, expr) = self.classify_nonitem(expr);

        if attr.is_some() {
            // collect the invoc regardless of whether or not attributes are permitted here
            // expansion will eat the attribute so it won't error later
            attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));

            // AstFragmentKind::Expr requires the macro to emit an expression
            return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
                                     AstFragmentKind::Expr).make_expr();
        }

        if let ast::ExprKind::Mac(mac) = expr.node {
            self.check_attributes(&expr.attrs);
            self.collect_bang(mac, expr.span, AstFragmentKind::Expr).make_expr()
        } else {
            P(noop_fold_expr(expr, self))
        }
    }

    fn
fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
        let mut expr = configure!(self, expr).into_inner();
        expr.node = self.cfg.configure_expr_kind(expr.node);

        // ignore derives so they remain unused
        let (attr, expr) = self.classify_nonitem(expr);

        if attr.is_some() {
            attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a));

            return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)),
                                     AstFragmentKind::OptExpr)
                .make_opt_expr();
        }

        if let ast::ExprKind::Mac(mac) = expr.node {
            self.check_attributes(&expr.attrs);
            self.collect_bang(mac, expr.span, AstFragmentKind::OptExpr).make_opt_expr()
        } else {
            Some(P(noop_fold_expr(expr, self)))
        }
    }

    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
        let pat = self.cfg.configure_pat(pat);
        // Only macro patterns need collection; all other patterns fold normally.
        match pat.node {
            PatKind::Mac(_) => {}
            _ => return noop_fold_pat(pat, self),
        }

        pat.and_then(|pat| match pat.node {
            PatKind::Mac(mac) =>
                self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(),
            _ => unreachable!(),
        })
    }

    fn fold_stmt(&mut self, stmt: ast::Stmt) -> OneVector<ast::Stmt> {
        // `configure_stmt` may strip the statement entirely (cfg'd out).
        let mut stmt = match self.cfg.configure_stmt(stmt) {
            Some(stmt) => stmt,
            None => return OneVector::new(),
        };

        // we'll expand attributes on expressions separately
        if !stmt.is_expr() {
            let (attr, derives, stmt_) = if stmt.is_item() {
                self.classify_item(stmt)
            } else {
                // ignore derives on non-item statements so it falls through
                // to the unused-attributes lint
                let (attr, stmt) = self.classify_nonitem(stmt);
                (attr, vec![], stmt)
            };

            if attr.is_some() || !derives.is_empty() {
                return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt_)),
                                         AstFragmentKind::Stmts).make_stmts();
            }

            stmt = stmt_;
        }

        if let StmtKind::Mac(mac) = stmt.node {
            let (mac, style, attrs) = mac.into_inner();
            self.check_attributes(&attrs);
            let mut placeholder = self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts)
                .make_stmts();

            // If this is a macro invocation with a semicolon, then apply that
            // semicolon to the final statement produced by expansion.
if style == MacStmtStyle::Semicolon { if let Some(stmt) = placeholder.pop() { placeholder.push(stmt.add_trailing_semicolon()); } } return placeholder; } // The placeholder expander gives ids to statements, so we avoid folding the id here. let ast::Stmt { id, node, span } = stmt; noop_fold_stmt_kind(node, self).into_iter().map(|node| { ast::Stmt { id, node, span } }).collect() } fn fold_block(&mut self, block: P<Block>) -> P<Block> { let old_directory_ownership = self.cx.current_expansion.directory_ownership; self.cx.current_expansion.directory_ownership = DirectoryOwnership::UnownedViaBlock; let result = noop_fold_block(block, self); self.cx.current_expansion.directory_ownership = old_directory_ownership; result } fn fold_item(&mut self, item: P<ast::Item>) -> OneVector<P<ast::Item>> { let item = configure!(self, item); let (attr, traits, mut item) = self.classify_item(item); if attr.is_some() || !traits.is_empty() { let item = Annotatable::Item(item); return self.collect_attr(attr, traits, item, AstFragmentKind::Items).make_items(); } match item.node { ast::ItemKind::Mac(..) => { self.check_attributes(&item.attrs); item.and_then(|item| match item.node { ItemKind::Mac(mac) => { self.collect(AstFragmentKind::Items, InvocationKind::Bang { mac, ident: Some(item.ident), span: item.span, }).make_items() } _ => unreachable!(), }) } ast::ItemKind::Mod(ast::Mod { inner, .. }) => { if item.ident == keywords::Invalid.ident() { return self.fold_nameable(item); } let orig_directory_ownership = self.cx.current_expansion.directory_ownership; let mut module = (*self.cx.current_expansion.module).clone(); module.mod_path.push(item.ident); // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`). // In the non-inline case, `inner` is never the dummy span (c.f. `parse_item_mod`). // Thus, if `inner` is the dummy span, we know the module is inline. 
let inline_module = item.span.contains(inner) || inner.is_dummy();

                if inline_module {
                    if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
                        // An explicit `#[path]` resets relative ownership.
                        self.cx.current_expansion.directory_ownership =
                            DirectoryOwnership::Owned { relative: None };
                        module.directory.push(&*path.as_str());
                    } else {
                        module.directory.push(&*item.ident.as_str());
                    }
                } else {
                    let path = self.cx.parse_sess.source_map().span_to_unmapped_path(inner);
                    let mut path = match path {
                        FileName::Real(path) => path,
                        other => PathBuf::from(other.to_string()),
                    };
                    // `mod.rs` keeps submodules in the same directory, while
                    // `foo.rs` makes submodule paths relative to `foo/`.
                    let directory_ownership = match path.file_name().unwrap().to_str() {
                        Some("mod.rs") => DirectoryOwnership::Owned { relative: None },
                        Some(_) => DirectoryOwnership::Owned {
                            relative: Some(item.ident),
                        },
                        None => DirectoryOwnership::UnownedViaMod(false),
                    };
                    path.pop();
                    module.directory = path;
                    self.cx.current_expansion.directory_ownership = directory_ownership;
                }

                // Fold the module body with the updated module/directory state,
                // then restore the previous state on the way out.
                let orig_module =
                    mem::replace(&mut self.cx.current_expansion.module, Rc::new(module));
                let result = self.fold_nameable(item);
                self.cx.current_expansion.module = orig_module;
                self.cx.current_expansion.directory_ownership = orig_directory_ownership;
                result
            }
            // Ensure that test functions are accessible from the test harness.
            // #[test] fn foo() {}
            // becomes:
            // #[test] pub fn foo_gensym(){}
            // #[allow(unused)]
            // use foo_gensym as foo;
            ast::ItemKind::Fn(..)
if self.cx.ecfg.should_test => { if self.tests_nameable && item.attrs.iter().any(|attr| is_test_or_bench(attr)) { let orig_ident = item.ident; let orig_vis = item.vis.clone(); // Publicize the item under gensymed name to avoid pollution item = item.map(|mut item| { item.vis = respan(item.vis.span, ast::VisibilityKind::Public); item.ident = item.ident.gensym(); item }); // Use the gensymed name under the item's original visibility let mut use_item = self.cx.item_use_simple_( item.ident.span, orig_vis, Some(orig_ident), self.cx.path(item.ident.span, vec![keywords::SelfValue.ident(), item.ident])); // #[allow(unused)] because the test function probably isn't being referenced use_item = use_item.map(|mut ui| { ui.attrs.push( self.cx.attribute(DUMMY_SP, attr::mk_list_item(DUMMY_SP, Ident::from_str("allow"), vec![ attr::mk_nested_word_item(Ident::from_str("unused")) ] )) ); ui }); OneVector::from_iter( self.fold_unnameable(item).into_iter() .chain(self.fold_unnameable(use_item))) } else { self.fold_unnameable(item) } } _ => self.fold_unnameable(item), } } fn fold_trait_item(&mut self, item: ast::TraitItem) -> OneVector<ast::TraitItem> { let item = configure!(self, item); let (attr, traits, item) = self.classify_item(item); if attr.is_some() || !traits.is_empty() { let item = Annotatable::TraitItem(P(item)); return self.collect_attr(attr, traits, item, AstFragmentKind::TraitItems) .make_trait_items() } match item.node { ast::TraitItemKind::Macro(mac) => { let ast::TraitItem { attrs, span, .. 
} = item; self.check_attributes(&attrs); self.collect_bang(mac, span, AstFragmentKind::TraitItems).make_trait_items() } _ => fold::noop_fold_trait_item(item, self), } } fn fold_impl_item(&mut self, item: ast::ImplItem) -> OneVector<ast::ImplItem> { let item = configure!(self, item); let (attr, traits, item) = self.classify_item(item); if attr.is_some() || !traits.is_empty() { let item = Annotatable::ImplItem(P(item)); return self.collect_attr(attr, traits, item, AstFragmentKind::ImplItems) .make_impl_items(); } match item.node { ast::ImplItemKind::Macro(mac) => { let ast::ImplItem { attrs, span, .. } = item; self.check_attributes(&attrs); self.collect_bang(mac, span, AstFragmentKind::ImplItems).make_impl_items() } _ => fold::noop_fold_impl_item(item, self), } } fn fold_ty(&mut self, ty: P<ast::Ty>) -> P<ast::Ty> { let ty = match ty.node { ast::TyKind::Mac(_) => ty.into_inner(), _ => return fold::noop_fold_ty(ty, self), }; match ty.node { ast::TyKind::Mac(mac) => self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(), _ => unreachable!(), } } fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { noop_fold_foreign_mod(self.cfg.configure_foreign_mod(foreign_mod), self) } fn fold_foreign_item(&mut self, foreign_item: ast::ForeignItem) -> OneVector<ast::ForeignItem> { let (attr, traits, foreign_item) = self.classify_item(foreign_item); if attr.is_some() || !traits.is_empty() { let item = Annotatable::ForeignItem(P(foreign_item)); return self.collect_attr(attr, traits, item, AstFragmentKind::ForeignItems) .make_foreign_items(); } if let ast::ForeignItemKind::Macro(mac) = foreign_item.node { self.check_attributes(&foreign_item.attrs); return self.collect_bang(mac, foreign_item.span, AstFragmentKind::ForeignItems) .make_foreign_items(); } noop_fold_foreign_item(foreign_item, self) } fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind { match item { ast::ItemKind::MacroDef(..) 
=> item, _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self), } } fn fold_generic_param(&mut self, param: ast::GenericParam) -> ast::GenericParam { self.cfg.disallow_cfg_on_generic_param(&param); noop_fold_generic_param(param, self) } fn fold_attribute(&mut self, at: ast::Attribute) -> Option<ast::Attribute> { // turn `#[doc(include="filename")]` attributes into `#[doc(include(file="filename", // contents="file contents")]` attributes if !at.check_name("doc") { return noop_fold_attribute(at, self); } if let Some(list) = at.meta_item_list() { if !list.iter().any(|it| it.check_name("include")) { return noop_fold_attribute(at, self); } let mut items = vec![]; for it in list { if !it.check_name("include") { items.push(noop_fold_meta_list_item(it, self)); continue; } if let Some(file) = it.value_str() { let err_count = self.cx.parse_sess.span_diagnostic.err_count(); self.check_attribute(&at); if self.cx.parse_sess.span_diagnostic.err_count() > err_count { // avoid loading the file if they haven't enabled the feature return noop_fold_attribute(at, self); } let mut buf = vec![]; let filename = self.cx.root_path.join(file.to_string()); match File::open(&filename).and_then(|mut f| f.read_to_end(&mut buf)) { Ok(..) 
=> {} Err(e) => { self.cx.span_err(at.span, &format!("couldn't read {}: {}", filename.display(), e)); } } match String::from_utf8(buf) { Ok(src) => { let src_interned = Symbol::intern(&src); // Add this input file to the code map to make it available as // dependency information self.cx.source_map().new_source_file(filename.into(), src); let include_info = vec![ dummy_spanned(ast::NestedMetaItemKind::MetaItem( attr::mk_name_value_item_str(Ident::from_str("file"), dummy_spanned(file)))), dummy_spanned(ast::NestedMetaItemKind::MetaItem( attr::mk_name_value_item_str(Ident::from_str("contents"), dummy_spanned(src_interned)))), ]; let include_ident = Ident::from_str("include"); let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info); items.push(dummy_spanned(ast::NestedMetaItemKind::MetaItem(item))); } Err(_) => { self.cx.span_err(at.span, &format!("{} wasn't a utf-8 file", filename.display())); } } } else { items.push(noop_fold_meta_list_item(it, self)); } } let meta = attr::mk_list_item(DUMMY_SP, Ident::from_str("doc"), items); match at.style { ast::AttrStyle::Inner => Some(attr::mk_spanned_attr_inner(at.span, at.id, meta)), ast::AttrStyle::Outer => Some(attr::mk_spanned_attr_outer(at.span, at.id, meta)), } } else { noop_fold_attribute(at, self) } } fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId { if self.monotonic { assert_eq!(id, ast::DUMMY_NODE_ID); self.cx.resolver.next_node_id() } else { id } } } pub struct ExpansionConfig<'feat> { pub crate_name: String, pub features: Option<&'feat Features>, pub recursion_limit: usize, pub trace_mac: bool, pub should_test: bool, // If false, strip `#[test]` nodes pub single_step: bool, pub keep_macs: bool, } macro_rules! feature_tests { ($( fn $getter:ident = $field:ident, )*) => { $( pub fn $getter(&self) -> bool { match self.features { Some(&Features { $field: true, .. 
}) => true, _ => false, } } )* } } impl<'feat> ExpansionConfig<'feat> { pub fn default(crate_name: String) -> ExpansionConfig<'static> { ExpansionConfig { crate_name, features: None, recursion_limit: 1024, trace_mac: false, should_test: false, single_step: false, keep_macs: false, } } feature_tests! { fn enable_quotes = quote, fn enable_asm = asm, fn enable_global_asm = global_asm, fn enable_log_syntax = log_syntax, fn enable_concat_idents = concat_idents, fn enable_trace_macros = trace_macros, fn enable_allow_internal_unstable = allow_internal_unstable, fn enable_custom_derive = custom_derive, fn enable_format_args_nl = format_args_nl, fn macros_in_extern_enabled = macros_in_extern, fn proc_macro_mod = proc_macro_mod, fn proc_macro_gen = proc_macro_gen, fn proc_macro_expr = proc_macro_expr, fn proc_macro_non_items = proc_macro_non_items, } } // A Marker adds the given mark to the syntax context. #[derive(Debug)] pub struct Marker(pub Mark); impl Folder for Marker { fn new_span(&mut self, span: Span) -> Span { span.apply_mark(self.0) } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { noop_fold_mac(mac, self) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/base.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::SyntaxExtension::*; use ast::{self, Attribute, Name, PatKind, MetaItem}; use attr::HasAttrs; use source_map::{self, SourceMap, Spanned, respan}; use syntax_pos::{Span, MultiSpan, DUMMY_SP}; use edition::Edition; use errors::{DiagnosticBuilder, DiagnosticId}; use ext::expand::{self, AstFragment, Invocation}; use ext::hygiene::{self, Mark, SyntaxContext, Transparency}; use fold::{self, Folder}; use parse::{self, parser, DirectoryOwnership}; use parse::token; use ptr::P; use OneVector; use symbol::{keywords, Ident, Symbol}; use ThinVec; use std::collections::HashMap; use std::iter; use std::path::PathBuf; use std::rc::Rc; use rustc_data_structures::sync::{self, Lrc}; use std::default::Default; use tokenstream::{self, TokenStream}; #[derive(Debug,Clone)] pub enum Annotatable { Item(P<ast::Item>), TraitItem(P<ast::TraitItem>), ImplItem(P<ast::ImplItem>), ForeignItem(P<ast::ForeignItem>), Stmt(P<ast::Stmt>), Expr(P<ast::Expr>), } impl HasAttrs for Annotatable { fn attrs(&self) -> &[Attribute] { match *self { Annotatable::Item(ref item) => &item.attrs, Annotatable::TraitItem(ref trait_item) => &trait_item.attrs, Annotatable::ImplItem(ref impl_item) => &impl_item.attrs, Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs, Annotatable::Stmt(ref stmt) => stmt.attrs(), Annotatable::Expr(ref expr) => &expr.attrs, } } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self { match self { Annotatable::Item(item) => Annotatable::Item(item.map_attrs(f)), 
Annotatable::TraitItem(trait_item) =>
                Annotatable::TraitItem(trait_item.map_attrs(f)),
            Annotatable::ImplItem(impl_item) =>
                Annotatable::ImplItem(impl_item.map_attrs(f)),
            Annotatable::ForeignItem(foreign_item) =>
                Annotatable::ForeignItem(foreign_item.map_attrs(f)),
            Annotatable::Stmt(stmt) => Annotatable::Stmt(stmt.map_attrs(f)),
            Annotatable::Expr(expr) => Annotatable::Expr(expr.map_attrs(f)),
        }
    }
}

impl Annotatable {
    /// The source span of the annotated node, whichever variant it is.
    pub fn span(&self) -> Span {
        match *self {
            Annotatable::Item(ref item) => item.span,
            Annotatable::TraitItem(ref trait_item) => trait_item.span,
            Annotatable::ImplItem(ref impl_item) => impl_item.span,
            Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
            Annotatable::Stmt(ref stmt) => stmt.span,
            Annotatable::Expr(ref expr) => expr.span,
        }
    }

    // The `expect_*` helpers unwrap a specific variant and panic on any other;
    // callers use them when context already guarantees the variant.
    pub fn expect_item(self) -> P<ast::Item> {
        match self {
            Annotatable::Item(i) => i,
            _ => panic!("expected Item")
        }
    }

    /// Applies `f` if this is an `Item`, otherwise falls back to `or`.
    pub fn map_item_or<F, G>(self, mut f: F, mut or: G) -> Annotatable
        where F: FnMut(P<ast::Item>) -> P<ast::Item>,
              G: FnMut(Annotatable) -> Annotatable
    {
        match self {
            Annotatable::Item(i) => Annotatable::Item(f(i)),
            _ => or(self)
        }
    }

    pub fn expect_trait_item(self) -> ast::TraitItem {
        match self {
            Annotatable::TraitItem(i) => i.into_inner(),
            _ => panic!("expected Item")
        }
    }

    pub fn expect_impl_item(self) -> ast::ImplItem {
        match self {
            Annotatable::ImplItem(i) => i.into_inner(),
            _ => panic!("expected Item")
        }
    }

    pub fn expect_foreign_item(self) -> ast::ForeignItem {
        match self {
            Annotatable::ForeignItem(i) => i.into_inner(),
            _ => panic!("expected foreign item")
        }
    }

    pub fn expect_stmt(self) -> ast::Stmt {
        match self {
            Annotatable::Stmt(stmt) => stmt.into_inner(),
            _ => panic!("expected statement"),
        }
    }

    pub fn expect_expr(self) -> P<ast::Expr> {
        match self {
            Annotatable::Expr(expr) => expr,
            _ => panic!("expected expression"),
        }
    }

    /// Whether `#[derive]` may legally be applied to this node — only
    /// structs, enums and unions can derive traits.
    pub fn derive_allowed(&self) -> bool {
        match *self {
            Annotatable::Item(ref item) => match item.node {
                ast::ItemKind::Struct(..) |
                ast::ItemKind::Enum(..)
| ast::ItemKind::Union(..) => true, _ => false, }, _ => false, } } } // A more flexible ItemDecorator. pub trait MultiItemDecorator { fn expand(&self, ecx: &mut ExtCtxt, sp: Span, meta_item: &ast::MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)); } impl<F> MultiItemDecorator for F where F : Fn(&mut ExtCtxt, Span, &ast::MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)) { fn expand(&self, ecx: &mut ExtCtxt, sp: Span, meta_item: &ast::MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { (*self)(ecx, sp, meta_item, item, push) } } // `meta_item` is the annotation, and `item` is the item being modified. // FIXME Decorators should follow the same pattern too. pub trait MultiItemModifier { fn expand(&self, ecx: &mut ExtCtxt, span: Span, meta_item: &ast::MetaItem, item: Annotatable) -> Vec<Annotatable>; } impl<F, T> MultiItemModifier for F where F: Fn(&mut ExtCtxt, Span, &ast::MetaItem, Annotatable) -> T, T: Into<Vec<Annotatable>>, { fn expand(&self, ecx: &mut ExtCtxt, span: Span, meta_item: &ast::MetaItem, item: Annotatable) -> Vec<Annotatable> { (*self)(ecx, span, meta_item, item).into() } } impl Into<Vec<Annotatable>> for Annotatable { fn into(self) -> Vec<Annotatable> { vec![self] } } pub trait ProcMacro { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, ts: TokenStream) -> TokenStream; } impl<F> ProcMacro for F where F: Fn(TokenStream) -> TokenStream { fn expand<'cx>(&self, _ecx: &'cx mut ExtCtxt, _span: Span, ts: TokenStream) -> TokenStream { // FIXME setup implicit context in TLS before calling self. (*self)(ts) } } pub trait AttrProcMacro { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, annotation: TokenStream, annotated: TokenStream) -> TokenStream; } impl<F> AttrProcMacro for F where F: Fn(TokenStream, TokenStream) -> TokenStream { fn expand<'cx>(&self, _ecx: &'cx mut ExtCtxt, _span: Span, annotation: TokenStream, annotated: TokenStream) -> TokenStream { // FIXME setup implicit context in TLS before calling self. 
(*self)(annotation, annotated) } } /// Represents a thing that maps token trees to Macro Results pub trait TTMacroExpander { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) -> Box<dyn MacResult+'cx>; } pub type MacroExpanderFn = for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<dyn MacResult+'cx>; impl<F> TTMacroExpander for F where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<dyn MacResult+'cx> { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) -> Box<dyn MacResult+'cx> { struct AvoidInterpolatedIdents; impl Folder for AvoidInterpolatedIdents { fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { if let token::NtIdent(ident, is_raw) = nt.0 { return tokenstream::TokenTree::Token(ident.span, token::Ident(ident, is_raw)); } } fold::noop_fold_tt(tt, self) } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fold::noop_fold_mac(mac, self) } } let input: Vec<_> = input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect(); (*self)(ecx, span, &input) } } pub trait IdentMacroExpander { fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, token_tree: Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx>; } pub type IdentMacroExpanderFn = for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx>; impl<F> IdentMacroExpander for F where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx> { fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, token_tree: Vec<tokenstream::TokenTree>) -> Box<dyn MacResult+'cx> { (*self)(cx, sp, ident, token_tree) } } // Use a macro because forwarding to a simple function has type system issues macro_rules! 
make_stmts_default {
    ($me:expr) => {
        $me.make_expr().map(|e| smallvec![ast::Stmt {
            id: ast::DUMMY_NODE_ID,
            span: e.span,
            node: ast::StmtKind::Expr(e),
        }])
    }
}

/// The result of a macro expansion. The return values of the various
/// methods are spliced into the AST at the callsite of the macro.
pub trait MacResult {
    /// Create an expression.
    fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
        None
    }

    /// Create zero or more items.
    fn make_items(self: Box<Self>) -> Option<OneVector<P<ast::Item>>> {
        None
    }

    /// Create zero or more impl items.
    fn make_impl_items(self: Box<Self>) -> Option<OneVector<ast::ImplItem>> {
        None
    }

    /// Create zero or more trait items.
    fn make_trait_items(self: Box<Self>) -> Option<OneVector<ast::TraitItem>> {
        None
    }

    /// Create zero or more items in an `extern {}` block
    fn make_foreign_items(self: Box<Self>) -> Option<OneVector<ast::ForeignItem>> {
        None
    }

    /// Create a pattern.
    fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
        None
    }

    /// Create zero or more statements.
    ///
    /// By default this attempts to create an expression statement,
    /// returning None if that fails.
    fn make_stmts(self: Box<Self>) -> Option<OneVector<ast::Stmt>> {
        make_stmts_default!(self)
    }

    /// Create a type. Defaults to `None`, like the other `make_*` hooks.
    fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
        None
    }
}

macro_rules! make_MacEager {
    ( $( $fld:ident: $t:ty, )* ) => {
        /// `MacResult` implementation for the common case where you've already
        /// built each form of AST that you might return.
        #[derive(Default)]
        pub struct MacEager {
            $(
                pub $fld: Option<$t>,
            )*
        }

        impl MacEager {
            $(
                pub fn $fld(v: $t) -> Box<dyn MacResult> {
                    Box::new(MacEager {
                        $fld: Some(v),
                        ..Default::default()
                    })
                }
            )*
        }
    }
}

make_MacEager!
{ expr: P<ast::Expr>, pat: P<ast::Pat>, items: OneVector<P<ast::Item>>, impl_items: OneVector<ast::ImplItem>, trait_items: OneVector<ast::TraitItem>, foreign_items: OneVector<ast::ForeignItem>, stmts: OneVector<ast::Stmt>, ty: P<ast::Ty>, } impl MacResult for MacEager { fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> { self.expr } fn make_items(self: Box<Self>) -> Option<OneVector<P<ast::Item>>> { self.items } fn make_impl_items(self: Box<Self>) -> Option<OneVector<ast::ImplItem>> { self.impl_items } fn make_trait_items(self: Box<Self>) -> Option<OneVector<ast::TraitItem>> { self.trait_items } fn make_foreign_items(self: Box<Self>) -> Option<OneVector<ast::ForeignItem>> { self.foreign_items } fn make_stmts(self: Box<Self>) -> Option<OneVector<ast::Stmt>> { match self.stmts.as_ref().map_or(0, |s| s.len()) { 0 => make_stmts_default!(self), _ => self.stmts, } } fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> { if let Some(p) = self.pat { return Some(p); } if let Some(e) = self.expr { if let ast::ExprKind::Lit(_) = e.node { return Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, span: e.span, node: PatKind::Lit(e), })); } } None } fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> { self.ty } } /// Fill-in macro expansion result, to allow compilation to continue /// after hitting errors. #[derive(Copy, Clone)] pub struct DummyResult { expr_only: bool, span: Span } impl DummyResult { /// Create a default MacResult that can be anything. /// /// Use this as a return value after hitting any errors and /// calling `span_err`. pub fn any(sp: Span) -> Box<dyn MacResult+'static> { Box::new(DummyResult { expr_only: false, span: sp }) } /// Create a default MacResult that can only be an expression. /// /// Use this for macros that must expand to an expression, so even /// if an error is encountered internally, the user will receive /// an error that they also used it in the wrong place. 
pub fn expr(sp: Span) -> Box<dyn MacResult+'static> { Box::new(DummyResult { expr_only: true, span: sp }) } /// A plain dummy expression. pub fn raw_expr(sp: Span) -> P<ast::Expr> { P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(source_map::respan(sp, ast::LitKind::Bool(false)))), span: sp, attrs: ThinVec::new(), }) } /// A plain dummy pattern. pub fn raw_pat(sp: Span) -> ast::Pat { ast::Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Wild, span: sp, } } pub fn raw_ty(sp: Span) -> P<ast::Ty> { P(ast::Ty { id: ast::DUMMY_NODE_ID, node: ast::TyKind::Infer, span: sp }) } } impl MacResult for DummyResult { fn make_expr(self: Box<DummyResult>) -> Option<P<ast::Expr>> { Some(DummyResult::raw_expr(self.span)) } fn make_pat(self: Box<DummyResult>) -> Option<P<ast::Pat>> { Some(P(DummyResult::raw_pat(self.span))) } fn make_items(self: Box<DummyResult>) -> Option<OneVector<P<ast::Item>>> { // this code needs a comment... why not always just return the Some() ? if self.expr_only { None } else { Some(OneVector::new()) } } fn make_impl_items(self: Box<DummyResult>) -> Option<OneVector<ast::ImplItem>> { if self.expr_only { None } else { Some(OneVector::new()) } } fn make_trait_items(self: Box<DummyResult>) -> Option<OneVector<ast::TraitItem>> { if self.expr_only { None } else { Some(OneVector::new()) } } fn make_foreign_items(self: Box<Self>) -> Option<OneVector<ast::ForeignItem>> { if self.expr_only { None } else { Some(OneVector::new()) } } fn make_stmts(self: Box<DummyResult>) -> Option<OneVector<ast::Stmt>> { Some(smallvec![ast::Stmt { id: ast::DUMMY_NODE_ID, node: ast::StmtKind::Expr(DummyResult::raw_expr(self.span)), span: self.span, }]) } fn make_ty(self: Box<DummyResult>) -> Option<P<ast::Ty>> { Some(DummyResult::raw_ty(self.span)) } } pub type BuiltinDeriveFn = for<'cx> fn(&'cx mut ExtCtxt, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)); /// Represents different kinds of macro invocations that can be resolved. 
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MacroKind { /// A bang macro - foo!() Bang, /// An attribute macro - #[foo] Attr, /// A derive attribute macro - #[derive(Foo)] Derive, /// A view of a procedural macro from the same crate that defines it. ProcMacroStub, } impl MacroKind { pub fn descr(self) -> &'static str { match self { MacroKind::Bang => "macro", MacroKind::Attr => "attribute macro", MacroKind::Derive => "derive macro", MacroKind::ProcMacroStub => "crate-local procedural macro", } } } /// An enum representing the different kinds of syntax extensions. pub enum SyntaxExtension { /// A trivial "extension" that does nothing, only keeps the attribute and marks it as known. NonMacroAttr { mark_used: bool }, /// A syntax extension that is attached to an item and creates new items /// based upon it. /// /// `#[derive(...)]` is a `MultiItemDecorator`. /// /// Prefer ProcMacro or MultiModifier since they are more flexible. MultiDecorator(Box<dyn MultiItemDecorator + sync::Sync + sync::Send>), /// A syntax extension that is attached to an item and modifies it /// in-place. Also allows decoration, i.e., creating new items. MultiModifier(Box<dyn MultiItemModifier + sync::Sync + sync::Send>), /// A function-like procedural macro. TokenStream -> TokenStream. ProcMacro { expander: Box<dyn ProcMacro + sync::Sync + sync::Send>, allow_internal_unstable: bool, edition: Edition, }, /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream. /// The first TokenSteam is the attribute, the second is the annotated item. /// Allows modification of the input items and adding new items, similar to /// MultiModifier, but uses TokenStreams, rather than AST nodes. AttrProcMacro(Box<dyn AttrProcMacro + sync::Sync + sync::Send>, Edition), /// A normal, function-like syntax extension. /// /// `bytes!` is a `NormalTT`. 
    NormalTT {
        expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
        def_info: Option<(ast::NodeId, Span)>,
        /// Whether the contents of the macro can
        /// directly use `#[unstable]` things (true == yes).
        allow_internal_unstable: bool,
        /// Whether the contents of the macro can use `unsafe`
        /// without triggering the `unsafe_code` lint.
        allow_internal_unsafe: bool,
        /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
        /// for a given macro.
        local_inner_macros: bool,
        /// The macro's feature name if it is unstable, and the stability feature
        unstable_feature: Option<(Symbol, u32)>,
        /// Edition of the crate in which the macro is defined
        edition: Edition,
    },

    /// A function-like syntax extension that has an extra ident before
    /// the block.
    ///
    IdentTT(Box<dyn IdentMacroExpander + sync::Sync + sync::Send>, Option<Span>, bool),

    /// An attribute-like procedural macro. TokenStream -> TokenStream.
    /// The input is the annotated item.
    /// Allows generating code to implement a Trait for a given struct
    /// or enum item.
    ProcMacroDerive(Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
                    Vec<Symbol> /* inert attribute names */, Edition),

    /// An attribute-like procedural macro that derives a builtin trait.
    BuiltinDerive(BuiltinDeriveFn),

    /// A declarative macro, e.g. `macro m() {}`.
    DeclMacro {
        expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
        def_info: Option<(ast::NodeId, Span)>,
        is_transparent: bool,
        edition: Edition,
    }
}

impl SyntaxExtension {
    /// Return which kind of macro calls this syntax extension.
    pub fn kind(&self) -> MacroKind {
        match *self {
            SyntaxExtension::DeclMacro { .. } |
            SyntaxExtension::NormalTT { .. } |
            SyntaxExtension::IdentTT(..) |
            SyntaxExtension::ProcMacro { .. } =>
                MacroKind::Bang,
            SyntaxExtension::NonMacroAttr { .. } |
            SyntaxExtension::MultiDecorator(..) |
            SyntaxExtension::MultiModifier(..) |
            SyntaxExtension::AttrProcMacro(..) =>
                MacroKind::Attr,
            SyntaxExtension::ProcMacroDerive(..) |
            SyntaxExtension::BuiltinDerive(..) =>
                MacroKind::Derive,
        }
    }

    /// The hygiene transparency that spans produced by this extension
    /// default to.
    pub fn default_transparency(&self) -> Transparency {
        match *self {
            SyntaxExtension::ProcMacro { .. } |
            SyntaxExtension::AttrProcMacro(..) |
            SyntaxExtension::ProcMacroDerive(..) |
            SyntaxExtension::DeclMacro { is_transparent: false, .. } => Transparency::Opaque,
            SyntaxExtension::DeclMacro { is_transparent: true, .. } => Transparency::Transparent,
            _ => Transparency::SemiTransparent,
        }
    }

    /// Edition of the crate the macro was defined in, where recorded;
    /// legacy extension kinds fall back to the session default.
    pub fn edition(&self) -> Edition {
        match *self {
            SyntaxExtension::NormalTT { edition, .. } |
            SyntaxExtension::DeclMacro { edition, .. } |
            SyntaxExtension::ProcMacro { edition, .. } |
            SyntaxExtension::AttrProcMacro(.., edition) |
            SyntaxExtension::ProcMacroDerive(.., edition) => edition,

            // Unstable legacy stuff
            SyntaxExtension::NonMacroAttr { .. } |
            SyntaxExtension::IdentTT(..) |
            SyntaxExtension::MultiDecorator(..) |
            SyntaxExtension::MultiModifier(..) |
            SyntaxExtension::BuiltinDerive(..) => hygiene::default_edition(),
        }
    }
}

pub type NamedSyntaxExtension = (Name, SyntaxExtension);

/// Name-resolution interface used by the expander to look up and record
/// macros during expansion.
pub trait Resolver {
    fn next_node_id(&mut self) -> ast::NodeId;
    fn get_module_scope(&mut self, id: ast::NodeId) -> Mark;

    fn eliminate_crate_var(&mut self, item: P<ast::Item>) -> P<ast::Item>;
    fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool;

    fn visit_ast_fragment_with_placeholders(&mut self, mark: Mark, fragment: &AstFragment,
                                            derives: &[Mark]);
    fn add_builtin(&mut self, ident: ast::Ident, ext: Lrc<SyntaxExtension>);

    fn resolve_imports(&mut self);
    // Resolves attribute and derive legacy macros from `#![plugin(..)]`.
    fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<Attribute>, allow_derive: bool)
                              -> Option<Attribute>;

    fn resolve_macro_invocation(&mut self, invoc: &Invocation, scope: Mark, force: bool)
                                -> Result<Option<Lrc<SyntaxExtension>>, Determinacy>;
    fn resolve_macro_path(&mut self, path: &ast::Path, kind: MacroKind, scope: Mark,
                          derives_in_scope: &[ast::Path], force: bool)
                          -> Result<Lrc<SyntaxExtension>, Determinacy>;

    fn check_unused_macros(&self);
}

/// Whether a macro resolution result is final, or may still change as more
/// names come into scope.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Determinacy {
    Determined,
    Undetermined,
}

impl Determinacy {
    pub fn determined(determined: bool) -> Determinacy {
        if determined { Determinacy::Determined } else { Determinacy::Undetermined }
    }
}

/// A do-nothing `Resolver`: never resolves anything and hands out dummy ids.
pub struct DummyResolver;

impl Resolver for DummyResolver {
    fn next_node_id(&mut self) -> ast::NodeId { ast::DUMMY_NODE_ID }
    fn get_module_scope(&mut self, _id: ast::NodeId) -> Mark { Mark::root() }

    fn eliminate_crate_var(&mut self, item: P<ast::Item>) -> P<ast::Item> { item }
    fn is_whitelisted_legacy_custom_derive(&self, _name: Name) -> bool { false }

    fn visit_ast_fragment_with_placeholders(&mut self, _invoc: Mark, _fragment: &AstFragment,
                                            _derives: &[Mark]) {}
    fn add_builtin(&mut self, _ident: ast::Ident, _ext: Lrc<SyntaxExtension>) {}

    fn resolve_imports(&mut self) {}
    fn find_legacy_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>, _allow_derive: bool)
                              -> Option<Attribute> { None }
    fn resolve_macro_invocation(&mut self, _invoc: &Invocation, _scope: Mark, _force: bool)
                                -> Result<Option<Lrc<SyntaxExtension>>, Determinacy> {
        Err(Determinacy::Determined)
    }
    fn resolve_macro_path(&mut self, _path: &ast::Path, _kind: MacroKind, _scope: Mark,
                          _derives_in_scope: &[ast::Path], _force: bool)
                          -> Result<Lrc<SyntaxExtension>, Determinacy> {
        Err(Determinacy::Determined)
    }
    fn check_unused_macros(&self) {}
}

/// Module path and on-disk directory of the module currently being expanded.
#[derive(Clone)]
pub struct ModuleData {
    pub mod_path: Vec<ast::Ident>,
    pub directory: PathBuf,
}

/// Per-expansion state: the hygiene mark, nesting depth, and module context.
#[derive(Clone)]
pub struct ExpansionData {
    pub mark: Mark,
    pub depth: usize,
    pub module: Rc<ModuleData>,
    pub directory_ownership: DirectoryOwnership,
    pub crate_span: Option<Span>,
}

/// One of these is made during expansion and incrementally updated as we go;
/// when a macro expansion occurs, the resulting nodes have the `backtrace()
/// -> expn_info` of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
    pub parse_sess: &'a parse::ParseSess,
    pub ecfg: expand::ExpansionConfig<'a>,
    pub root_path: PathBuf,
    pub resolver: &'a mut dyn Resolver,
    pub resolve_err_count: usize,
    pub current_expansion: ExpansionData,
    pub expansions: HashMap<Span, Vec<String>>,
}

impl<'a> ExtCtxt<'a> {
    pub fn new(parse_sess: &'a parse::ParseSess,
               ecfg: expand::ExpansionConfig<'a>,
               resolver: &'a mut dyn Resolver)
               -> ExtCtxt<'a> {
        ExtCtxt {
            parse_sess,
            ecfg,
            root_path: PathBuf::new(),
            resolver,
            resolve_err_count: 0,
            // Start at the crate root with a fresh, empty expansion context.
            current_expansion: ExpansionData {
                mark: Mark::root(),
                depth: 0,
                module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }),
                directory_ownership: DirectoryOwnership::Owned { relative: None },
                crate_span: None,
            },
            expansions: HashMap::new(),
        }
    }

    /// Returns a `Folder` for deeply expanding all macros in an AST node.
    pub fn expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
        expand::MacroExpander::new(self, false)
    }

    /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node.
    /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified.
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> { expand::MacroExpander::new(self, true) } pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect()) } pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { match self.current_expansion.mark.expn_info() { Some(expn_info) => expn_info.call_site, None => DUMMY_SP, } } pub fn backtrace(&self) -> SyntaxContext { SyntaxContext::empty().apply_mark(self.current_expansion.mark) } /// Returns span for the macro which originally caused the current expansion to happen. /// /// Stops backtracing at include! boundary. pub fn expansion_cause(&self) -> Option<Span> { let mut ctxt = self.backtrace(); let mut last_macro = None; loop { if ctxt.outer().expn_info().map_or(None, |info| { if info.format.name() == "include" { // Stop going up the backtrace once include! is encountered return None; } ctxt = info.call_site.ctxt(); last_macro = Some(info.call_site); Some(()) }).is_none() { break } } last_macro } pub fn struct_span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { self.parse_sess.span_diagnostic.struct_span_warn(sp, msg) } pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { self.parse_sess.span_diagnostic.struct_span_err(sp, msg) } pub fn struct_span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { self.parse_sess.span_diagnostic.struct_span_fatal(sp, msg) } /// Emit `msg` attached to `sp`, and stop compilation immediately. /// /// `span_err` should be strongly preferred where-ever possible: /// this should *only* be used when: /// /// - continuing has a high risk of flow-on errors (e.g. 
    /// errors in
    /// declaring a macro would cause all uses of that macro to
    /// complain about "undefined macro"), or
    /// - there is literally nothing else that can be done (however,
    /// in most cases one can construct a dummy expression/item to
    /// substitute; we never hit resolve/type-checking so the dummy
    /// value doesn't have to match anything)
    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
        self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise();
    }

    /// Emit `msg` attached to `sp`, without immediately stopping
    /// compilation.
    ///
    /// Compilation will be stopped in the near future (at the end of
    /// the macro expansion phase).
    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
        self.parse_sess.span_diagnostic.span_err(sp, msg);
    }
    pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
        self.parse_sess.span_diagnostic.span_err_with_code(sp, msg, code);
    }
    pub fn mut_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
                        -> DiagnosticBuilder<'a> {
        self.parse_sess.span_diagnostic.mut_span_err(sp, msg)
    }
    pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
        self.parse_sess.span_diagnostic.span_warn(sp, msg);
    }
    pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
        self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
    }
    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
        self.parse_sess.span_diagnostic.span_bug(sp, msg);
    }

    /// Emit the `trace_macro` notes accumulated in `self.expansions`, then
    /// clear them so they are not reported a second time.
    pub fn trace_macros_diag(&mut self) {
        for (sp, notes) in self.expansions.iter() {
            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
            for note in notes {
                db.note(note);
            }
            db.emit();
        }
        // FIXME: does this result in errors?
        self.expansions.clear();
    }
    pub fn bug(&self, msg: &str) -> !
    {
        self.parse_sess.span_diagnostic.bug(msg);
    }
    pub fn trace_macros(&self) -> bool {
        self.ecfg.trace_mac
    }
    pub fn set_trace_macros(&mut self, x: bool) {
        self.ecfg.trace_mac = x
    }
    pub fn ident_of(&self, st: &str) -> ast::Ident {
        ast::Ident::from_str(st)
    }
    /// Path to a standard-library item: `$crate::<components...>`, where the
    /// `$crate` ident carries the current expansion's hygiene mark.
    pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
        let def_site = DUMMY_SP.apply_mark(self.current_expansion.mark);
        iter::once(Ident::new(keywords::DollarCrate.name(), def_site))
            .chain(components.iter().map(|s| self.ident_of(s)))
            .collect()
    }
    pub fn name_of(&self, st: &str) -> ast::Name {
        Symbol::intern(st)
    }

    pub fn check_unused_macros(&self) {
        self.resolver.check_unused_macros();
    }
}

/// Extract a string literal from the macro expanded version of `expr`,
/// emitting `err_msg` if `expr` is not a string literal. This does not stop
/// compilation on error, merely emits a non-fatal error and returns None.
pub fn expr_to_spanned_string<'a>(
    cx: &'a mut ExtCtxt,
    expr: P<ast::Expr>,
    err_msg: &str,
) -> Result<Spanned<(Symbol, ast::StrStyle)>, DiagnosticBuilder<'a>> {
    // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation.
    let expr = expr.map(|mut expr| {
        expr.span = expr.span.apply_mark(cx.current_expansion.mark);
        expr
    });

    // we want to be able to handle e.g. `concat!("foo", "bar")`
    let expr = cx.expander().fold_expr(expr);
    Err(match expr.node {
        // The `Ok` case returns early; everything that falls through is an
        // un-emitted error diagnostic handed back to the caller.
        ast::ExprKind::Lit(ref l) => match l.node {
            ast::LitKind::Str(s, style) => return Ok(respan(expr.span, (s, style))),
            _ => cx.struct_span_err(l.span, err_msg)
        },
        _ => cx.struct_span_err(expr.span, err_msg)
    })
}

/// Like `expr_to_spanned_string`, but emits the error itself and drops the span.
pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
                      -> Option<(Symbol, ast::StrStyle)> {
    expr_to_spanned_string(cx, expr, err_msg)
        .map_err(|mut err| err.emit())
        .ok()
        .map(|s| s.node)
}

/// Non-fatally assert that `tts` is empty.
/// Note that this function
/// returns even when `tts` is non-empty, macros that *need* to stop
/// compilation should call
/// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
/// done as rarely as possible).
pub fn check_zero_tts(cx: &ExtCtxt,
                      sp: Span,
                      tts: &[tokenstream::TokenTree],
                      name: &str) {
    if !tts.is_empty() {
        cx.span_err(sp, &format!("{} takes no arguments", name));
    }
}

/// Interpreting `tts` as a comma-separated sequence of expressions,
/// expect exactly one string literal, or emit an error and return None.
pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
                               sp: Span,
                               tts: &[tokenstream::TokenTree],
                               name: &str)
                               -> Option<String> {
    let mut p = cx.new_parser_from_tts(tts);
    if p.token == token::Eof {
        cx.span_err(sp, &format!("{} takes 1 argument", name));
        return None
    }
    let ret = panictry!(p.parse_expr());
    // A single trailing comma after the argument is tolerated.
    let _ = p.eat(&token::Comma);
    if p.token != token::Eof {
        cx.span_err(sp, &format!("{} takes 1 argument", name));
    }
    expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
        s.to_string()
    })
}

/// Extract comma-separated expressions from `tts`. If there is a
/// parsing error, emit a non-fatal error and return None.
pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
                          sp: Span,
                          tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
    let mut p = cx.new_parser_from_tts(tts);
    let mut es = Vec::new();
    while p.token != token::Eof {
        es.push(cx.expander().fold_expr(panictry!(p.parse_expr())));
        if p.eat(&token::Comma) {
            continue;
        }
        if p.token != token::Eof {
            cx.span_err(sp, "expected token: `,`");
            return None;
        }
    }
    Some(es)
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/placeholders.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::{self, NodeId}; use source_map::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{AstFragment, AstFragmentKind}; use ext::hygiene::Mark; use tokenstream::TokenStream; use fold::*; use ptr::P; use OneVector; use symbol::keywords; use ThinVec; use util::move_map::MoveMap; use std::collections::HashMap; pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment { fn mac_placeholder() -> ast::Mac { dummy_spanned(ast::Mac_ { path: ast::Path { span: DUMMY_SP, segments: Vec::new() }, tts: TokenStream::empty().into(), delim: ast::MacDelimiter::Brace, }) } let ident = keywords::Invalid.ident(); let attrs = Vec::new(); let generics = ast::Generics::default(); let vis = dummy_spanned(ast::VisibilityKind::Inherited); let span = DUMMY_SP; let expr_placeholder = || P(ast::Expr { id, span, attrs: ThinVec::new(), node: ast::ExprKind::Mac(mac_placeholder()), }); match kind { AstFragmentKind::Expr => AstFragment::Expr(expr_placeholder()), AstFragmentKind::OptExpr => AstFragment::OptExpr(Some(expr_placeholder())), AstFragmentKind::Items => AstFragment::Items(smallvec![P(ast::Item { id, span, ident, vis, attrs, node: ast::ItemKind::Mac(mac_placeholder()), tokens: None, })]), AstFragmentKind::TraitItems => AstFragment::TraitItems(smallvec![ast::TraitItem { id, span, ident, attrs, generics, node: ast::TraitItemKind::Macro(mac_placeholder()), tokens: None, }]), AstFragmentKind::ImplItems => AstFragment::ImplItems(smallvec![ast::ImplItem { id, span, ident, vis, attrs, generics, node: 
                ast::ImplItemKind::Macro(mac_placeholder()),
            defaultness: ast::Defaultness::Final,
            tokens: None,
        }]),
        AstFragmentKind::ForeignItems => AstFragment::ForeignItems(smallvec![ast::ForeignItem {
            id, span, ident, vis, attrs,
            node: ast::ForeignItemKind::Macro(mac_placeholder()),
        }]),
        AstFragmentKind::Pat => AstFragment::Pat(P(ast::Pat {
            id, span, node: ast::PatKind::Mac(mac_placeholder()),
        })),
        AstFragmentKind::Ty => AstFragment::Ty(P(ast::Ty {
            id, span, node: ast::TyKind::Mac(mac_placeholder()),
        })),
        AstFragmentKind::Stmts => AstFragment::Stmts(smallvec![{
            let mac = P((mac_placeholder(), ast::MacStmtStyle::Braces, ThinVec::new()));
            ast::Stmt { id, span, node: ast::StmtKind::Mac(mac) }
        }]),
    }
}

/// Folder that replaces placeholder nodes (inserted by `placeholder`) with
/// their finished expansions as they become available.
pub struct PlaceholderExpander<'a, 'b: 'a> {
    // Finished expansions, keyed by the placeholder's node id.
    expanded_fragments: HashMap<ast::NodeId, AstFragment>,
    cx: &'a mut ExtCtxt<'b>,
    // When true, fresh node ids are assigned to statements in `fold_block`.
    monotonic: bool,
}

impl<'a, 'b> PlaceholderExpander<'a, 'b> {
    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
        PlaceholderExpander {
            cx,
            expanded_fragments: HashMap::new(),
            monotonic,
        }
    }

    /// Record the finished expansion for placeholder `id`. The fragment is
    /// folded first so nested placeholders inside it are resolved, and any
    /// `#[derive]` outputs (one placeholder per mark) are appended to it.
    pub fn add(&mut self, id: ast::NodeId, fragment: AstFragment, derives: Vec<Mark>) {
        let mut fragment = fragment.fold_with(self);
        if let AstFragment::Items(mut items) = fragment {
            for derive in derives {
                match self.remove(NodeId::placeholder_from_mark(derive)) {
                    AstFragment::Items(derived_items) => items.extend(derived_items),
                    _ => unreachable!(),
                }
            }
            fragment = AstFragment::Items(items);
        }
        self.expanded_fragments.insert(id, fragment);
    }

    // Take the finished expansion for `id`; panics if it was never added.
    fn remove(&mut self, id: ast::NodeId) -> AstFragment {
        self.expanded_fragments.remove(&id).unwrap()
    }
}

impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
    fn fold_item(&mut self, item: P<ast::Item>) -> OneVector<P<ast::Item>> {
        match item.node {
            // A mac item is a placeholder: substitute its expansion.
            ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
            // Macro definitions are kept as-is, not descended into.
            ast::ItemKind::MacroDef(_) => return smallvec![item],
            _ => {}
        }

        noop_fold_item(item, self)
    }

    fn fold_trait_item(&mut self, item: ast::TraitItem) -> OneVector<ast::TraitItem> {
        match item.node {
            ast::TraitItemKind::Macro(_) => self.remove(item.id).make_trait_items(),
            _ => noop_fold_trait_item(item, self),
        }
    }

    fn fold_impl_item(&mut self, item: ast::ImplItem) -> OneVector<ast::ImplItem> {
        match item.node {
            ast::ImplItemKind::Macro(_) => self.remove(item.id).make_impl_items(),
            _ => noop_fold_impl_item(item, self),
        }
    }

    fn fold_foreign_item(&mut self, item: ast::ForeignItem) -> OneVector<ast::ForeignItem> {
        match item.node {
            ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(),
            _ => noop_fold_foreign_item(item, self),
        }
    }

    fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
        match expr.node {
            ast::ExprKind::Mac(_) => self.remove(expr.id).make_expr(),
            _ => expr.map(|expr| noop_fold_expr(expr, self)),
        }
    }

    fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
        match expr.node {
            ast::ExprKind::Mac(_) => self.remove(expr.id).make_opt_expr(),
            _ => noop_fold_opt_expr(expr, self),
        }
    }

    fn fold_stmt(&mut self, stmt: ast::Stmt) -> OneVector<ast::Stmt> {
        let (style, mut stmts) = match stmt.node {
            ast::StmtKind::Mac(mac) => (mac.1, self.remove(stmt.id).make_stmts()),
            _ => return noop_fold_stmt(stmt, self),
        };

        // A `foo!();`-style invocation keeps its trailing semicolon on the
        // last expanded statement.
        if style == ast::MacStmtStyle::Semicolon {
            if let Some(stmt) = stmts.pop() {
                stmts.push(stmt.add_trailing_semicolon());
            }
        }

        stmts
    }

    fn fold_pat(&mut self, pat: P<ast::Pat>) -> P<ast::Pat> {
        match pat.node {
            ast::PatKind::Mac(_) => self.remove(pat.id).make_pat(),
            _ => noop_fold_pat(pat, self),
        }
    }

    fn fold_ty(&mut self, ty: P<ast::Ty>) -> P<ast::Ty> {
        match ty.node {
            ast::TyKind::Mac(_) => self.remove(ty.id).make_ty(),
            _ => noop_fold_ty(ty, self),
        }
    }

    fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
        noop_fold_block(block, self).map(|mut block| {
            // NOTE(review): `remaining_stmts` is decremented but never read in
            // this code as visible here — possibly left over from an earlier
            // version; confirm before removing.
            let mut remaining_stmts = block.stmts.len();

            block.stmts = block.stmts.move_flat_map(|mut stmt| {
                remaining_stmts -= 1;

                // In monotonic mode, assign a real node id to each statement.
                if self.monotonic {
                    assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
                    stmt.id = self.cx.resolver.next_node_id();
                }

                Some(stmt)
            });

            block
        })
    }

    fn fold_mod(&mut self, module: ast::Mod) -> ast::Mod {
        let mut module = noop_fold_mod(module, self);
        module.items = module.items.move_flat_map(|item| match item.node {
            ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => None, // remove macro definitions
            _ => Some(item),
        });
        module
    }

    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
        mac
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/tt/macro_rules.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use {ast, attr}; use syntax_pos::{Span, DUMMY_SP}; use edition::Edition; use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; use ext::expand::{AstFragment, AstFragmentKind}; use ext::tt::macro_parser::{Success, Error, Failure}; use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{parse, parse_failure_msg}; use ext::tt::quoted; use ext::tt::transcribe::transcribe; use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{Directory, ParseSess}; use parse::parser::Parser; use parse::token::{self, NtTT}; use parse::token::Token::*; use symbol::Symbol; use tokenstream::{TokenStream, TokenTree}; use std::borrow::Cow; use std::collections::HashMap; use std::collections::hash_map::Entry; use rustc_data_structures::sync::Lrc; pub struct ParserAnyMacro<'a> { parser: Parser<'a>, /// Span of the expansion site of the macro this parser is for site_span: Span, /// The ident of the macro we're parsing macro_ident: ast::Ident } impl<'a> ParserAnyMacro<'a> { pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment { let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self; let fragment = panictry!(parser.parse_ast_fragment(kind, true)); // We allow semicolons at the end of expressions -- e.g. the semicolon in // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`, // but `m!()` is allowed in expression positions (c.f. issue #34706). 
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        parser.ensure_complete_parse(&path, kind.name(), site_span);
        fragment
    }
}

/// The compiled form of a `macro_rules!` definition: its matcher/transcriber
/// arms plus a validity flag set during `compile`.
struct MacroRulesMacroExpander {
    name: ast::Ident,
    lhses: Vec<quoted::TokenTree>,
    rhses: Vec<quoted::TokenTree>,
    valid: bool,
}

impl TTMacroExpander for MacroRulesMacroExpander {
    fn expand<'cx>(&self,
                   cx: &'cx mut ExtCtxt,
                   sp: Span,
                   input: TokenStream)
                   -> Box<dyn MacResult+'cx> {
        // Errors were already reported at definition time; expand to a dummy.
        if !self.valid {
            return DummyResult::any(sp);
        }
        generic_extension(cx, sp, self.name, input, &self.lhses, &self.rhses)
    }
}

/// Record a `trace_macros` note against the outermost call site of `sp`.
fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
    cx.expansions.entry(sp).or_default().push(message);
}

/// Given `lhses` and `rhses`, this is the new macro we create
fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                          sp: Span,
                          name: ast::Ident,
                          arg: TokenStream,
                          lhses: &[quoted::TokenTree],
                          rhses: &[quoted::TokenTree])
                          -> Box<dyn MacResult+'cx> {
    if cx.trace_macros() {
        trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
    }

    // Which arm's failure should we report?
    // (the one furthest along)
    let mut best_fail_spot = DUMMY_SP;
    let mut best_fail_tok = None;

    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
        let lhs_tt = match *lhs {
            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
            _ => cx.span_bug(sp, "malformed macro lhs")
        };

        match TokenTree::parse(cx, lhs_tt, arg.clone()) {
            Success(named_matches) => {
                let rhs = match rhses[i] { // ignore delimiters
                    quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                    _ => cx.span_bug(sp, "malformed macro rhs"),
                };

                let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
                // rhs has holes ( `$id` and `$(...)` that need filled)
                let mut tts = transcribe(cx, Some(named_matches), rhs);

                // Replace all the tokens for the corresponding positions in the macro, to maintain
                // proper positions in error reporting, while maintaining the macro_backtrace.
                if rhs_spans.len() == tts.len() {
                    tts = tts.map_enumerated(|i, tt| {
                        let mut tt = tt.clone();
                        let mut sp = rhs_spans[i];
                        sp = sp.with_ctxt(tt.span().ctxt());
                        tt.set_span(sp);
                        tt
                    });
                }

                if cx.trace_macros() {
                    trace_macros_note(cx, sp, format!("to `{}`", tts));
                }

                // Parse the transcribed tokens in the invocation's module
                // context so nested `mod`/`include!` paths resolve correctly.
                let directory = Directory {
                    path: Cow::from(cx.current_expansion.module.directory.as_path()),
                    ownership: cx.current_expansion.directory_ownership,
                };
                let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
                p.root_module_name = cx.current_expansion.module.mod_path.last()
                    .map(|id| id.as_str().to_string());

                p.process_potential_macro_variable();
                // Let the context choose how to interpret the result.
                // Weird, but useful for X-macros.
                return Box::new(ParserAnyMacro {
                    parser: p,

                    // Pass along the original expansion site and the name of the macro
                    // so we can print a useful error message if the parse of the expanded
                    // macro leaves unparsed tokens.
                    site_span: sp,
                    macro_ident: name
                })
            }
            // Remember only the failure that made it furthest into the input.
            Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() {
                best_fail_spot = sp;
                best_fail_tok = Some(tok);
            },
            Error(err_sp, ref msg) => {
                cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..])
            }
        }
    }

    let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers"));
    let mut err = cx.struct_span_err(best_fail_spot.substitute_dummy(sp), &best_fail_msg);

    // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
    if let Some((arg, comma_span)) = arg.add_comma() {
        for lhs in lhses { // try each arm's matchers
            let lhs_tt = match *lhs {
                quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
                _ => continue,
            };
            // If some arm matches once a comma is inserted, suggest it.
            match TokenTree::parse(cx, lhs_tt, arg.clone()) {
                Success(_) => {
                    if comma_span == DUMMY_SP {
                        err.note("you might be missing a comma");
                    } else {
                        err.span_suggestion_short(
                            comma_span,
                            "missing comma here",
                            ", ".to_string(),
                        );
                    }
                }
                _ => {}
            }
        }
    }
    err.emit();
    cx.trace_macros_diag();
    DummyResult::any(sp)
}

// Note that macro-by-example's input is also matched against a token tree:
//     $( $lhs:tt => $rhs:tt );+
//
// Holy self-referential!

/// Converts a `macro_rules!` invocation into a syntax extension.
pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition)
               -> SyntaxExtension {
    let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
    let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));

    // Parse the macro_rules! invocation
    let body = match def.node {
        ast::ItemKind::MacroDef(ref body) => body,
        _ => unreachable!(),
    };

    // The pattern that macro_rules matches.
    // The grammar for macro_rules! is:
    // $( $lhs:tt => $rhs:tt );+
    // ...quasiquoting this would be nice.
// These spans won't matter, anyways let argument_gram = vec![ quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { tts: vec![ quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), ], separator: Some(if body.legacy { token::Semi } else { token::Comma }), op: quoted::KleeneOp::OneOrMore, num_captures: 2, })), // to phase into semicolon-termination instead of semicolon-separation quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 })), ]; let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, Failure(sp, tok) => { let s = parse_failure_msg(tok); sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } Error(sp, s) => { sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } }; let mut valid = true; // Extract the arguments: let lhses = match *argument_map[&lhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { let tt = quoted::parse( tt.clone().into(), true, sess, features, &def.attrs, edition, def.id, ) .pop() .unwrap(); valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt); return tt; } } sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; let rhses = match *argument_map[&rhs_nm] { MatchedSeq(ref s, _) => { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { return quoted::parse( tt.clone().into(), false, sess, features, &def.attrs, edition, def.id, ).pop() .unwrap(); } } sess.span_diagnostic.span_bug(def.span, 
"wrong-structured lhs") }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }; for rhs in &rhses { valid &= check_rhs(sess, rhs); } // don't abort iteration early, so that errors for multiple lhses can be reported for lhs in &lhses { valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()]) } let expander: Box<_> = Box::new(MacroRulesMacroExpander { name: def.ident, lhses, rhses, valid, }); if body.legacy { let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable"); let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe"); let mut local_inner_macros = false; if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") { if let Some(l) = macro_export.meta_item_list() { local_inner_macros = attr::list_contains_name(&l, "local_inner_macros"); } } let unstable_feature = attr::find_stability(&sess.span_diagnostic, &def.attrs, def.span).and_then(|stability| { if let attr::StabilityLevel::Unstable { issue, .. } = stability.level { Some((stability.feature, issue)) } else { None } }); NormalTT { expander, def_info: Some((def.id, def.span)), allow_internal_unstable, allow_internal_unsafe, local_inner_macros, unstable_feature, edition, } } else { let is_transparent = attr::contains_name(&def.attrs, "rustc_transparent_macro"); SyntaxExtension::DeclMacro { expander, def_info: Some((def.id, def.span)), is_transparent, edition, } } } fn check_lhs_nt_follows(sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], lhs: &quoted::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. 
if let quoted::TokenTree::Delimited(_, ref tts) = *lhs { check_matcher(sess, features, attrs, &tts.tts) } else { let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; sess.span_diagnostic.span_err(lhs.span(), msg); false } // we don't abort on errors on rejection, the driver will do that for us // after parsing/expansion. we can report every error in every macro this way. } /// Check that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use self::quoted::TokenTree; for tt in tts { match *tt { TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (), TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { return false; }, TokenTree::Sequence(span, ref seq) => { if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| { match *seq_tt { TokenTree::MetaVarDecl(_, _, id) => id.name == "vis", TokenTree::Sequence(_, ref sub_seq) => sub_seq.op == quoted::KleeneOp::ZeroOrMore, _ => false, } }) { sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); return false; } if !check_lhs_no_empty_seq(sess, &seq.tts) { return false; } } } } true } fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool { match *rhs { quoted::TokenTree::Delimited(..) 
=> return true, _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited") } false } fn check_matcher(sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); check_matcher_core(sess, features, attrs, &first_sets, matcher, &empty_suffix); err == sess.span_diagnostic.err_count() } // The FirstSets for a matcher is a mapping from subsequences in the // matcher to the FIRST set for that subsequence. // // This mapping is partially precomputed via a backwards scan over the // token trees of the matcher, which provides a mapping from each // repetition sequence to its FIRST set. // // (Hypothetically sequences should be uniquely identifiable via their // spans, though perhaps that is false e.g. for macro-generated macros // that do not try to inject artificial span information. My plan is // to try to catch such cases ahead of time and not include them in // the precomputed mapping.) struct FirstSets { // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its // span in the original matcher to the First set for the inner sequence `tt ...`. // // If two sequences have the same span in a matcher, then map that // span to None (invalidating the mapping here and forcing the code to // use a slow path). first: HashMap<Span, Option<TokenSet>>, } impl FirstSets { fn new(tts: &[quoted::TokenTree]) -> FirstSets { use self::quoted::TokenTree; let mut sets = FirstSets { first: HashMap::new() }; build_recur(&mut sets, tts); return sets; // walks backward over `tts`, returning the FIRST for `tts` // and updating `sets` at the same time for all sequence // substructure we find within `tts`. fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { TokenTree::Token(..) 
| TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { first.replace_with(tt.clone()); } TokenTree::Delimited(span, ref delimited) => { build_recur(sets, &delimited.tts[..]); first.replace_with(delimited.open_tt(span)); } TokenTree::Sequence(sp, ref seq_rep) => { let subfirst = build_recur(sets, &seq_rep.tts[..]); match sets.first.entry(sp) { Entry::Vacant(vac) => { vac.insert(Some(subfirst.clone())); } Entry::Occupied(mut occ) => { // if there is already an entry, then a span must have collided. // This should not happen with typical macro_rules macros, // but syntax extensions need not maintain distinct spans, // so distinct syntax trees can be assigned the same span. // In such a case, the map cannot be trusted; so mark this // entry as unusable. occ.insert(None); } } // If the sequence contents can be empty, then the first // token could be the separator token itself. if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } // Reverse scan: Sequence comes before `first`. if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); } else { // Otherwise, sequence guaranteed // non-empty; replace first. first = subfirst; } } } } first } } // walks forward over `tts` until all potential FIRST tokens are // identified. fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet { use self::quoted::TokenTree; let mut first = TokenSet::empty(); for tt in tts.iter() { assert!(first.maybe_empty); match *tt { TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) 
=> { first.add_one(tt.clone()); return first; } TokenTree::Delimited(span, ref delimited) => { first.add_one(delimited.open_tt(span)); return first; } TokenTree::Sequence(sp, ref seq_rep) => { match self.first.get(&sp) { Some(&Some(ref subfirst)) => { // If the sequence contents can be empty, then the first // token could be the separator token itself. if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } assert!(first.maybe_empty); first.add_all(subfirst); if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state first.maybe_empty = true; continue; } else { return first; } } Some(&None) => { panic!("assume all sequences have (unique) spans for now"); } None => { panic!("We missed a sequence during FirstSets construction"); } } } } } // we only exit the loop if `tts` was empty or if every // element of `tts` matches the empty sequence. assert!(first.maybe_empty); first } } // A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s // (for macro-by-example syntactic variables). It also carries the // `maybe_empty` flag; that is true if and only if the matcher can // match an empty token sequence. // // The First set is computed on submatchers like `$($a:expr b),* $(c)* d`, // which has corresponding FIRST = {$a:expr, c, d}. // Likewise, `$($a:expr b),* $(c)+ d` has FIRST = {$a:expr, c}. // // (Notably, we must allow for *-op to occur zero times.) #[derive(Clone, Debug)] struct TokenSet { tokens: Vec<quoted::TokenTree>, maybe_empty: bool, } impl TokenSet { // Returns a set for the empty sequence. fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } } // Returns the set `{ tok }` for the single-token (and thus // non-empty) sequence [tok]. 
fn singleton(tok: quoted::TokenTree) -> Self { TokenSet { tokens: vec![tok], maybe_empty: false } } // Changes self to be the set `{ tok }`. // Since `tok` is always present, marks self as non-empty. fn replace_with(&mut self, tok: quoted::TokenTree) { self.tokens.clear(); self.tokens.push(tok); self.maybe_empty = false; } // Changes self to be the empty set `{}`; meant for use when // the particular token does not matter, but we want to // record that it occurs. fn replace_with_irrelevant(&mut self) { self.tokens.clear(); self.maybe_empty = false; } // Adds `tok` to the set for `self`, marking sequence as non-empy. fn add_one(&mut self, tok: quoted::TokenTree) { if !self.tokens.contains(&tok) { self.tokens.push(tok); } self.maybe_empty = false; } // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.) fn add_one_maybe(&mut self, tok: quoted::TokenTree) { if !self.tokens.contains(&tok) { self.tokens.push(tok); } } // Adds all elements of `other` to this. // // (Since this is a set, we filter out duplicates.) // // If `other` is potentially empty, then preserves the previous // setting of the empty flag of `self`. If `other` is guaranteed // non-empty, then `self` is marked non-empty. fn add_all(&mut self, other: &Self) { for tok in &other.tokens { if !self.tokens.contains(tok) { self.tokens.push(tok.clone()); } } if !other.maybe_empty { self.maybe_empty = false; } } } // Checks that `matcher` is internally consistent and that it // can legally by followed by a token N, for all N in `follow`. // (If `follow` is empty, then it imposes no constraint on // the `matcher`.) // // Returns the set of NT tokens that could possibly come last in // `matcher`. (If `matcher` matches the empty sequence, then // `maybe_empty` will be set to true.) // // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. 
fn check_matcher_core(sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], first_sets: &FirstSets, matcher: &[quoted::TokenTree], follow: &TokenSet) -> TokenSet { use self::quoted::TokenTree; let mut last = TokenSet::empty(); // 2. For each token and suffix [T, SUFFIX] in M: // ensure that T can be followed by SUFFIX, and if SUFFIX may be empty, // then ensure T can also be followed by any element of FOLLOW. 'each_token: for i in 0..matcher.len() { let token = &matcher[i]; let suffix = &matcher[i+1..]; let build_suffix_first = || { let mut s = first_sets.first(suffix); if s.maybe_empty { s.add_all(follow); } s }; // (we build `suffix_first` on demand below; you can tell // which cases are supposed to fall through by looking for the // initialization of this variable.) let suffix_first; // First, update `last` so that it corresponds to the set // of NT tokens that might end the sequence `... token`. match *token { TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); sess.span_diagnostic.struct_span_err(token.span(), &msg) .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`") .emit(); // (This eliminates false positives and duplicates // from error messages.) can_be_followed_by_any = true; } else { can_be_followed_by_any = token_can_be_followed_by_any(token); } if can_be_followed_by_any { // don't need to track tokens that work with any, last.replace_with_irrelevant(); // ... and don't need to check tokens that can be // followed by anything against SUFFIX. 
continue 'each_token; } else { last.replace_with(token.clone()); suffix_first = build_suffix_first(); } } TokenTree::Delimited(span, ref d) => { let my_suffix = TokenSet::singleton(d.close_tt(span)); check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); // also, we don't need to check delimited sequences // against SUFFIX continue 'each_token; } TokenTree::Sequence(sp, ref seq_rep) => { suffix_first = build_suffix_first(); // The trick here: when we check the interior, we want // to include the separator (if any) as a potential // (but not guaranteed) element of FOLLOW. So in that // case, we make a temp copy of suffix and stuff // delimiter in there. // // FIXME: Should I first scan suffix_first to see if // delimiter is already in it before I go through the // work of cloning it? But then again, this way I may // get a "tighter" span? let mut new; let my_suffix = if let Some(ref u) = seq_rep.separator { new = suffix_first.clone(); new.add_one_maybe(TokenTree::Token(sp, u.clone())); &new } else { &suffix_first }; // At this point, `suffix_first` is built, and // `my_suffix` is some TokenSet that we can use // for checking the interior of `seq_rep`. let next = check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix); if next.maybe_empty { last.add_all(&next); } else { last = next; } // the recursive call to check_matcher_core already ran the 'each_last // check below, so we can just keep going forward here. continue 'each_token; } } // (`suffix_first` guaranteed initialized once reaching here.) // Now `last` holds the complete set of NT tokens that could // end the sequence before SUFFIX. Check that every one works with `suffix`. 
'each_last: for token in &last.tokens { if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token { for next_token in &suffix_first.tokens { match is_in_follow(next_token, &frag_spec.as_str()) { Err((msg, help)) => { sess.span_diagnostic.struct_span_err(next_token.span(), &msg) .help(help).emit(); // don't bother reporting every source of // conflict for a particular element of `last`. continue 'each_last; } Ok(true) => {} Ok(false) => { let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1 { "is" } else { "may be" }; sess.span_diagnostic.span_err( next_token.span(), &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ is not allowed for `{frag}` fragments", name=name, frag=frag_spec, next=quoted_tt_to_string(next_token), may_be=may_be) ); } } } } } } last } fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool { if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok { frag_can_be_followed_by_any(&frag_spec.as_str()) } else { // (Non NT's can always be followed by anthing in matchers.) true } } /// True if a fragment of type `frag` can be followed by any sort of /// token. We use this (among other things) as a useful approximation /// for when `frag` can be followed by a repetition like `$(...)*` or /// `$(...)+`. In general, these can be a bit tricky to reason about, /// so we adopt a conservative position that says that any fragment /// specifier which consumes at most one token tree can be followed by /// a fragment specifier (indeed, these fragments can be followed by /// ANYTHING without fear of future compatibility hazards). 
fn frag_can_be_followed_by_any(frag: &str) -> bool { match frag { "item" | // always terminated by `}` or `;` "block" | // exactly one token tree "ident" | // exactly one token tree "literal" | // exactly one token tree "meta" | // exactly one token tree "lifetime" | // exactly one token tree "tt" => // exactly one token tree true, _ => false, } } /// True if `frag` can legally be followed by the token `tok`. For /// fragments that can consume an unbounded number of tokens, `tok` /// must be within a well-defined follow set. This is intended to /// guarantee future compatibility: for example, without this rule, if /// we expanded `expr` to include a new binary operator, we might /// break macros that were relying on that binary operator as a /// separator. // when changing this do not forget to update doc/book/macros.md! fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> { use self::quoted::TokenTree; if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. 
Ok(true) } else { match frag { "item" => { // since items *must* be followed by either a `;` or a `}`, we can // accept anything after them Ok(true) }, "block" => { // anything can follow block, the braces provide an easy boundary to // maintain Ok(true) }, "stmt" | "expr" => match *tok { TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Semi => Ok(true), _ => Ok(false) }, _ => Ok(false), }, "pat" => match *tok { TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true), _ => Ok(false) }, _ => Ok(false), }, "path" | "ty" => match *tok { TokenTree::Token(_, ref tok) => match *tok { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true), _ => Ok(false) }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true), _ => Ok(false), }, "ident" | "lifetime" => { // being a single token, idents and lifetimes are harmless Ok(true) }, "literal" => { // literals may be of a single token, or two tokens (negative numbers) Ok(true) }, "meta" | "tt" => { // being either a single token or a delimited sequence, tt is // harmless Ok(true) }, "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. 
match *tok { TokenTree::Token(_, ref tok) => match *tok { Comma => Ok(true), Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true), ref tok => Ok(tok.can_begin_type()) }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident" || frag.name == "ty" || frag.name == "path" => Ok(true), _ => Ok(false) } }, "" => Ok(true), // keywords::Invalid _ => Err((format!("invalid fragment specifier `{}`", frag), "valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ `literal`, `item` and `vis`")) } } } fn has_legal_fragment_specifier(sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], tok: &quoted::TokenTree) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok { let frag_name = frag_spec.as_str(); let frag_span = tok.span(); if !is_legal_fragment_specifier(sess, features, attrs, &frag_name, frag_span) { return Err(frag_name.to_string()); } } Ok(()) } fn is_legal_fragment_specifier(sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], frag_name: &str, frag_span: Span) -> bool { match frag_name { "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | "path" | "ty" | "ident" | "meta" | "tt" | "vis" | "" => true, "literal" => { if !features.macro_literal_matcher && !attr::contains_name(attrs, "allow_internal_unstable") { let explain = feature_gate::EXPLAIN_LITERAL_MATCHER; emit_feature_err(sess, "macro_literal_matcher", frag_span, GateIssue::Language, explain); } true }, _ => false, } } fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ in follow set checker"), } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/tt/quoted.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::NodeId; use early_buffered_lints::BufferedEarlyLintId; use ext::tt::macro_parser; use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{token, ParseSess}; use print::pprust; use symbol::keywords; use syntax_pos::{edition::Edition, BytePos, Span}; use tokenstream; use {ast, attr}; use rustc_data_structures::sync::Lrc; use std::iter::Peekable; /// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note /// that the delimiter itself might be `NoDelim`. #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub struct Delimited { pub delim: token::DelimToken, pub tts: Vec<TokenTree>, } impl Delimited { /// Return the opening delimiter (possibly `NoDelim`). pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } /// Return the closing delimiter (possibly `NoDelim`). pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span.is_dummy() { span } else { span.with_lo(span.lo() + BytePos(self.delim.len() as u32)) }; TokenTree::Token(open_span, self.open_token()) } /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter. 
pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span.is_dummy() { span } else { span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; TokenTree::Token(close_span, self.close_token()) } } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] pub struct SequenceRepetition { /// The sequence of token trees pub tts: Vec<TokenTree>, /// The optional separator pub separator: Option<token::Token>, /// Whether the sequence can be repeated zero (*), or one or more times (+) pub op: KleeneOp, /// The number of `Match`s that appear in the sequence (and subsequences) pub num_captures: usize, } /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) /// for token sequences. #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum KleeneOp { /// Kleene star (`*`) for zero or more repetitions ZeroOrMore, /// Kleene plus (`+`) for one or more repetitions OneOrMore, ZeroOrOne, } /// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)` /// are "first-class" token trees. Useful for parsing macros. #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { Token(Span, token::Token), Delimited(Span, Lrc<Delimited>), /// A kleene-style repetition sequence Sequence(Span, Lrc<SequenceRepetition>), /// E.g. `$var` MetaVar(Span, ast::Ident), /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros. MetaVarDecl( Span, ast::Ident, /* name to bind */ ast::Ident, /* kind of nonterminal */ ), } impl TokenTree { /// Return the number of tokens in the tree. pub fn len(&self) -> usize { match *self { TokenTree::Delimited(_, ref delimed) => match delimed.delim { token::NoDelim => delimed.tts.len(), _ => delimed.tts.len() + 2, }, TokenTree::Sequence(_, ref seq) => seq.tts.len(), _ => 0, } } /// Returns true if the given token tree contains no other tokens. 
This is vacuously true for /// single tokens or metavar/decls, but may be false for delimited trees or sequences. pub fn is_empty(&self) -> bool { match *self { TokenTree::Delimited(_, ref delimed) => match delimed.delim { token::NoDelim => delimed.tts.is_empty(), _ => false, }, TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(), _ => true, } } /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { delimed.tts[index].clone() } (&TokenTree::Delimited(span, ref delimed), _) => { if index == 0 { return delimed.open_tt(span); } if index == delimed.tts.len() + 1 { return delimed.close_tt(span); } delimed.tts[index - 1].clone() } (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), _ => panic!("Cannot expand a token tree"), } } /// Retrieve the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) | TokenTree::MetaVar(sp, _) | TokenTree::MetaVarDecl(sp, _, _) | TokenTree::Delimited(sp, _) | TokenTree::Sequence(sp, _) => sp, } } } /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a /// collection of `TokenTree` for use in parsing a macro. /// /// # Parameters /// /// - `input`: a token stream to read from, the contents of which we are parsing. /// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a /// macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with /// their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and /// `ident` are "matchers". They are not present in the body of a macro rule -- just in the /// pattern, so we pass a parameter to indicate whether to expect them or not. 
/// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// unstable features or not. /// - `edition`: which edition are we in. /// - `macro_node_id`: the NodeId of the macro we are parsing. /// /// # Returns /// /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`. pub fn parse( input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], edition: Edition, macro_node_id: NodeId, ) -> Vec<TokenTree> { // Will contain the final collection of `self::TokenTree` let mut result = Vec::new(); // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming // additional trees if need be. let mut trees = input.trees().peekable(); while let Some(tree) = trees.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`). 
let tree = parse_tree( tree, &mut trees, expect_matchers, sess, features, attrs, edition, macro_node_id, ); match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { Some((kind, _)) => { let span = end_sp.with_lo(start_sp.lo()); result.push(TokenTree::MetaVarDecl(span, ident, kind)); continue; } _ => end_sp, }, tree => tree .as_ref() .map(tokenstream::TokenTree::span) .unwrap_or(span), }, tree => tree .as_ref() .map(tokenstream::TokenTree::span) .unwrap_or(start_sp), }; sess.missing_fragment_specifiers.borrow_mut().insert(span); result.push(TokenTree::MetaVarDecl( span, ident, keywords::Invalid.ident(), )); } // Not a metavar or no matchers allowed, so just return the tree _ => result.push(tree), } } result } /// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a /// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree` /// for use in parsing a macro. /// /// Converting the given tree may involve reading more tokens. /// /// # Parameters /// /// - `tree`: the tree we wish to convert. /// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish /// converting `tree` /// - `expect_matchers`: same as for `parse` (see above). /// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// unstable features or not. 
fn parse_tree<I>(
    tree: tokenstream::TokenTree,
    trees: &mut Peekable<I>,
    expect_matchers: bool,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    edition: Edition,
    macro_node_id: NodeId,
) -> TokenTree
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
            // `tree` is followed by a delimited set of token trees. This indicates the beginning
            // of a repetition sequence in the macro (e.g. `$(pat)*`).
            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
                // Must have `(` not `{` or `[`
                if delimited.delim != token::Paren {
                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span, &msg);
                }
                // Parse the contents of the sequence itself (recursively).
                let sequence = parse(
                    delimited.tts.into(),
                    expect_matchers,
                    sess,
                    features,
                    attrs,
                    edition,
                    macro_node_id,
                );
                // Get the Kleene operator and optional separator
                let (separator, op) = parse_sep_and_kleene_op(
                    trees,
                    span,
                    sess,
                    features,
                    attrs,
                    edition,
                    macro_node_id,
                );
                // Count the number of captured "names" (i.e. named metavars)
                let name_captures = macro_parser::count_names(&sequence);
                TokenTree::Sequence(
                    span,
                    Lrc::new(SequenceRepetition {
                        tts: sequence,
                        separator,
                        op,
                        num_captures: name_captures,
                    }),
                )
            }

            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
            // metavariable that names the crate of the invocation.
            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
                let (ident, is_raw) = token.ident().unwrap();
                // The metavar's span runs from the `$` through the identifier.
                let span = ident_span.with_lo(span.lo());
                if ident.name == keywords::Crate.name() && !is_raw {
                    // `$crate` is resolved here into the `DollarCrate` keyword ident rather
                    // than being treated as a metavariable.
                    let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
                    TokenTree::Token(span, token::Ident(ident, is_raw))
                } else {
                    TokenTree::MetaVar(span, ident)
                }
            }

            // `tree` is followed by a random token. This is an error.
            Some(tokenstream::TokenTree::Token(span, tok)) => {
                let msg = format!(
                    "expected identifier, found `{}`",
                    pprust::token_to_string(&tok)
                );
                sess.span_diagnostic.span_err(span, &msg);
                // Recover with a dummy metavar named `Invalid` so parsing can continue.
                TokenTree::MetaVar(span, keywords::Invalid.ident())
            }

            // There are no more tokens. Just return the `$` we already have.
            None => TokenTree::Token(span, token::Dollar),
        },

        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),

        // `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to
        // descend into the delimited set and further parse it.
        tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
            span,
            Lrc::new(Delimited {
                delim: delimited.delim,
                tts: parse(
                    delimited.tts.into(),
                    expect_matchers,
                    sess,
                    features,
                    attrs,
                    edition,
                    macro_node_id,
                ),
            }),
        ),
    }
}

/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
/// `None`.
fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
    match *token {
        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

/// Parse the next token tree of the input looking for a KleeneOp. Returns
///
/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
/// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
/// - Err(span) if the next token tree is not a token
fn parse_kleene_op<I>(
    input: &mut I,
    span: Span,
) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span>
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    match input.next() {
        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
            Some(op) => Ok(Ok((op, span))),
            None => Ok(Err((tok, span))),
        },
        // Not a token at all: report the span of whatever tree we got, falling back to the
        // span we were handed if the input is exhausted.
        tree => Err(tree
            .as_ref()
            .map(tokenstream::TokenTree::span)
            .unwrap_or(span)),
    }
}

/// Attempt to parse a single Kleene star, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
///
/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018,
/// `?` is a Kleene op and not a separator.
fn parse_sep_and_kleene_op<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
    edition: Edition,
    macro_node_id: NodeId,
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // Dispatch on edition: the meaning of `?` differs between 2015 and 2018 (see the NOTE on
    // `parse_sep_and_kleene_op`'s doc comment).
    match edition {
        Edition::Edition2015 => parse_sep_and_kleene_op_2015(
            input, span, sess, features, attrs, macro_node_id,
        ),
        Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs),
        _ => unimplemented!(),
    }
}

// `?` is a separator (with a migration warning) and never a KleeneOp.
fn parse_sep_and_kleene_op_2015<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    _features: &Features,
    _attrs: &[ast::Attribute],
    macro_node_id: NodeId,
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // We basically look at two token trees here, denoted as #1 and #2 below.
    //
    // The `match` computes the span to report "expected `*` or `+`" against; the success
    // paths all `return` early, so reaching the end of the `match` means we saw an error.
    let span = match parse_kleene_op(input, span) {
        // #1 is a `+` or `*` KleeneOp
        //
        // `?` is ambiguous: it could be a separator (warning) or a Kleene::ZeroOrOne (error), so
        // we need to look ahead one more token to be sure.
        Ok(Ok((op, _))) if op != KleeneOp::ZeroOrOne => return (None, op),

        // #1 is `?` token, but it could be a Kleene::ZeroOrOne (error in 2015) without a separator
        // or it could be a `?` separator followed by any Kleene operator. We need to look ahead 1
        // token to find out which.
        Ok(Ok((op, op1_span))) => {
            assert_eq!(op, KleeneOp::ZeroOrOne);

            // Lookahead at #2. If it is a KleeneOp, then #1 is a separator.
            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
                kleene_op(tok2).is_some()
            } else {
                false
            };

            if is_1_sep {
                // #1 is a separator and #2 should be a KleeneOp.
                // (N.B. We need to advance the input iterator.)
                match parse_kleene_op(input, span) {
                    // #2 is `?`, which is not allowed as a Kleene op in 2015 edition.
                    Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
                        sess.span_diagnostic
                            .struct_span_err(op2_span, "expected `*` or `+`")
                            .note("`?` is not a macro repetition operator")
                            .emit();

                        // Return a dummy
                        return (None, KleeneOp::ZeroOrMore);
                    }

                    // #2 is a Kleene op, which is the only valid option
                    Ok(Ok((op, _))) => {
                        // Warn that `?` as a separator will be deprecated
                        sess.buffer_lint(
                            BufferedEarlyLintId::QuestionMarkMacroSep,
                            op1_span,
                            macro_node_id,
                            "using `?` as a separator is deprecated and will be \
                             a hard error in an upcoming edition",
                        );

                        return (Some(token::Question), op);
                    }

                    // #2 is a random token (this is an error) :(
                    Ok(Err((_, _))) => op1_span,

                    // #2 is not even a token at all :(
                    Err(_) => op1_span,
                }
            } else {
                // `?` is not allowed as a Kleene op in 2015
                sess.span_diagnostic
                    .struct_span_err(op1_span, "expected `*` or `+`")
                    .note("`?` is not a macro repetition operator")
                    .emit();

                // Return a dummy
                return (None, KleeneOp::ZeroOrMore);
            }
        }

        // #1 is a separator followed by #2, a KleeneOp
        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
            // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition.
            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
                sess.span_diagnostic
                    .struct_span_err(op2_span, "expected `*` or `+`")
                    .note("`?` is not a macro repetition operator")
                    .emit();

                // Return a dummy
                return (None, KleeneOp::ZeroOrMore);
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, _))) => return (Some(tok), op),

            // #2 is a random token :(
            Ok(Err((_, span))) => span,

            // #2 is not a token at all :(
            Err(span) => span,
        },

        // #1 is not a token
        Err(span) => span,
    };

    sess.span_diagnostic.span_err(span, "expected `*` or `+`");

    // Return a dummy
    (None, KleeneOp::ZeroOrMore)
}

// `?` is a Kleene op, not a separator
fn parse_sep_and_kleene_op_2018<I>(
    input: &mut Peekable<I>,
    span: Span,
    sess: &ParseSess,
    features: &Features,
    attrs: &[ast::Attribute],
) -> (Option<token::Token>, KleeneOp)
where
    I: Iterator<Item = tokenstream::TokenTree>,
{
    // We basically look at two token trees here, denoted as #1 and #2 below
    let span = match parse_kleene_op(input, span) {
        // #1 is a `?` (needs feature gate)
        Ok(Ok((op, op1_span))) if op == KleeneOp::ZeroOrOne => {
            // `?` as a Kleene op is feature-gated behind `macro_at_most_once_rep` unless the
            // macro carries `allow_internal_unstable`.
            if !features.macro_at_most_once_rep
                && !attr::contains_name(attrs, "allow_internal_unstable")
            {
                let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
                emit_feature_err(
                    sess,
                    "macro_at_most_once_rep",
                    op1_span,
                    GateIssue::Language,
                    explain,
                );

                op1_span
            } else {
                return (None, op);
            }
        }

        // #1 is a `+` or `*` KleeneOp
        Ok(Ok((op, _))) => return (None, op),

        // #1 is a separator followed by #2, a KleeneOp
        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
            // #2 is the `?` Kleene op, which does not take a separator (error)
            Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
                // Error!
                if !features.macro_at_most_once_rep
                    && !attr::contains_name(attrs, "allow_internal_unstable")
                {
                    // FIXME: when `?` as a Kleene op is stabilized, we only need the "does not
                    // take a macro separator" error (i.e. the `else` case).
                    sess.span_diagnostic
                        .struct_span_err(op2_span, "expected `*` or `+`")
                        .note("`?` is not a macro repetition operator")
                        .emit();
                } else {
                    sess.span_diagnostic.span_err(
                        span,
                        "the `?` macro repetition operator does not take a separator",
                    );
                }

                // Return a dummy
                return (None, KleeneOp::ZeroOrMore);
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, _))) => return (Some(tok), op),

            // #2 is a random token :(
            Ok(Err((_, span))) => span,

            // #2 is not a token at all :(
            Err(span) => span,
        },

        // #1 is not a token
        Err(span) => span,
    };

    // If we ever get to this point, we have experienced an "unexpected token" error.
    // The error message mentions `?` only when the feature gate would allow it.
    if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") {
        sess.span_diagnostic.span_err(span, "expected `*` or `+`");
    } else {
        sess.span_diagnostic
            .span_err(span, "expected one of: `*`, `+`, or `?`");
    }

    // Return a dummy
    (None, KleeneOp::ZeroOrMore)
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/tt/transcribe.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::Ident; use ext::base::ExtCtxt; use ext::expand::Marker; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; use fold::noop_fold_tt; use parse::token::{self, Token, NtTT}; use OneVector; use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use std::rc::Rc; use rustc_data_structures::sync::Lrc; use std::mem; use std::ops::Add; use std::collections::HashMap; // An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). enum Frame { Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: Span, }, Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token>, }, } impl Frame { fn new(tts: Vec<quoted::TokenTree>) -> Frame { let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP } } } impl Iterator for Frame { type Item = quoted::TokenTree; fn next(&mut self) -> Option<quoted::TokenTree> { match *self { Frame::Delimited { ref forest, ref mut idx, .. } => { *idx += 1; forest.tts.get(*idx - 1).cloned() } Frame::Sequence { ref forest, ref mut idx, .. } => { *idx += 1; forest.tts.get(*idx - 1).cloned() } } } } /// This can do Macro-By-Example transcription. On the other hand, if /// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can /// (and should) be None. 
pub fn transcribe(
    cx: &ExtCtxt,
    interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
    src: Vec<quoted::TokenTree>,
) -> TokenStream {
    // A stack of `Frame`s: the top is the tree we are currently reading from; descending into
    // a `Delimited` or `Sequence` pushes a new frame.
    let mut stack: OneVector<Frame> = smallvec![Frame::new(src)];
    let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
    // For each sequence we are inside, `(current repetition index, total repetitions)`.
    let mut repeats = Vec::new();
    // Output accumulated for the innermost frame; outer frames' partial output is parked on
    // `result_stack` while we are inside a `Delimited`.
    let mut result: Vec<TokenStream> = Vec::new();
    let mut result_stack = Vec::new();

    loop {
        // Pull the next tree from the current frame, or handle end-of-frame below.
        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
            tree
        } else {
            // The current frame is exhausted. If it is a sequence with repetitions left, rewind
            // it, emit the separator (if any), and go around again.
            if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
                let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if *repeat_idx < repeat_len {
                    *idx = 0;
                    if let Some(sep) = sep.clone() {
                        // repeat same span, I guess
                        let prev_span = match result.last() {
                            Some(stream) => stream.trees().next().unwrap().span(),
                            None => DUMMY_SP,
                        };
                        result.push(TokenTree::Token(prev_span, sep).into());
                    }
                    continue
                }
            }

            // Otherwise pop the finished frame.
            match stack.pop().unwrap() {
                Frame::Sequence { .. } => {
                    repeats.pop();
                }
                Frame::Delimited { forest, span, .. } => {
                    // An empty `result_stack` means this was the root frame: we are done.
                    if result_stack.is_empty() {
                        return TokenStream::concat(result);
                    }
                    // Re-wrap the accumulated output in its delimiters and append it to the
                    // parent frame's output.
                    let tree = TokenTree::Delimited(span, Delimited {
                        delim: forest.delim,
                        tts: TokenStream::concat(result).into(),
                    });
                    result = result_stack.pop().unwrap();
                    result.push(tree.into());
                }
            }
            continue
        };

        match tree {
            quoted::TokenTree::Sequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(
                    &quoted::TokenTree::Sequence(sp, seq.clone()),
                    &interpolations,
                    &repeats,
                ) {
                    LockstepIterSize::Unconstrained => {
                        cx.span_fatal(sp, /* blame macro writer */
                                      "attempted to repeat an expression \
                                       containing no syntax \
                                       variables matched as repeating at this depth");
                    }
                    LockstepIterSize::Contradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        cx.span_fatal(sp, &msg[..]);
                    }
                    LockstepIterSize::Constraint(len, _) => {
                        if len == 0 {
                            if seq.op == quoted::KleeneOp::OneOrMore {
                                // FIXME #2887 blame invoker
                                cx.span_fatal(sp, "this must repeat at least once");
                            }
                        } else {
                            // Enter the sequence: record the repetition count and push a frame.
                            repeats.push((0, len));
                            stack.push(Frame::Sequence {
                                idx: 0,
                                sep: seq.separator.clone(),
                                forest: seq,
                            });
                        }
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            quoted::TokenTree::MetaVar(mut sp, ident) => {
                if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) {
                    if let MatchedNonterminal(ref nt) = *cur_matched {
                        // A bound metavar: splice in the matched tokens. `NtTT` is inserted
                        // as-is; other nonterminals become an interpolated token.
                        if let NtTT(ref tt) = **nt {
                            result.push(tt.clone().into());
                        } else {
                            sp = sp.apply_mark(cx.current_expansion.mark);
                            let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
                            result.push(token.into());
                        }
                    } else {
                        cx.span_fatal(sp, /* blame the macro writer */
                                      &format!("variable '{}' is still repeating at this depth",
                                               ident));
                    }
                } else {
                    // Unbound metavar: emit `$ident` literally.
                    let ident =
                        Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                    sp = sp.apply_mark(cx.current_expansion.mark);
                    result.push(TokenTree::Token(sp, token::Dollar).into());
                    result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
                }
            }
            quoted::TokenTree::Delimited(mut span, delimited) => {
                // Descend into the delimited group; park the current output until we pop back.
                span = span.apply_mark(cx.current_expansion.mark);
                stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                result_stack.push(mem::replace(&mut result, Vec::new()));
            }
            quoted::TokenTree::Token(sp, tok) => {
                let mut marker = Marker(cx.current_expansion.mark);
                result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into())
            }
            // `MetaVarDecl`s only appear in matchers, never in transcriber output.
            quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
        }
    }
}

/// Look up the match bound to `ident`, descending through `MatchedSeq` layers according to the
/// current repetition indices in `repeats`. Returns `None` if `ident` is not bound at all.
fn lookup_cur_matched(
    ident: Ident,
    interpolations: &HashMap<Ident, Rc<NamedMatch>>,
    repeats: &[(usize, usize)],
) -> Option<Rc<NamedMatch>> {
    interpolations.get(&ident).map(|matched| {
        let mut matched = matched.clone();
        for &(idx, _) in repeats {
            let m = matched.clone();
            match *m {
                // Already at a leaf: stop descending even if more repetition levels remain.
                MatchedNonterminal(_) => break,
                MatchedSeq(ref ads, _) => matched = Rc::new(ads[idx].clone()),
            }
        }

        matched
    })
}

/// The size constraint a (sub)tree places on lockstep iteration of a sequence.
#[derive(Clone)]
enum LockstepIterSize {
    /// No constraint: the tree mentions no repeating metavars.
    Unconstrained,
    /// Exactly this many repetitions, constrained by the named metavar.
    Constraint(usize, Ident),
    /// Two metavars demand different repetition counts; the message describes the conflict.
    Contradiction(String),
}

impl Add for LockstepIterSize {
    type Output = LockstepIterSize;

    /// Combine two constraints: `Unconstrained` is the identity, `Contradiction` is absorbing,
    /// and two `Constraint`s agree only if their lengths match.
    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
                LockstepIterSize::Unconstrained => self.clone(),
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(),
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!("inconsistent lockstep iteration: \
                                       '{}' has {} items, but '{}' has {}",
                                      l_id, l_len, r_id, r_len);
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

/// Compute the lockstep iteration size `tree` requires, by folding together the constraints of
/// every metavar it mentions (at the current repetition depth given by `repeats`).
fn lockstep_iter_size(
    tree: &quoted::TokenTree,
    interpolations: &HashMap<Ident, Rc<NamedMatch>>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use self::quoted::TokenTree;
    match *tree {
        TokenTree::Delimited(_, ref delimed) => {
            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size + lockstep_iter_size(tt, interpolations, repeats)
            })
        },
        TokenTree::Sequence(_, ref seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size + lockstep_iter_size(tt, interpolations, repeats)
            })
        },
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) =>
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match *matched {
                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained
            },
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/ext/tt/macro_parser.rs
// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! This is an NFA-based parser, which calls out to the main rust parser for named nonterminals
//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
//! fit for Macro-by-Example-style rules.
//!
//! (In order to prevent the pathological case, we'd need to lazily construct the resulting
//! `NamedMatch`es at the very end. It'd be a pain, and require more memory to keep around old
//! items, but it would also save overhead)
//!
//! We don't say this parser uses the Earley algorithm, because it's unnecessarily inaccurate.
//! The macro parser restricts itself to the features of finite state automata. Earley parsers
//! can be described as an extension of NFAs with completion rules, prediction rules, and recursion.
//!
//! Quick intro to how the parser works:
//!
//! A 'position' is a dot in the middle of a matcher, usually represented as a
//! dot. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
//!
//! The parser walks through the input a character at a time, maintaining a list
//! of threads consistent with the current position in the input string: `cur_items`.
//!
//! As it processes them, it fills up `eof_items` with threads that would be valid if
//! the macro invocation is now over, `bb_items` with threads that are waiting on
//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting
//! on a particular token. Most of the logic concerns moving the · through the
//! repetitions indicated by Kleene stars. The rules for moving the · without
//! consuming any input are called epsilon transitions. It only advances or calls
//! out to the real Rust parser when no `cur_items` threads remain.
//!
//! Example:
//!
//! ```text, ignore
//! Start parsing a a a a b against [· a $( a )* a b].
//!
//! Remaining input: a a a a b
//! next: [· a $( a )* a b]
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over a b. - - -
//!
//! Remaining input: ''
//! eof: [a $( a )* a b ·]
//! ```

pub use self::NamedMatch::*;
pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeSlice::*;

use ast::Ident;
use syntax_pos::{self, BytePos, Span};
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
use parse::parser::{Parser, PathStyle};
use parse::token::{self, DocComment, Nonterminal, Token};
use print::pprust;
use OneVector;
use symbol::keywords;
use tokenstream::TokenStream;

use std::mem;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};

// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.

/// Either a sequence of token trees or a single one. This is used as the representation of the
/// sequence of tokens that make up a matcher.
#[derive(Clone)]
enum TokenTreeOrTokenTreeSlice<'a> {
    /// A single token tree (which may itself contain nested trees).
    Tt(TokenTree),
    /// A borrowed slice of top-level token trees.
    TtSeq(&'a [TokenTree]),
}

impl<'a> TokenTreeOrTokenTreeSlice<'a> {
    /// Returns the number of constituent top-level token trees of `self` (top-level in that it
    /// will not recursively descend into subtrees).
    fn len(&self) -> usize {
        match *self {
            TtSeq(ref v) => v.len(),
            Tt(ref tt) => tt.len(),
        }
    }

    /// The `index`-th token tree of `self`.
    fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TtSeq(ref v) => v[index].clone(),
            Tt(ref tt) => tt.get_tt(index),
        }
    }
}

/// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
///
/// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
/// descended into.
#[derive(Clone)]
struct MatcherTtFrame<'a> {
    /// The "parent" matcher that we are descending into.
    elts: TokenTreeOrTokenTreeSlice<'a>,
    /// The position of the "dot" in `elts` at the time we descended.
    idx: usize,
}

/// Represents a single "position" (aka "matcher position", aka "item"), as described in the module
/// documentation.
#[derive(Clone)]
struct MatcherPos<'a> {
    /// The token or sequence of tokens that make up the matcher
    top_elts: TokenTreeOrTokenTreeSlice<'a>,
    /// The position of the "dot" in this matcher
    idx: usize,
    /// The beginning position in the source that the beginning of this matcher corresponds to. In
    /// other words, the token in the source at `sp_lo` is matched against the first token of the
    /// matcher.
    sp_lo: BytePos,

    /// For each named metavar in the matcher, we keep track of token trees matched against the
    /// metavar by the black box parser. In particular, there may be more than one match per
    /// metavar if we are in a repetition (each repetition matches each of the variables).
    /// Moreover, matchers and repetitions can be nested; the `matches` field is shared (hence the
    /// `Rc`) among all "nested" matchers. `match_lo`, `match_cur`, and `match_hi` keep track of
    /// the current position of the `self` matcher position in the shared `matches` list.
    ///
    /// Also, note that while we are descending into a sequence, matchers are given their own
    /// `matches` vector. Only once we reach the end of a full repetition of the sequence do we add
    /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
    /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
    /// wants the shared `matches`, one should use `up.matches`.
    matches: Vec<Rc<Vec<NamedMatch>>>,
    /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
    /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
    /// to `matches[match_lo]`.
    match_lo: usize,
    /// The position in `matches` corresponding to the metavar we are currently trying to match
    /// against the source token stream. `match_lo <= match_cur <= match_hi`.
    match_cur: usize,
    /// Similar to `match_lo` except `match_hi` is the position in `matches` of the _last_ metavar
    /// in this matcher.
    match_hi: usize,

    // Specifically used if we are matching a repetition. If we aren't both should be `None`.
    /// The KleeneOp of this sequence if we are in a repetition.
    seq_op: Option<quoted::KleeneOp>,
    /// The separator if we are in a repetition
    sep: Option<Token>,
    /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
    /// before we enter the sequence.
    up: Option<MatcherPosHandle<'a>>,

    // Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
    // a delimited token tree (e.g. something wrapped in `(` `)`) or to get the contents of a doc
    // comment...
    /// When matching against matchers with nested delimited submatchers (e.g. `pat ( pat ( .. )
    /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
    /// that where the bottom of the stack is the outermost matcher.
    // Also, throughout the comments, this "descent" is often referred to as "unzipping"...
    stack: Vec<MatcherTtFrame<'a>>,
}

impl<'a> MatcherPos<'a> {
    /// Add `m` as a named match for the `idx`-th metavar.
    fn push_match(&mut self, idx: usize, m: NamedMatch) {
        // `make_mut` clones the inner `Vec` only if the `Rc` is shared, so pushing is usually
        // in-place.
        let matches = Rc::make_mut(&mut self.matches[idx]);
        matches.push(m);
    }
}

// Lots of MatcherPos instances are created at runtime. Allocating them on the
// heap is slow. Furthermore, using SmallVec<MatcherPos> to allocate them all
// on the stack is also slow, because MatcherPos is quite a large type and
// instances get moved around a lot between vectors, which requires lots of
// slow memcpy calls.
//
// Therefore, the initial MatcherPos is always allocated on the stack,
// subsequent ones (of which there aren't that many) are allocated on the heap,
// and this type is used to encapsulate both cases.
enum MatcherPosHandle<'a> {
    Ref(&'a mut MatcherPos<'a>),
    Box(Box<MatcherPos<'a>>),
}

impl<'a> Clone for MatcherPosHandle<'a> {
    // This always produces a new Box.
    fn clone(&self) -> Self {
        MatcherPosHandle::Box(match *self {
            MatcherPosHandle::Ref(ref r) => Box::new((**r).clone()),
            MatcherPosHandle::Box(ref b) => b.clone(),
        })
    }
}

impl<'a> Deref for MatcherPosHandle<'a> {
    type Target = MatcherPos<'a>;
    fn deref(&self) -> &Self::Target {
        match *self {
            MatcherPosHandle::Ref(ref r) => r,
            MatcherPosHandle::Box(ref b) => b,
        }
    }
}

impl<'a> DerefMut for MatcherPosHandle<'a> {
    fn deref_mut(&mut self) -> &mut MatcherPos<'a> {
        match *self {
            MatcherPosHandle::Ref(ref mut r) => r,
            MatcherPosHandle::Box(ref mut b) => b,
        }
    }
}

/// Represents the possible results of an attempted parse.
pub enum ParseResult<T> {
    /// Parsed successfully.
    Success(T),
    /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
    Failure(syntax_pos::Span, Token),
    /// Fatal error (malformed macro?). Abort compilation.
    Error(syntax_pos::Span, String),
}

/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;

/// Count how many metavars are named in the given matcher `ms`.
pub fn count_names(ms: &[TokenTree]) -> usize {
    ms.iter().fold(0, |count, elt| {
        count + match *elt {
            // Sequences pre-compute their capture count when they are parsed.
            TokenTree::Sequence(_, ref seq) => seq.num_captures,
            TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
            TokenTree::MetaVar(..) => 0,
            TokenTree::MetaVarDecl(..) => 1,
            TokenTree::Token(..) => 0,
        }
    })
}

/// Initialize `len` empty shared `Vec`s to be used to store matches of metavars.
fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> { (0..len).into_iter().map(|_| Rc::new(Vec::new())).collect() } /// Generate the top-level matcher position in which the "dot" is before the first token of the /// matcher `ms` and we are going to start matching at position `lo` in the source. fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos { let match_idx_hi = count_names(ms); let matches = create_matches(match_idx_hi); MatcherPos { // Start with the top level matcher given to us top_elts: TtSeq(ms), // "elts" is an abbr. for "elements" // The "dot" is before the first token of the matcher idx: 0, // We start matching with byte `lo` in the source code sp_lo: lo, // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`. // `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since // we haven't actually matched anything yet. matches, match_lo: 0, match_cur: 0, match_hi: match_idx_hi, // Haven't descended into any delimiters, so empty stack stack: vec![], // Haven't descended into any sequences, so both of these are `None`. seq_op: None, sep: None, up: None, } } /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all /// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type /// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a /// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it. /// /// The in-memory structure of a particular `NamedMatch` represents the match /// that occurred when a particular subset of a matcher was applied to a /// particular token tree. /// /// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of /// the `MatchedNonterminal`s, will depend on the token tree it was applied /// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating /// token tree. 
/// The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
#[derive(Debug, Clone)]
pub enum NamedMatch {
    /// All the matches produced by one repetition, together with the span it covered.
    MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
    /// A single nonterminal produced by the black-box parser.
    MatchedNonterminal(Rc<Nonterminal>),
}

/// Takes a sequence of token trees `ms` representing a matcher which successfully matched input
/// and an iterator of items that matched input and produces a `NamedParseResult`.
fn nameize<I: Iterator<Item = NamedMatch>>(
    sess: &ParseSess,
    ms: &[TokenTree],
    mut res: I,
) -> NamedParseResult {
    // Recursively descend into each type of matcher (e.g. sequences, delimited, metavars) and make
    // sure that each metavar has _exactly one_ binding. If a metavar does not have exactly one
    // binding, then there is an error. If it does, then we insert the binding into the
    // `NamedParseResult`.
    //
    // NOTE: the traversal order here must mirror the order in which matches were
    // produced, since `res` is consumed one element per `MetaVarDecl` encountered.
    fn n_rec<I: Iterator<Item = NamedMatch>>(
        sess: &ParseSess,
        m: &TokenTree,
        res: &mut I,
        ret_val: &mut HashMap<Ident, Rc<NamedMatch>>,
    ) -> Result<(), (syntax_pos::Span, String)> {
        match *m {
            TokenTree::Sequence(_, ref seq) => for next_m in &seq.tts {
                n_rec(sess, next_m, res.by_ref(), ret_val)?
            },
            TokenTree::Delimited(_, ref delim) => for next_m in &delim.tts {
                n_rec(sess, next_m, res.by_ref(), ret_val)?;
            },
            TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
                    return Err((span, "missing fragment specifier".to_string()));
                }
            }
            TokenTree::MetaVarDecl(sp, bind_name, _) => {
                match ret_val.entry(bind_name) {
                    Vacant(spot) => {
                        // FIXME(simulacrum): Don't construct Rc here
                        spot.insert(Rc::new(res.next().unwrap()));
                    }
                    Occupied(..) => {
                        return Err((sp, format!("duplicated bind name: {}", bind_name)))
                    }
                }
            }
            // Plain tokens and metavar *uses* bind nothing.
            TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
        }

        Ok(())
    }

    let mut ret_val = HashMap::new();
    for m in ms {
        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
            Ok(_) => {}
            Err((sp, msg)) => return Error(sp, msg),
        }
    }

    Success(ret_val)
}

/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
pub fn parse_failure_msg(tok: Token) -> String {
    match tok {
        token::Eof => "unexpected end of macro invocation".to_string(),
        _ => format!(
            "no rules expected the token `{}`",
            pprust::token_to_string(&tok)
        ),
    }
}

/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
    if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
        id1.name == id2.name && is_raw1 == is_raw2
    } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) {
        id1.name == id2.name
    } else {
        *t1 == *t2
    }
}

/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
/// produce more items in `next_items`, `eof_items`, and `bb_items`.
///
/// For more info about how this happens, see the module-level doc comments and the inline
/// comments of this function.
///
/// # Parameters
///
/// - `sess`: the parsing session into which errors are emitted.
/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
///   successful execution of this function.
/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
///   the function `parse`.
/// - `eof_items`: the set of items that would be valid if this was the EOF.
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `token`: the current token of the parser.
/// - `span`: the `Span` in the source code corresponding to the token trees we are trying to match
///   against the matcher positions in `cur_items`.
///
/// # Returns
///
/// A `ParseResult`.
/// Note that matches are kept track of through the items generated.
fn inner_parse_loop<'a>(
    sess: &ParseSess,
    cur_items: &mut OneVector<MatcherPosHandle<'a>>,
    next_items: &mut Vec<MatcherPosHandle<'a>>,
    eof_items: &mut OneVector<MatcherPosHandle<'a>>,
    bb_items: &mut OneVector<MatcherPosHandle<'a>>,
    token: &Token,
    span: syntax_pos::Span,
) -> ParseResult<()> {
    // Pop items from `cur_items` until it is empty.
    while let Some(mut item) = cur_items.pop() {
        // When unzipped trees end, remove them. This corresponds to backtracking out of a
        // delimited submatcher into which we already descended. In backtracking out again, we need
        // to advance the "dot" past the delimiters in the outer matcher.
        while item.idx >= item.top_elts.len() {
            match item.stack.pop() {
                Some(MatcherTtFrame { elts, idx }) => {
                    item.top_elts = elts;
                    // `idx + 1`: step past the delimited tree we just finished.
                    item.idx = idx + 1;
                }
                None => break,
            }
        }

        // Get the current position of the "dot" (`idx`) in `item` and the number of token trees in
        // the matcher (`len`).
        let idx = item.idx;
        let len = item.top_elts.len();

        // If `idx >= len`, then we are at or past the end of the matcher of `item`.
        if idx >= len {
            // We are repeating iff there is a parent. If the matcher is inside of a repetition,
            // then we could be at the end of a sequence or at the beginning of the next
            // repetition.
            if item.up.is_some() {
                // At this point, regardless of whether there is a separator, we should add all
                // matches from the complete repetition of the sequence to the shared, top-level
                // `matches` list (actually, `up.matches`, which could itself not be the top-level,
                // but anyway...). Moreover, we add another item to `cur_items` in which the "dot"
                // is at the end of the `up` matcher. This ensures that the "dot" in the `up`
                // matcher is also advanced sufficiently.
                //
                // NOTE: removing the condition `idx == len` allows trailing separators.
                if idx == len {
                    // Get the `up` matcher
                    let mut new_pos = item.up.clone().unwrap();

                    // Add matches from this repetition to the `matches` of `up`
                    for idx in item.match_lo..item.match_hi {
                        let sub = item.matches[idx].clone();
                        let span = span.with_lo(item.sp_lo);
                        new_pos.push_match(idx, MatchedSeq(sub, span));
                    }

                    // Move the "dot" past the repetition in `up`
                    new_pos.match_cur = item.match_hi;
                    new_pos.idx += 1;
                    cur_items.push(new_pos);
                }

                // Check if we need a separator.
                if idx == len && item.sep.is_some() {
                    // We have a separator, and it is the current token. We can advance past the
                    // separator token.
                    if item.sep
                        .as_ref()
                        .map(|sep| token_name_eq(token, sep))
                        .unwrap_or(false)
                    {
                        item.idx += 1;
                        next_items.push(item);
                    }
                }
                // We don't need a separator. Move the "dot" back to the beginning of the matcher
                // and try to match again UNLESS we are only allowed to have _one_ repetition.
                else if item.seq_op != Some(quoted::KleeneOp::ZeroOrOne) {
                    item.match_cur = item.match_lo;
                    item.idx = 0;
                    cur_items.push(item);
                }
            }
            // If we are not in a repetition, then being at the end of a matcher means that we have
            // reached the potential end of the input.
            else {
                eof_items.push(item);
            }
        }
        // We are in the middle of a matcher.
        else {
            // Look at what token in the matcher we are trying to match the current token (`token`)
            // against. Depending on that, we may generate new items.
            match item.top_elts.get_tt(idx) {
                // Need to descend into a sequence
                TokenTree::Sequence(sp, seq) => {
                    // Examine the case where there are 0 matches of this sequence
                    if seq.op == quoted::KleeneOp::ZeroOrMore
                        || seq.op == quoted::KleeneOp::ZeroOrOne
                    {
                        // Fork an item that skips the sequence entirely, binding
                        // every metavar in it to an empty `MatchedSeq`.
                        let mut new_item = item.clone();
                        new_item.match_cur += seq.num_captures;
                        new_item.idx += 1;
                        for idx in item.match_cur..item.match_cur + seq.num_captures {
                            new_item.push_match(idx, MatchedSeq(Rc::new(vec![]), sp));
                        }
                        cur_items.push(new_item);
                    }

                    // And in parallel, try to match one or more repetitions by
                    // descending into the sequence with `item` as the parent (`up`).
                    let matches = create_matches(item.matches.len());
                    cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos {
                        stack: vec![],
                        sep: seq.separator.clone(),
                        seq_op: Some(seq.op),
                        idx: 0,
                        matches,
                        match_lo: item.match_cur,
                        match_cur: item.match_cur,
                        match_hi: item.match_cur + seq.num_captures,
                        up: Some(item),
                        sp_lo: sp.lo(),
                        top_elts: Tt(TokenTree::Sequence(sp, seq)),
                    })));
                }

                // We need to match a metavar (but the identifier is invalid)... this is an error
                TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
                        return Error(span, "missing fragment specifier".to_string());
                    }
                }

                // We need to match a metavar with a valid ident... call out to the black-box
                // parser by adding an item to `bb_items`.
                TokenTree::MetaVarDecl(_, _, id) => {
                    // Built-in nonterminals never start with these tokens,
                    // so we can eliminate them from consideration.
                    if may_begin_with(&*id.as_str(), token) {
                        bb_items.push(item);
                    }
                }

                // We need to descend into a delimited submatcher or a doc comment. To do this, we
                // push the current matcher onto a stack and push a new item containing the
                // submatcher onto `cur_items`.
                //
                // At the beginning of the loop, if we reach the end of the delimited submatcher,
                // we pop the stack to backtrack out of the descent.
                seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
                    let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                    let idx = item.idx;
                    item.stack.push(MatcherTtFrame {
                        elts: lower_elts,
                        idx,
                    });
                    item.idx = 0;
                    cur_items.push(item);
                }

                // We just matched a normal token. We can just advance the parser.
                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
                    item.idx += 1;
                    next_items.push(item);
                }

                // There was another token that was not `token`... This means we can't add any
                // rules. NOTE that this is not necessarily an error unless _all_ items in
                // `cur_items` end up doing this. There may still be some other matchers that do
                // end up working out.
                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
            }
        }
    }

    // Yay a successful parse (so far)!
    Success(())
}

/// Use the given sequence of token trees (`ms`) as a matcher. Match the given token stream `tts`
/// against it and return the match.
///
/// # Parameters
///
/// - `sess`: The session into which errors are emitted
/// - `tts`: The tokenstream we are matching against the pattern `ms`
/// - `ms`: A sequence of token trees representing a pattern against which we are matching
/// - `directory`: Information about the file locations (needed for the black-box parser)
/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
///   parser)
pub fn parse(
    sess: &ParseSess,
    tts: TokenStream,
    ms: &[TokenTree],
    directory: Option<Directory>,
    recurse_into_modules: bool,
) -> NamedParseResult {
    // Create a parser that can be used for the "black box" parts.
    let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true);

    // A queue of possible matcher positions. We initialize it with the matcher position in which
    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
    // processes all of these possible matcher positions and produces possible next positions into
    // `next_items`.
    // After some post-processing, the contents of `next_items` replenish `cur_items`
    // and we start over again.
    //
    // This MatcherPos instance is allocated on the stack. All others -- and
    // there are frequently *no* others! -- are allocated on the heap.
    let mut initial = initial_matcher_pos(ms, parser.span.lo());
    let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
    let mut next_items = Vec::new();

    loop {
        // Matcher positions black-box parsed by parser.rs (`parser`)
        let mut bb_items = OneVector::new();

        // Matcher positions that would be valid if the macro invocation was over now
        let mut eof_items = OneVector::new();
        assert!(next_items.is_empty());

        // Process `cur_items` until either we have finished the input or we need to get some
        // parsing from the black-box parser done. The result is that `next_items` will contain a
        // bunch of possible next matcher positions in `next_items`.
        match inner_parse_loop(
            sess,
            &mut cur_items,
            &mut next_items,
            &mut eof_items,
            &mut bb_items,
            &parser.token,
            parser.span,
        ) {
            Success(_) => {}
            Failure(sp, tok) => return Failure(sp, tok),
            Error(sp, msg) => return Error(sp, msg),
        }

        // inner parse loop handled all cur_items, so it's empty
        assert!(cur_items.is_empty());

        // We need to do some post processing after the `inner_parser_loop`.
        //
        // Error messages here could be improved with links to original rules.

        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
        // either the parse is ambiguous (which should never happen) or there is a syntax error.
        if token_name_eq(&parser.token, &token::Eof) {
            if eof_items.len() == 1 {
                let matches = eof_items[0]
                    .matches
                    .iter_mut()
                    .map(|dv| Rc::make_mut(dv).pop().unwrap());
                return nameize(sess, ms, matches);
            } else if eof_items.len() > 1 {
                return Error(
                    parser.span,
                    "ambiguity: multiple successful parses".to_string(),
                );
            } else {
                return Failure(parser.span, token::Eof);
            }
        }
        // Performance hack: eof_items may share matchers via Rc with other things that we want
        // to modify. Dropping eof_items now may drop these refcounts to 1, preventing an
        // unnecessary implicit clone later in Rc::make_mut.
        drop(eof_items);

        // Another possibility is that we need to call out to parse some rust nonterminal
        // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
        if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
            let nts = bb_items
                .iter()
                .map(|item| match item.top_elts.get_tt(item.idx) {
                    TokenTree::MetaVarDecl(_, bind, name) => format!("{} ('{}')", name, bind),
                    _ => panic!(),
                })
                .collect::<Vec<String>>()
                .join(" or ");
            return Error(
                parser.span,
                format!(
                    "local ambiguity: multiple parsing options: {}",
                    match next_items.len() {
                        0 => format!("built-in NTs {}.", nts),
                        1 => format!("built-in NTs {} or 1 other option.", nts),
                        n => format!("built-in NTs {} or {} other options.", nts, n),
                    }
                ),
            );
        }
        // If there are no possible next positions AND we aren't waiting for the black-box parser,
        // then there is a syntax error.
        else if bb_items.is_empty() && next_items.is_empty() {
            return Failure(parser.span, parser.token);
        }
        // Dump all possible `next_items` into `cur_items` for the next iteration.
        else if !next_items.is_empty() {
            // Now process the next token
            cur_items.extend(next_items.drain(..));
            parser.bump();
        }
        // Finally, we have the case where we need to call the black-box parser to get some
        // nonterminal.
        else {
            assert_eq!(bb_items.len(), 1);

            let mut item = bb_items.pop().unwrap();
            if let TokenTree::MetaVarDecl(span, _, ident) = item.top_elts.get_tt(item.idx) {
                let match_cur = item.match_cur;
                // Hand the fragment off to the real parser and record the result.
                item.push_match(
                    match_cur,
                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))),
                );
                item.idx += 1;
                item.match_cur += 1;
            } else {
                unreachable!()
            }
            cur_items.push(item);
        }

        assert!(!cur_items.is_empty());
    }
}

/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
    match *token {
        token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
            Some((ident, is_raw)),
        _ => None,
    }
}

/// Checks whether a non-terminal may begin with a particular token.
///
/// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
/// token. Be conservative (return true) if not sure.
fn may_begin_with(name: &str, token: &Token) -> bool {
    /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
    fn may_be_ident(nt: &token::Nonterminal) -> bool {
        match *nt {
            token::NtItem(_) | token::NtBlock(_) | token::NtVis(_) => false,
            _ => true,
        }
    }

    match name {
        "expr" => token.can_begin_expr(),
        "ty" => token.can_begin_type(),
        "ident" => get_macro_ident(token).is_some(),
        "literal" => token.can_begin_literal_or_bool(),
        "vis" => match *token {
            // The follow-set of :vis + "priv" keyword + interpolated
            Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
            _ => token.can_begin_type(),
        },
        "block" => match *token {
            Token::OpenDelim(token::Brace) => true,
            Token::Interpolated(ref nt) => match nt.0 {
                token::NtItem(_)
                | token::NtPat(_)
                | token::NtTy(_)
                | token::NtIdent(..)
                | token::NtMeta(_)
                | token::NtPath(_)
                | token::NtVis(_) => false, // none of these may start with '{'.
                _ => true,
            },
            _ => false,
        },
        "path" | "meta" => match *token {
            Token::ModSep | Token::Ident(..) => true,
            Token::Interpolated(ref nt) => match nt.0 {
                token::NtPath(_) | token::NtMeta(_) => true,
                _ => may_be_ident(&nt.0),
            },
            _ => false,
        },
        "pat" => match *token {
            Token::Ident(..) |               // box, ref, mut, and other identifiers (can stricten)
            Token::OpenDelim(token::Paren) |    // tuple pattern
            Token::OpenDelim(token::Bracket) |  // slice pattern
            Token::BinOp(token::And) |          // reference
            Token::BinOp(token::Minus) |        // negative literal
            Token::AndAnd |                     // double reference
            Token::Literal(..) |                // literal
            Token::DotDot |                     // range pattern (future compat)
            Token::DotDotDot |                  // range pattern (future compat)
            Token::ModSep |                     // path
            Token::Lt |                         // path (UFCS constant)
            Token::BinOp(token::Shl) => true,   // path (double UFCS)
            Token::Interpolated(ref nt) => may_be_ident(&nt.0),
            _ => false,
        },
        "lifetime" => match *token {
            Token::Lifetime(_) => true,
            Token::Interpolated(ref nt) => match nt.0 {
                token::NtLifetime(_) | token::NtTT(_) => true,
                _ => false,
            },
            _ => false,
        },
        // All other fragment kinds (e.g. `tt`, `item`, `stmt`) are treated
        // conservatively: anything but a closing delimiter may begin them.
        _ => match *token {
            token::CloseDelim(_) => false,
            _ => true,
        },
    }
}

/// A call to the "black-box" parser to parse some rust nonterminal.
///
/// # Parameters
///
/// - `p`: the "black-box" parser to use
/// - `sp`: the `Span` we want to parse
/// - `name`: the name of the metavar _matcher_ we want to match (e.g. `tt`, `ident`, `block`,
///   etc...)
///
/// # Returns
///
/// The parsed nonterminal.
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { if name == "tt" { return token::NtTT(p.parse_token_tree()); } // check at the beginning and the parser checks after each bump p.process_potential_macro_variable(); match name { "item" => match panictry!(p.parse_item()) { Some(i) => token::NtItem(i), None => { p.fatal("expected an item keyword").emit(); FatalError.raise(); } }, "block" => token::NtBlock(panictry!(p.parse_block())), "stmt" => match panictry!(p.parse_stmt()) { Some(s) => token::NtStmt(s), None => { p.fatal("expected a statement").emit(); FatalError.raise(); } }, "pat" => token::NtPat(panictry!(p.parse_pat())), "expr" => token::NtExpr(panictry!(p.parse_expr())), "literal" => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())), "ty" => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one "ident" => if let Some((ident, is_raw)) = get_macro_ident(&p.token) { let span = p.span; p.bump(); token::NtIdent(Ident::new(ident.name, span), is_raw) } else { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", &token_str)).emit(); FatalError.raise() } "path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))), "meta" => token::NtMeta(panictry!(p.parse_meta_item())), "vis" => token::NtVis(panictry!(p.parse_visibility(true))), "lifetime" => if p.check_lifetime() { token::NtLifetime(p.expect_lifetime().ident) } else { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected a lifetime, found `{}`", &token_str)).emit(); FatalError.raise(); } // this is not supposed to happen, since it has been checked // when compiling the macro. _ => p.span_bug(sp, "invalid fragment specifier"), } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/classify.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Routines the parser uses to classify AST nodes // Predicates on exprs and stmts that the pretty-printer and parser use use ast; /// Does this expression require a semicolon to be treated /// as a statement? The negation of this: 'can this expression /// be used as a statement without a semicolon' -- is used /// as an early-bail-out in the parser so that, for instance, /// if true {...} else {...} /// |x| 5 /// isn't parsed as (if true {...} else {...} | x) | 5 pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { match e.node { ast::ExprKind::If(..) | ast::ExprKind::IfLet(..) | ast::ExprKind::Match(..) | ast::ExprKind::Block(..) | ast::ExprKind::While(..) | ast::ExprKind::WhileLet(..) | ast::ExprKind::Loop(..) | ast::ExprKind::ForLoop(..) | ast::ExprKind::Catch(..) => false, _ => true, } } /// this statement requires a semicolon after it. /// note that in one case (`stmt_semi`), we've already /// seen the semicolon, and thus don't need another. pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool { match *stmt { ast::StmtKind::Local(_) => true, ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e), ast::StmtKind::Item(_) | ast::StmtKind::Semi(..) | ast::StmtKind::Mac(..) => false, } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/token.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

pub use self::BinOpToken::*;
pub use self::Nonterminal::*;
pub use self::DelimToken::*;
pub use self::Lit::*;
pub use self::Token::*;

use ast::{self};
use parse::ParseSess;
use print::pprust;
use ptr::P;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use symbol::keywords;
use syntax::parse::parse_stream_from_source_str;
use syntax_pos::{self, Span, FileName};
use syntax_pos::symbol::{self, Symbol};
use tokenstream::{TokenStream, TokenTree};
use tokenstream;

use std::{cmp, fmt};
use std::mem;
use rustc_data_structures::sync::{Lrc, Lock};

/// A binary operator token (also used for the compound-assignment forms via `BinOpEq`).
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BinOpToken {
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Caret,
    And,
    Or,
    Shl,
    Shr,
}

/// A delimiter token
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum DelimToken {
    /// A round parenthesis: `(` or `)`
    Paren,
    /// A square bracket: `[` or `]`
    Bracket,
    /// A curly brace: `{` or `}`
    Brace,
    /// An empty delimiter
    NoDelim,
}

impl DelimToken {
    /// Source length of one side of the delimiter (0 for `NoDelim`, 1 otherwise).
    pub fn len(self) -> usize {
        if self == NoDelim { 0 } else { 1 }
    }

    pub fn is_empty(self) -> bool {
        self == NoDelim
    }
}

/// A literal token, carrying the (interned) source text of the literal body.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum Lit {
    Byte(ast::Name),
    Char(ast::Name),
    Integer(ast::Name),
    Float(ast::Name),
    Str_(ast::Name),
    StrRaw(ast::Name, u16), /* raw str delimited by n hash symbols */
    ByteStr(ast::Name),
    ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
}

impl Lit {
    /// Human-readable kind name, used in diagnostics.
    crate fn short_name(&self) -> &'static str {
        match *self {
            Byte(_) => "byte",
            Char(_) => "char",
            Integer(_) => "integer",
            Float(_) => "float",
            Str_(_) | StrRaw(..) => "string",
            ByteStr(_) | ByteStrRaw(..) => "byte string"
        }
    }

    // See comments in `interpolated_to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality
    fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
        mem::discriminant(self) == mem::discriminant(other)
    }
}

/// Can `ident` begin an expression? True for non-reserved identifiers, path
/// segment keywords, and the listed expression-starting keywords.
pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
    let ident_token: Token = Ident(ident, is_raw);

    !ident_token.is_reserved_ident()
        || ident_token.is_path_segment_keyword()
        || [
            keywords::Async.name(),
            keywords::Do.name(),
            keywords::Box.name(),
            keywords::Break.name(),
            keywords::Continue.name(),
            keywords::False.name(),
            keywords::For.name(),
            keywords::If.name(),
            keywords::Loop.name(),
            keywords::Match.name(),
            keywords::Move.name(),
            keywords::Return.name(),
            keywords::True.name(),
            keywords::Unsafe.name(),
            keywords::While.name(),
            keywords::Yield.name(),
            keywords::Static.name(),
        ].contains(&ident.name)
}

/// Can `ident` begin a type? Analogous to `ident_can_begin_expr`.
fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
    let ident_token: Token = Ident(ident, is_raw);

    !ident_token.is_reserved_ident()
        || ident_token.is_path_segment_keyword()
        || [
            keywords::Underscore.name(),
            keywords::For.name(),
            keywords::Impl.name(),
            keywords::Fn.name(),
            keywords::Unsafe.name(),
            keywords::Extern.name(),
            keywords::Typeof.name(),
        ].contains(&ident.name)
}

#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
pub enum Token {
    /* Expression-operator symbols. */
    Eq,
    Lt,
    Le,
    EqEq,
    Ne,
    Ge,
    Gt,
    AndAnd,
    OrOr,
    Not,
    Tilde,
    BinOp(BinOpToken),
    BinOpEq(BinOpToken),

    /* Structural symbols */
    At,
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    DotEq, // HACK(durka42) never produced by the parser, only used for libproc_macro
    Comma,
    Semi,
    Colon,
    ModSep,
    RArrow,
    LArrow,
    FatArrow,
    Pound,
    Dollar,
    Question,
    /// Used by proc macros for representing lifetimes, not generated by lexer right now.
    SingleQuote,
    /// An opening delimiter, eg. `{`
    OpenDelim(DelimToken),
    /// A closing delimiter, eg. `}`
    CloseDelim(DelimToken),

    /* Literals */
    Literal(Lit, Option<ast::Name>),

    /* Name components */
    Ident(ast::Ident, /* is_raw */ bool),
    Lifetime(ast::Ident),

    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),

    // Can be expanded into several tokens.
    /// Doc comment
    DocComment(ast::Name),

    // Junk. These carry no data because we don't really care about the data
    // they *would* carry, and don't really want to allocate a new ident for
    // them. Instead, users could extract that from the associated span.
    /// Whitespace
    Whitespace,
    /// Comment
    Comment,
    Shebang(ast::Name),

    Eof,
}

impl Token {
    /// Wrap a `Nonterminal` in an `Interpolated` token with a fresh (empty) lazy stream.
    pub fn interpolated(nt: Nonterminal) -> Token {
        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
    }

    /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
    pub fn from_ast_ident(ident: ast::Ident) -> Token {
        Ident(ident, ident.is_raw_guess())
    }

    /// Is this `+` or `+=`?
    crate fn is_like_plus(&self) -> bool {
        match *self {
            BinOp(Plus) | BinOpEq(Plus) => true,
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of an expression.
    crate fn can_begin_expr(&self) -> bool {
        match *self {
            Ident(ident, is_raw) => ident_can_begin_expr(ident, is_raw), // value name or keyword
            OpenDelim(..) | // tuple, array or block
            Literal(..) | // literal
            Not | // operator not
            BinOp(Minus) | // unary minus
            BinOp(Star) | // dereference
            BinOp(Or) | OrOr | // closure
            BinOp(And) | // reference
            AndAnd | // double reference
            // DotDotDot is no longer supported, but we need some way to display the error
            DotDot | DotDotDot | DotDotEq | // range notation
            Lt | BinOp(Shl) | // associated path
            ModSep | // global path
            Lifetime(..) | // labeled loop
            Pound => true, // expression attributes
            Interpolated(ref nt) => match nt.0 {
                NtLiteral(..) | NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..)
                | NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a type.
    crate fn can_begin_type(&self) -> bool {
        match *self {
            Ident(ident, is_raw) => ident_can_begin_type(ident, is_raw), // type name or keyword
            OpenDelim(Paren) | // tuple
            OpenDelim(Bracket) | // array
            Not | // never
            BinOp(Star) | // raw pointer
            BinOp(And) | // reference
            AndAnd | // double reference
            Question | // maybe bound in trait object
            Lifetime(..) | // lifetime bound in trait object
            Lt | BinOp(Shl) | // associated path
            ModSep => true, // global path
            Interpolated(ref nt) => match nt.0 {
                NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a generic bound.
    crate fn can_begin_bound(&self) -> bool {
        self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For)
            || self == &Question || self == &OpenDelim(Paren)
    }

    /// Returns `true` if the token is any literal
    crate fn is_lit(&self) -> bool {
        match *self {
            Literal(..) => true,
            _ => false,
        }
    }

    /// Returns `true` if the token is any literal, a minus (which can follow a literal,
    /// for example a '-42', or one of the boolean idents).
    crate fn can_begin_literal_or_bool(&self) -> bool {
        match *self {
            Literal(..) => true,
            BinOp(Minus) => true,
            Ident(ident, false) if ident.name == keywords::True.name() => true,
            Ident(ident, false) if ident.name == keywords::False.name() => true,
            Interpolated(ref nt) => match nt.0 {
                NtLiteral(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns an identifier if this token is an identifier.
    /// Also looks through an interpolated `NtIdent`.
    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
        match *self {
            Ident(ident, is_raw) => Some((ident, is_raw)),
            Interpolated(ref nt) => match nt.0 {
                NtIdent(ident, is_raw) => Some((ident, is_raw)),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns a lifetime identifier if this token is a lifetime.
    /// Also looks through an interpolated `NtLifetime`.
    pub fn lifetime(&self) -> Option<ast::Ident> {
        match *self {
            Lifetime(ident) => Some(ident),
            Interpolated(ref nt) => match nt.0 {
                NtLifetime(ident) => Some(ident),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns `true` if the token is an identifier.
    pub fn is_ident(&self) -> bool {
        self.ident().is_some()
    }

    /// Returns `true` if the token is a lifetime.
    crate fn is_lifetime(&self) -> bool {
        self.lifetime().is_some()
    }

    /// Returns `true` if the token is a identifier whose name is the given
    /// string slice.
    crate fn is_ident_named(&self, name: &str) -> bool {
        match self.ident() {
            Some((ident, _)) => ident.as_str() == name,
            None => false
        }
    }

    /// Returns `true` if the token is an interpolated path.
    fn is_path(&self) -> bool {
        if let Interpolated(ref nt) = *self {
            if let NtPath(..) = nt.0 {
                return true;
            }
        }
        false
    }

    /// Returns `true` if the token is either the `mut` or `const` keyword.
    crate fn is_mutability(&self) -> bool {
        self.is_keyword(keywords::Mut) || self.is_keyword(keywords::Const)
    }

    /// Returns `true` if the token can start a qualified path (`<T as Trait>::...`).
    crate fn is_qpath_start(&self) -> bool {
        self == &Lt || self == &BinOp(Shl)
    }

    crate fn is_path_start(&self) -> bool {
        self == &ModSep || self.is_qpath_start() || self.is_path()
            || self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
    }

    /// Returns `true` if the token is a given keyword, `kw`.
    /// Raw identifiers (`r#foo`) never count as keywords.
    pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
        self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
    }

    pub fn is_path_segment_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_path_segment_keyword(),
            _ => false,
        }
    }

    // Returns true for reserved identifiers used internally for elided lifetimes,
    // unnamed method parameters, crate root module, error recovery etc.
    pub fn is_special_ident(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_special(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword used in the language.
    crate fn is_used_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_used_keyword(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword reserved for possible future use.
    crate fn is_unused_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_unused_keyword(),
            _ => false,
        }
    }

    /// Returns `true` if the token is either a special identifier or a keyword.
    pub fn is_reserved_ident(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_reserved(),
            _ => false,
        }
    }

    /// Try to combine `self` with the immediately following token `joint` into a
    /// single multi-character token (e.g. `=` + `=` -> `==`). Returns `None` when
    /// the pair does not fuse.
    crate fn glue(self, joint: Token) -> Option<Token> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
                Gt => FatArrow,
                _ => return None,
            },
            Lt => match joint {
                Eq => Le,
                Lt => BinOp(Shl),
                Le => BinOpEq(Shl),
                BinOp(Minus) => LArrow,
                _ => return None,
            },
            Gt => match joint {
                Eq => Ge,
                Gt => BinOp(Shr),
                Ge => BinOpEq(Shr),
                _ => return None,
            },
            Not => match joint {
                Eq => Ne,
                _ => return None,
            },
            BinOp(op) => match joint {
                Eq => BinOpEq(op),
                BinOp(And) if op == And => AndAnd,
                BinOp(Or) if op == Or => OrOr,
                Gt if op == Minus => RArrow,
                _ => return None,
            },
            Dot => match joint {
                Dot => DotDot,
                DotDot => DotDotDot,
                DotEq => DotDotEq,
                _ => return None,
            },
            DotDot => match joint {
                Dot => DotDotDot,
                Eq => DotDotEq,
                _ => return None,
            },
            Colon => match joint {
                Colon => ModSep,
                _ => return None,
            },
            SingleQuote => match joint {
                // `'` + ident fuses into a lifetime token.
                Ident(ident, false) => {
                    let name = Symbol::intern(&format!("'{}", ident));
                    Lifetime(symbol::Ident {
                        name,
                        span: ident.span,
                    })
                }
                _ => return None,
            },

            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq
            | DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar
            | Question | OpenDelim(..) | CloseDelim(..) => return None,

            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..)
            | Whitespace | Comment | Shebang(..) | Eof => return None,
        })
    }

    /// Returns tokens that are likely to be typed accidentally instead of the current token.
/// Enables better error recovery when the wrong token is found. crate fn similar_tokens(&self) -> Option<Vec<Token>> { match *self { Comma => Some(vec![Dot, Lt]), Semi => Some(vec![Colon]), _ => None } } pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream { let nt = match *self { Token::Interpolated(ref nt) => nt, _ => panic!("only works on interpolated tokens"), }; // An `Interpolated` token means that we have a `Nonterminal` // which is often a parsed AST item. At this point we now need // to convert the parsed AST to an actual token stream, e.g. // un-parse it basically. // // Unfortunately there's not really a great way to do that in a // guaranteed lossless fashion right now. The fallback here is // to just stringify the AST node and reparse it, but this loses // all span information. // // As a result, some AST nodes are annotated with the token // stream they came from. Here we attempt to extract these // lossless token streams before we fall back to the // stringification. 
let mut tokens = None; match nt.0 { Nonterminal::NtItem(ref item) => { tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); } Nonterminal::NtTraitItem(ref item) => { tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); } Nonterminal::NtImplItem(ref item) => { tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); } Nonterminal::NtIdent(ident, is_raw) => { let token = Token::Ident(ident, is_raw); tokens = Some(TokenTree::Token(ident.span, token).into()); } Nonterminal::NtLifetime(ident) => { let token = Token::Lifetime(ident); tokens = Some(TokenTree::Token(ident.span, token).into()); } Nonterminal::NtTT(ref tt) => { tokens = Some(tt.clone().into()); } _ => {} } let tokens_for_real = nt.1.force(|| { // FIXME(#43081): Avoid this pretty-print + reparse hack let source = pprust::token_to_string(self); parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span)) }); // During early phases of the compiler the AST could get modified // directly (e.g. attributes added or removed) and the internal cache // of tokens my not be invalidated or updated. Consequently if the // "lossless" token stream disagrees with our actual stringification // (which has historically been much more battle-tested) then we go // with the lossy stream anyway (losing span information). // // Note that the comparison isn't `==` here to avoid comparing spans, // but it *also* is a "probable" equality which is a pretty weird // definition. We mostly want to catch actual changes to the AST // like a `#[cfg]` being processed or some weird `macro_rules!` // expansion. // // What we *don't* want to catch is the fact that a user-defined // literal like `0xf` is stringified as `15`, causing the cached token // stream to not be literal `==` token-wise (ignoring spans) to the // token stream we got from stringification. // // Instead the "probably equal" check here is "does each token // recursively have the same discriminant?" 
We basically don't look at // the token values here and assume that such fine grained modifications // of token streams doesn't happen. if let Some(tokens) = tokens { if tokens.probably_equal_for_proc_macro(&tokens_for_real) { return tokens } info!("cached tokens found, but they're not \"probably equal\", \ going with stringified version"); } return tokens_for_real } // See comments in `interpolated_to_tokenstream` for why we care about // *probably* equal here rather than actual equality crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool { if mem::discriminant(self) != mem::discriminant(other) { return false } match (self, other) { (&Eq, &Eq) | (&Lt, &Lt) | (&Le, &Le) | (&EqEq, &EqEq) | (&Ne, &Ne) | (&Ge, &Ge) | (&Gt, &Gt) | (&AndAnd, &AndAnd) | (&OrOr, &OrOr) | (&Not, &Not) | (&Tilde, &Tilde) | (&At, &At) | (&Dot, &Dot) | (&DotDot, &DotDot) | (&DotDotDot, &DotDotDot) | (&DotDotEq, &DotDotEq) | (&DotEq, &DotEq) | (&Comma, &Comma) | (&Semi, &Semi) | (&Colon, &Colon) | (&ModSep, &ModSep) | (&RArrow, &RArrow) | (&LArrow, &LArrow) | (&FatArrow, &FatArrow) | (&Pound, &Pound) | (&Dollar, &Dollar) | (&Question, &Question) | (&Whitespace, &Whitespace) | (&Comment, &Comment) | (&Eof, &Eof) => true, (&BinOp(a), &BinOp(b)) | (&BinOpEq(a), &BinOpEq(b)) => a == b, (&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b, (&DocComment(a), &DocComment(b)) | (&Shebang(a), &Shebang(b)) => a == b, (&Lifetime(a), &Lifetime(b)) => a.name == b.name, (&Ident(a, b), &Ident(c, d)) => a.name == c.name && b == d, (&Literal(ref a, b), &Literal(ref c, d)) => { b == d && a.probably_equal_for_proc_macro(c) } (&Interpolated(_), &Interpolated(_)) => false, _ => panic!("forgot to add a token?"), } } } #[derive(Clone, RustcEncodable, RustcDecodable)] /// For interpolation during macro expansion. 
pub enum Nonterminal {
    NtItem(P<ast::Item>),
    NtBlock(P<ast::Block>),
    NtStmt(ast::Stmt),
    NtPat(P<ast::Pat>),
    NtExpr(P<ast::Expr>),
    NtTy(P<ast::Ty>),
    NtIdent(ast::Ident, /* is_raw */ bool),
    NtLifetime(ast::Ident),
    NtLiteral(P<ast::Expr>),
    /// Stuff inside brackets for attributes
    NtMeta(ast::MetaItem),
    NtPath(ast::Path),
    NtVis(ast::Visibility),
    NtTT(TokenTree),
    // These are not exposed to macros, but are used by quasiquote.
    NtArm(ast::Arm),
    NtImplItem(ast::ImplItem),
    NtTraitItem(ast::TraitItem),
    NtForeignItem(ast::ForeignItem),
    NtGenerics(ast::Generics),
    NtWhereClause(ast::WhereClause),
    NtArg(ast::Arg),
}

impl PartialEq for Nonterminal {
    // Only the single-token-ish variants can be compared structurally;
    // everything else is conservatively unequal (see FIXME below).
    fn eq(&self, rhs: &Self) -> bool {
        match (self, rhs) {
            (NtIdent(ident_lhs, is_raw_lhs), NtIdent(ident_rhs, is_raw_rhs)) =>
                ident_lhs == ident_rhs && is_raw_lhs == is_raw_rhs,
            (NtLifetime(ident_lhs), NtLifetime(ident_rhs)) => ident_lhs == ident_rhs,
            (NtTT(tt_lhs), NtTT(tt_rhs)) => tt_lhs == tt_rhs,
            // FIXME: Assume that all "complex" nonterminal are not equal, we can't compare them
            // correctly based on data from AST. This will prevent them from matching each other
            // in macros. The comparison will become possible only when each nonterminal has an
            // attached token stream from which it was parsed.
            _ => false,
        }
    }
}

impl fmt::Debug for Nonterminal {
    // Variant name only — the payloads are large AST nodes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            NtItem(..) => f.pad("NtItem(..)"),
            NtBlock(..) => f.pad("NtBlock(..)"),
            NtStmt(..) => f.pad("NtStmt(..)"),
            NtPat(..) => f.pad("NtPat(..)"),
            NtExpr(..) => f.pad("NtExpr(..)"),
            NtTy(..) => f.pad("NtTy(..)"),
            NtIdent(..) => f.pad("NtIdent(..)"),
            NtLiteral(..) => f.pad("NtLiteral(..)"),
            NtMeta(..) => f.pad("NtMeta(..)"),
            NtPath(..) => f.pad("NtPath(..)"),
            NtTT(..) => f.pad("NtTT(..)"),
            NtArm(..) => f.pad("NtArm(..)"),
            NtImplItem(..) => f.pad("NtImplItem(..)"),
            NtTraitItem(..) => f.pad("NtTraitItem(..)"),
            NtForeignItem(..) => f.pad("NtForeignItem(..)"),
            NtGenerics(..) => f.pad("NtGenerics(..)"),
            NtWhereClause(..) => f.pad("NtWhereClause(..)"),
            NtArg(..) => f.pad("NtArg(..)"),
            NtVis(..) => f.pad("NtVis(..)"),
            NtLifetime(..) => f.pad("NtLifetime(..)"),
        }
    }
}

/// Returns `true` if `tok` is an operator-like token (i.e. not a delimiter,
/// literal, doc comment, identifier, lifetime, interpolation, whitespace,
/// comment, shebang or EOF).
crate fn is_op(tok: &Token) -> bool {
    match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Lifetime(..) | Interpolated(..) |
        Whitespace | Comment | Shebang(..) | Eof => false,
        _ => true,
    }
}

/// A token stream computed on first use and cached thereafter
/// (see `force`). Wrapped in a `Lock` for shared mutation.
#[derive(Clone)]
pub struct LazyTokenStream(Lock<Option<TokenStream>>);

impl cmp::Eq for LazyTokenStream {}
impl PartialEq for LazyTokenStream {
    // The cache never participates in equality: all instances compare equal.
    fn eq(&self, _other: &LazyTokenStream) -> bool {
        true
    }
}

impl fmt::Debug for LazyTokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.clone().0.into_inner(), f)
    }
}

impl LazyTokenStream {
    /// Creates an empty (not-yet-computed) cache.
    fn new() -> Self {
        LazyTokenStream(Lock::new(None))
    }

    /// Returns the cached stream, computing it with `f` on first call.
    fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
        let mut opt_stream = self.0.lock();
        if opt_stream.is_none() {
            *opt_stream = Some(f());
        }
        opt_stream.clone().unwrap()
    }
}

// The cache is deliberately not serialized: encoding writes nothing and
// decoding yields a fresh empty cache.
impl Encodable for LazyTokenStream {
    fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
        Ok(())
    }
}

impl Decodable for LazyTokenStream {
    fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
        Ok(LazyTokenStream::new())
    }
}

// Consistent with the all-equal `PartialEq` above: hashing adds nothing.
impl ::std::hash::Hash for LazyTokenStream {
    fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
}

/// Prepends the token streams for `attrs` (synthesized as `# [ path tokens ]`
/// per attribute, or reparsed for sugared doc comments) onto `tokens`.
/// Returns `None` when no cached `tokens` are available.
fn prepend_attrs(sess: &ParseSess,
                 attrs: &[ast::Attribute],
                 tokens: Option<&tokenstream::TokenStream>,
                 span: syntax_pos::Span)
    -> Option<tokenstream::TokenStream>
{
    let tokens = tokens?;
    if attrs.len() == 0 {
        return Some(tokens.clone())
    }
    let mut builder = tokenstream::TokenStreamBuilder::new();
    for attr in attrs {
        assert_eq!(attr.style, ast::AttrStyle::Outer,
                   "inner attributes should prevent cached tokens from existing");

        if attr.is_sugared_doc {
            // Doc comments have no cached token form; reparse their
            // pretty-printed attribute equivalent.
            let stream = parse_stream_from_source_str(
                FileName::MacroExpansion,
                pprust::attr_to_string(attr),
                sess,
                Some(span),
            );
            builder.push(stream);
            continue
        }

        // synthesize # [ $path $tokens ] manually here
        let mut brackets = tokenstream::TokenStreamBuilder::new();

        // For simple paths, push the identifier directly
        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
            let ident = attr.path.segments[0].ident;
            let token = Ident(ident, ident.as_str().starts_with("r#"));
            brackets.push(tokenstream::TokenTree::Token(ident.span, token));

        // ... and for more complicated paths, fall back to a reparse hack that
        // should eventually be removed.
        } else {
            let stream = parse_stream_from_source_str(
                FileName::MacroExpansion,
                pprust::path_to_string(&attr.path),
                sess,
                Some(span),
            );
            brackets.push(stream);
        }

        brackets.push(attr.tokens.clone());

        let tokens = tokenstream::Delimited {
            delim: DelimToken::Bracket,
            tts: brackets.build().into(),
        };
        // The span we list here for `#` and for `[ ... ]` are both wrong in
        // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
        builder.push(tokenstream::TokenTree::Delimited(attr.span, tokens));
    }
    builder.push(tokens.clone());
    Some(builder.build())
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/attr.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Attribute parsing: outer/inner attributes, doc-comment sugar, and
//! meta-item grammar, implemented as methods on `Parser`.

use attr;
use ast;
use source_map::respan;
use parse::{SeqSep, PResult};
use parse::token::{self, Nonterminal, DelimToken};
use parse::parser::{Parser, TokenType, PathStyle};
use tokenstream::{TokenStream, TokenTree};

/// Whether an inner attribute (`#![...]`) is allowed at the current
/// position; `NotPermitted` carries the error message to emit.
#[derive(Debug)]
enum InnerAttributeParsePolicy<'a> {
    Permitted,
    NotPermitted { reason: &'a str },
}

const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &'static str = "an inner attribute is not \
                                                             permitted in this context";

impl<'a> Parser<'a> {
    /// Parse attributes that appear before an item
    crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
        let mut attrs: Vec<ast::Attribute> = Vec::new();
        // Tracks whether the previous iteration consumed a doc comment, so the
        // inner-attribute error message can mention it specifically.
        let mut just_parsed_doc_comment = false;
        loop {
            debug!("parse_outer_attributes: self.token={:?}", self.token);
            match self.token {
                token::Pound => {
                    let inner_error_reason = if just_parsed_doc_comment {
                        "an inner attribute is not permitted following an outer doc comment"
                    } else if !attrs.is_empty() {
                        "an inner attribute is not permitted following an outer attribute"
                    } else {
                        DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
                    };
                    let inner_parse_policy =
                        InnerAttributeParsePolicy::NotPermitted { reason: inner_error_reason };
                    attrs.push(self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?);
                    just_parsed_doc_comment = false;
                }
                token::DocComment(s) => {
                    // Doc comments are sugar for attributes.
                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
                    if attr.style != ast::AttrStyle::Outer {
                        let mut err = self.fatal("expected outer doc comment");
                        err.note("inner doc comments like this (starting with \
                                  `//!` or `/*!`) can only appear before items");
                        return Err(err);
                    }
                    attrs.push(attr);
                    self.bump();
                    just_parsed_doc_comment = true;
                }
                _ => break,
            }
        }
        Ok(attrs)
    }

    /// Matches `attribute = # ! [ meta_item ]`
    ///
    /// If permit_inner is true, then a leading `!` indicates an inner
    /// attribute
    pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> {
        debug!("parse_attribute: permit_inner={:?} self.token={:?}",
               permit_inner,
               self.token);
        let inner_parse_policy = if permit_inner {
            InnerAttributeParsePolicy::Permitted
        } else {
            InnerAttributeParsePolicy::NotPermitted
                { reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG }
        };
        self.parse_attribute_with_inner_parse_policy(inner_parse_policy)
    }

    /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy`
    /// that prescribes how to handle inner attributes.
    fn parse_attribute_with_inner_parse_policy(&mut self,
                                               inner_parse_policy: InnerAttributeParsePolicy)
                                               -> PResult<'a, ast::Attribute> {
        debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
               inner_parse_policy,
               self.token);
        let (span, path, tokens, style) = match self.token {
            token::Pound => {
                let lo = self.span;
                self.bump();

                if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
                    self.expected_tokens.push(TokenType::Token(token::Not));
                }
                let style = if self.token == token::Not {
                    self.bump();
                    // An inner attribute where not permitted: emit the error
                    // but keep parsing for recovery.
                    if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy
                    {
                        let span = self.span;
                        self.diagnostic()
                            .struct_span_err(span, reason)
                            .note("inner attributes, like `#![no_std]`, annotate the item \
                                   enclosing them, and are usually found at the beginning of \
                                   source files. Outer attributes, like `#[test]`, annotate the \
                                   item following them.")
                            .emit()
                    }
                    ast::AttrStyle::Inner
                } else {
                    ast::AttrStyle::Outer
                };

                self.expect(&token::OpenDelim(token::Bracket))?;
                let (path, tokens) = self.parse_meta_item_unrestricted()?;
                self.expect(&token::CloseDelim(token::Bracket))?;
                let hi = self.prev_span;

                (lo.to(hi), path, tokens, style)
            }
            _ => {
                let token_str = self.this_token_to_string();
                return Err(self.fatal(&format!("expected `#`, found `{}`", token_str)));
            }
        };

        Ok(ast::Attribute {
            id: attr::mk_attr_id(),
            style,
            path,
            tokens,
            is_sugared_doc: false,
            span,
        })
    }

    /// Parse an inner part of attribute - path and following tokens.
    /// The tokens must be either a delimited token stream, or empty token stream,
    /// or the "legacy" key-value form.
    /// PATH `(` TOKEN_STREAM `)`
    /// PATH `[` TOKEN_STREAM `]`
    /// PATH `{` TOKEN_STREAM `}`
    /// PATH
    /// PATH `=` TOKEN_TREE
    /// The delimiters or `=` are still put into the resulting token stream.
    crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
        // An interpolated NtMeta (from a macro expansion) is accepted as-is.
        let meta = match self.token {
            token::Interpolated(ref nt) => match nt.0 {
                Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                _ => None,
            },
            _ => None,
        };
        Ok(if let Some(meta) = meta {
            self.bump();
            (meta.ident, meta.node.tokens(meta.span))
        } else {
            let path = self.parse_path(PathStyle::Mod)?;
            let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) ||
               self.check(&token::OpenDelim(DelimToken::Bracket)) ||
               self.check(&token::OpenDelim(DelimToken::Brace)) {
                self.parse_token_tree().into()
            } else if self.eat(&token::Eq) {
                // Legacy `path = value` form: keep the `=` in the stream.
                let eq = TokenTree::Token(self.prev_span, token::Eq);
                let tree = match self.token {
                    token::CloseDelim(_) | token::Eof => self.unexpected()?,
                    _ => self.parse_token_tree(),
                };
                TokenStream::concat(vec![eq.into(), tree.into()])
            } else {
                TokenStream::empty()
            };
            (path, tokens)
        })
    }

    /// Parse attributes that appear after the opening of an item. These should
    /// be preceded by an exclamation mark, but we accept and warn about one
    /// terminated by a semicolon.
    /// matches inner_attrs*
    crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
        let mut attrs: Vec<ast::Attribute> = vec![];
        loop {
            match self.token {
                token::Pound => {
                    // Don't even try to parse if it's not an inner attribute.
                    if !self.look_ahead(1, |t| t == &token::Not) {
                        break;
                    }

                    let attr = self.parse_attribute(true)?;
                    assert_eq!(attr.style, ast::AttrStyle::Inner);
                    attrs.push(attr);
                }
                token::DocComment(s) => {
                    // we need to get the position of this token before we bump.
                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
                    if attr.style == ast::AttrStyle::Inner {
                        attrs.push(attr);
                        self.bump();
                    } else {
                        break;
                    }
                }
                _ => break,
            }
        }
        Ok(attrs)
    }

    /// Parses a literal, emitting an error (but still returning the literal)
    /// if it carries a suffix such as `1u8`.
    fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
        let lit = self.parse_lit()?;
        debug!("Checking if {:?} is unusuffixed.", lit);

        if !lit.node.is_unsuffixed() {
            let msg = "suffixed literals are not allowed in attributes";
            self.diagnostic().struct_span_err(lit.span, msg)
                             .help("instead of using a suffixed literal \
                                    (1u8, 1.0f32, etc.), use an unsuffixed version \
                                    (1, 1.0, etc.).")
                             .emit()
        }

        Ok(lit)
    }

    /// Per RFC#1559, matches the following grammar:
    ///
    /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ;
    /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
        // Accept a pre-parsed NtMeta nonterminal directly.
        let nt_meta = match self.token {
            token::Interpolated(ref nt) => match nt.0 {
                token::NtMeta(ref e) => Some(e.clone()),
                _ => None,
            },
            _ => None,
        };

        if let Some(meta) = nt_meta {
            self.bump();
            return Ok(meta);
        }

        let lo = self.span;
        let ident = self.parse_path(PathStyle::Mod)?;
        let node = self.parse_meta_item_kind()?;
        let span = lo.to(self.prev_span);
        Ok(ast::MetaItem { ident, node, span })
    }

    /// Parses the part of a meta item after the path: `= lit`, a
    /// parenthesized list, or nothing (a bare word).
    crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
        Ok(if self.eat(&token::Eq) {
            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
        } else if self.eat(&token::OpenDelim(token::Paren)) {
            ast::MetaItemKind::List(self.parse_meta_seq()?)
        } else {
            ast::MetaItemKind::Word
        })
    }

    /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
    fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
        let lo = self.span;

        // Try a literal first, then a meta item; cancel (not emit) the
        // diagnostics of the attempts that fail.
        match self.parse_unsuffixed_lit() {
            Ok(lit) => {
                return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::Literal(lit)))
            }
            Err(ref mut err) => self.diagnostic().cancel(err)
        }

        match self.parse_meta_item() {
            Ok(mi) => {
                return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::MetaItem(mi)))
            }
            Err(ref mut err) => self.diagnostic().cancel(err)
        }

        let found = self.this_token_to_string();
        let msg = format!("expected unsuffixed literal or identifier, found {}", found);
        Err(self.diagnostic().struct_span_err(lo, &msg))
    }

    /// matches meta_seq = ( COMMASEP(meta_item_inner) )
    fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
        self.parse_seq_to_end(&token::CloseDelim(token::Paren),
                              SeqSep::trailing_allowed(token::Comma),
                              |p: &mut Parser<'a>| p.parse_meta_item_inner())
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/mod.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The main parser interface

use rustc_data_structures::sync::{Lrc, Lock};
use ast::{self, CrateConfig, NodeId};
use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use source_map::{SourceMap, FilePathMapping};
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures;
use parse::parser::Parser;
use ptr::P;
use str::char_at;
use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree};
use diagnostics::plugin::ErrorMap;

use std::borrow::Cow;
use std::collections::HashSet;
use std::iter;
use std::path::{Path, PathBuf};
use std::str;

pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;

#[macro_use]
pub mod parser;

pub mod lexer;
pub mod token;
pub mod attr;

pub mod classify;

/// Info about a parsing session.
pub struct ParseSess {
    pub span_diagnostic: Handler,
    pub unstable_features: UnstableFeatures,
    pub config: CrateConfig,
    pub missing_fragment_specifiers: Lock<HashSet<Span>>,
    /// Places where raw identifiers were used. This is used for feature gating
    /// raw identifiers
    pub raw_identifier_spans: Lock<Vec<Span>>,
    /// The registered diagnostics codes
    crate registered_diagnostics: Lock<ErrorMap>,
    // Spans where a `mod foo;` statement was included in a non-mod.rs file.
    // These are used to issue errors if the non_modrs_mods feature is not enabled.
    pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
    /// Used to determine and report recursive mod inclusions
    included_mod_stack: Lock<Vec<PathBuf>>,
    code_map: Lrc<SourceMap>,
    // Lints collected during parsing, before the lint machinery is available.
    pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
}

impl ParseSess {
    /// Creates a session with a TTY-emitting diagnostic handler over a fresh
    /// source map using the given path mapping.
    pub fn new(file_path_mapping: FilePathMapping) -> Self {
        let cm = Lrc::new(SourceMap::new(file_path_mapping));
        let handler = Handler::with_tty_emitter(ColorConfig::Auto,
                                                true,
                                                false,
                                                Some(cm.clone()));
        ParseSess::with_span_handler(handler, cm)
    }

    /// Creates a session from an existing diagnostic handler and source map.
    pub fn with_span_handler(handler: Handler, code_map: Lrc<SourceMap>) -> ParseSess {
        ParseSess {
            span_diagnostic: handler,
            unstable_features: UnstableFeatures::from_environment(),
            config: HashSet::new(),
            missing_fragment_specifiers: Lock::new(HashSet::new()),
            raw_identifier_spans: Lock::new(Vec::new()),
            registered_diagnostics: Lock::new(ErrorMap::new()),
            included_mod_stack: Lock::new(vec![]),
            code_map,
            non_modrs_mods: Lock::new(vec![]),
            buffered_lints: Lock::new(vec![]),
        }
    }

    pub fn source_map(&self) -> &SourceMap {
        &self.code_map
    }

    /// Queues an early lint to be emitted once the lint infrastructure exists.
    pub fn buffer_lint<S: Into<MultiSpan>>(&self,
        lint_id: BufferedEarlyLintId,
        span: S,
        id: NodeId,
        msg: &str,
    ) {
        self.buffered_lints.with_lock(|buffered_lints| {
            buffered_lints.push(BufferedEarlyLint{
                span: span.into(),
                id,
                msg: msg.into(),
                lint_id,
            });
        });
    }
}

/// The directory a module's source lives in, plus how it is owned
/// (used to resolve `mod foo;` paths).
#[derive(Clone)]
pub struct Directory<'a> {
    pub path: Cow<'a, Path>,
    pub ownership: DirectoryOwnership,
}

#[derive(Copy, Clone)]
pub enum DirectoryOwnership {
    Owned {
        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`
        relative: Option<ast::Ident>,
    },
    UnownedViaBlock,
    UnownedViaMod(bool /* legacy warnings? */),
}

// a bunch of utility functions of the form parse_<thing>_from_<source>
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// source_str.
/// Parses the file at `input` as a whole crate.
pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
    let mut parser = new_parser_from_file(sess, input);
    parser.parse_crate_mod()
}

/// Parses only the inner (crate-level) attributes of the file at `input`.
pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
                                       -> PResult<'a, Vec<ast::Attribute>> {
    let mut parser = new_parser_from_file(sess, input);
    parser.parse_inner_attributes()
}

/// Parses `source` (registered under `name` in the source map) as a crate.
pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                       -> PResult<ast::Crate> {
    new_parser_from_source_str(sess, name, source).parse_crate_mod()
}

/// Parses only the crate-level attributes out of `source`.
pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                             -> PResult<Vec<ast::Attribute>> {
    new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}

/// Parses `source` as a single expression.
crate fn parse_expr_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                      -> PResult<P<ast::Expr>> {
    new_parser_from_source_str(sess, name, source).parse_expr()
}

/// Parses an item.
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
/// when a syntax error occurred.
crate fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                      -> PResult<Option<P<ast::Item>>> {
    new_parser_from_source_str(sess, name, source).parse_item()
}

/// Parses `source` as a single statement (None if no statement is present).
crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &ParseSess)
                                      -> PResult<Option<ast::Stmt>> {
    new_parser_from_source_str(sess, name, source).parse_stmt()
}

/// Lexes `source` into a token stream without parsing it further.
/// `override_span`, when given, replaces the spans of the produced tokens.
pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
                                    override_span: Option<Span>)
                                    -> TokenStream {
    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
}

// Create a new parser from a source string
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                      -> Parser {
    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
    // A string has no surrounding directory structure to recurse into.
    parser.recurse_into_file_modules = false;
    parser
}

/// Create a new parser, handling errors as appropriate
/// if the file doesn't exist
pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
    source_file_to_parser(sess, file_to_source_file(sess, path, None))
}

/// Given a session, a crate config, a path, and a span, add
/// the file at the given path to the source_map, and return a parser.
/// On an error, use the given span as the source of the problem.
crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
                                    path: &Path,
                                    directory_ownership: DirectoryOwnership,
                                    module_name: Option<String>,
                                    sp: Span) -> Parser<'a> {
    let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp)));
    p.directory.ownership = directory_ownership;
    p.root_module_name = module_name;
    p
}

/// Given a source_file and config, return a parser
fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
    let end_pos = source_file.end_pos;
    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));

    // An empty file leaves the parser at a dummy span; point it at EOF instead.
    if parser.token == token::Eof && parser.span.is_dummy() {
        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
    }

    parser
}

// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
    stream_to_parser(sess, tts.into_iter().collect())
}


// base abstractions

/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's source_map and return the new source_file.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                   -> Lrc<SourceFile> {
    match sess.source_map().load_file(path) {
        Ok(source_file) => source_file,
        Err(e) => {
            // Report at `spanopt` when available (e.g. the `mod foo;` site),
            // otherwise as a spanless fatal error.
            let msg = format!("couldn't read {:?}: {}", path.display(), e);
            match spanopt {
                Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(),
                None => sess.span_diagnostic.fatal(&msg).raise()
            }
        }
    }
}

/// Given a source_file, produce a sequence of token-trees
pub fn source_file_to_stream(sess: &ParseSess,
                             source_file: Lrc<SourceFile>,
                             override_span: Option<Span>) -> TokenStream {
    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
    srdr.real_token();
    panictry!(srdr.parse_all_token_trees())
}

/// Given stream and the `ParseSess`, produce a parser
pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
    Parser::new(sess, stream, None, true, false)
}

/// Parse a string representing a character literal into its final form.
/// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed.
fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) {
    use std::char;

    // Handle non-escaped chars first.
    if lit.as_bytes()[0] != b'\\' {
        // If the first byte isn't '\\' it might part of a multi-byte char, so
        // get the char with chars().
        let c = lit.chars().next().unwrap();
        return (c, 1);
    }

    // Handle escaped chars.
    match lit.as_bytes()[1] as char {
        '"' => ('"', 2),
        'n' => ('\n', 2),
        'r' => ('\r', 2),
        't' => ('\t', 2),
        '\\' => ('\\', 2),
        '\'' => ('\'', 2),
        '0' => ('\0', 2),
        'x' => {
            // `\xNN`: exactly two hex digits.
            let v = u32::from_str_radix(&lit[2..4], 16).unwrap();
            let c = char::from_u32(v).unwrap();
            (c, 4)
        }
        'u' => {
            // `\u{...}`: variable-length hex with optional `_` separators.
            assert_eq!(lit.as_bytes()[2], b'{');
            let idx = lit.find('}').unwrap();

            // All digits and '_' are ascii, so treat each byte as a char.
            let mut v: u32 = 0;
            for c in lit[3..idx].bytes() {
                let c = char::from(c);
                if c != '_' {
                    let x = c.to_digit(16).unwrap();
                    v = v.checked_mul(16).unwrap().checked_add(x).unwrap();
                }
            }
            let c = char::from_u32(v).unwrap_or_else(|| {
                // Out-of-range or surrogate scalar: report (when a handler is
                // available) and substitute U+FFFD REPLACEMENT CHARACTER.
                if let Some((span, diag)) = diag {
                    let mut diag = diag.struct_span_err(span, "invalid unicode character escape");
                    if v > 0x10FFFF {
                        diag.help("unicode escape must be at most 10FFFF").emit();
                    } else {
                        diag.help("unicode escape must not be a surrogate").emit();
                    }
                }
                '\u{FFFD}'
            });
            (c, (idx + 1) as isize)
        }
        _ => panic!("lexer should have rejected a bad character escape {}", lit)
    }
}

/// Parse a string representing a string literal into its final form. Does
/// unescaping.
pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
    debug!("str_lit: given {}", lit.escape_default());
    let mut res = String::with_capacity(lit.len());

    let error = |i| format!("lexer should have rejected {} at {}", lit, i);

    /// Eat everything up to a non-whitespace
    fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
        loop {
            match it.peek().map(|x| x.1) {
                Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
                    it.next();
                },
                _ => { break; }
            }
        }
    }

    let mut chars = lit.char_indices().peekable();
    while let Some((i, c)) = chars.next() {
        match c {
            '\\' => {
                let ch = chars.peek().unwrap_or_else(|| {
                    panic!("{}", error(i))
                }).1;

                if ch == '\n' {
                    // Backslash-newline: line continuation, skip leading
                    // whitespace on the next line.
                    eat(&mut chars);
                } else if ch == '\r' {
                    chars.next();
                    let ch = chars.peek().unwrap_or_else(|| {
                        panic!("{}", error(i))
                    }).1;

                    if ch != '\n' {
                        panic!("lexer accepted bare CR");
                    }
                    eat(&mut chars);
                } else {
                    // otherwise, a normal escape
                    let (c, n) = char_lit(&lit[i..], diag);
                    for _ in 0..n - 1 { // we don't need to move past the first \
                        chars.next();
                    }
                    res.push(c);
                }
            },
            '\r' => {
                // CRLF normalizes to LF; bare CR is a lexer bug.
                let ch = chars.peek().unwrap_or_else(|| {
                    panic!("{}", error(i))
                }).1;

                if ch != '\n' {
                    panic!("lexer accepted bare CR");
                }
                chars.next();
                res.push('\n');
            }
            c => res.push(c),
        }
    }

    res.shrink_to_fit(); // probably not going to do anything, unless there was an escape.
    debug!("parse_str_lit: returning {}", res);
    res
}

/// Parse a string representing a raw string literal into its final form. The
/// only operation this does is convert embedded CRLF into a single LF.
fn raw_str_lit(lit: &str) -> String {
    debug!("raw_str_lit: given {}", lit.escape_default());
    let mut res = String::with_capacity(lit.len());
    let mut chars = lit.chars().peekable();
    while let Some(c) = chars.next() {
        if c == '\r' {
            if *chars.peek().unwrap() != '\n' {
                panic!("lexer accepted bare CR");
            }
            chars.next();
            res.push('\n');
        } else {
            res.push(c);
        }
    }

    res.shrink_to_fit();
    res
}

// check if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
    s.len() > 1 &&
        first_chars.contains(&char_at(s, 0)) &&
        s[1..].chars().all(|c| '0' <= c && c <= '9')
}

// Runs `$body` with the unwrapped `(span, handler)` pair, or makes the
// enclosing function return `None` when no diagnostics handler is available.
macro_rules! err {
    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
        match $opt_diag {
            Some(($span, $diag)) => { $($body)* }
            None => return None,
        }
    }
}

/// Converts a lexer literal token (plus optional suffix) into an AST
/// `LitKind`. The `bool` in the result reports whether a suffix is illegal
/// on this kind of literal; `None` for the kind means the literal was
/// invalid and diagnostics (if any) were emitted.
crate fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
                 -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
    use ast::LitKind;

    match lit {
       token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))),
       token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str(), diag).0))),

        // There are some valid suffixes for integer and float literals,
        // so all the handling is done internally.
        token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
        token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),

        token::Str_(mut sym) => {
            // If there are no characters requiring special treatment we can
            // reuse the symbol from the Token. Otherwise, we must generate a
            // new symbol because the string in the LitKind is different to the
            // string in the Token.
            let s = &sym.as_str();
            if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
                sym = Symbol::intern(&str_lit(s, diag));
            }
            (true, Some(LitKind::Str(sym, ast::StrStyle::Cooked)))
        }
        token::StrRaw(mut sym, n) => {
            // Ditto.
            let s = &sym.as_str();
            if s.contains('\r') {
                sym = Symbol::intern(&raw_str_lit(s));
            }
            (true, Some(LitKind::Str(sym, ast::StrStyle::Raw(n))))
        }
        token::ByteStr(i) => {
            (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
        }
        token::ByteStrRaw(i, _) => {
            (true, Some(LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))))
        }
    }
}

/// Applies a (possibly absent) suffix to float-literal digits `data`,
/// emitting an error via `err!` for any suffix other than `f32`/`f64`.
fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
                      -> Option<ast::LitKind> {
    debug!("filtered_float_lit: {}, {:?}", data, suffix);
    let suffix = match suffix {
        Some(suffix) => suffix,
        None => return Some(ast::LitKind::FloatUnsuffixed(data)),
    };

    Some(match &*suffix.as_str() {
        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
        suf => {
            err!(diag, |span, diag| {
                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                    // if it looks like a width, lets try to be helpful.
                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
                } else {
                    let msg = format!("invalid suffix `{}` for float literal", suf);
                    diag.struct_span_err(span, &msg)
                        .help("valid suffixes are `f32` and `f64`")
                        .emit();
                }
            });

            ast::LitKind::FloatUnsuffixed(data)
        }
    })
}

/// Strips `_` separators from float-literal text and delegates to
/// `filtered_float_lit`.
fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
                 -> Option<ast::LitKind> {
    debug!("float_lit: {:?}, {:?}", s, suffix);
    // FIXME #2252: bounds checking float literals is deferred until trans
    let s = s.chars().filter(|&c| c != '_').collect::<String>();
    filtered_float_lit(Symbol::intern(&s), suffix, diag)
}

/// Parse a string representing a byte literal into its final form.
Similar to `char_lit` fn byte_lit(lit: &str) -> (u8, usize) { let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i); if lit.len() == 1 { (lit.as_bytes()[0], 1) } else { assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0)); let b = match lit.as_bytes()[1] { b'"' => b'"', b'n' => b'\n', b'r' => b'\r', b't' => b'\t', b'\\' => b'\\', b'\'' => b'\'', b'0' => b'\0', _ => { match u64::from_str_radix(&lit[2..4], 16).ok() { Some(c) => if c > 0xFF { panic!(err(2)) } else { return (c as u8, 4) }, None => panic!(err(3)) } } }; (b, 2) } } fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> { let mut res = Vec::with_capacity(lit.len()); let error = |i| panic!("lexer should have rejected {} at {}", lit, i); /// Eat everything up to a non-whitespace fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) { loop { match it.peek().map(|x| x.1) { Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => { it.next(); }, _ => { break; } } } } // byte string literals *must* be ASCII, but the escapes don't have to be let mut chars = lit.bytes().enumerate().peekable(); loop { match chars.next() { Some((i, b'\\')) => { match chars.peek().unwrap_or_else(|| error(i)).1 { b'\n' => eat(&mut chars), b'\r' => { chars.next(); if chars.peek().unwrap_or_else(|| error(i)).1 != b'\n' { panic!("lexer accepted bare CR"); } eat(&mut chars); } _ => { // otherwise, a normal escape let (c, n) = byte_lit(&lit[i..]); // we don't need to move past the first \ for _ in 0..n - 1 { chars.next(); } res.push(c); } } }, Some((i, b'\r')) => { if chars.peek().unwrap_or_else(|| error(i)).1 != b'\n' { panic!("lexer accepted bare CR"); } chars.next(); res.push(b'\n'); } Some((_, c)) => res.push(c), None => break, } } Lrc::new(res) } fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>) -> Option<ast::LitKind> { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::<String>(); let mut s = &s2[..]; debug!("integer_lit: {}, {:?}", s, 
suffix); let mut base = 10; let orig = s; let mut ty = ast::LitIntType::Unsuffixed; if char_at(s, 0) == '0' && s.len() > 1 { match char_at(s, 1) { 'x' => base = 16, 'o' => base = 8, 'b' => base = 2, _ => { } } } // 1f64 and 2f32 etc. are valid float literals. if let Some(suf) = suffix { if looks_like_width_suffix(&['f'], &suf.as_str()) { let err = match base { 16 => Some("hexadecimal float literal is not supported"), 8 => Some("octal float literal is not supported"), 2 => Some("binary float literal is not supported"), _ => None, }; if let Some(err) = err { err!(diag, |span, diag| diag.span_err(span, err)); } return filtered_float_lit(Symbol::intern(s), Some(suf), diag) } } if base != 10 { s = &s[2..]; } if let Some(suf) = suffix { if suf.as_str().is_empty() { err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some")); } ty = match &*suf.as_str() { "isize" => ast::LitIntType::Signed(ast::IntTy::Isize), "i8" => ast::LitIntType::Signed(ast::IntTy::I8), "i16" => ast::LitIntType::Signed(ast::IntTy::I16), "i32" => ast::LitIntType::Signed(ast::IntTy::I32), "i64" => ast::LitIntType::Signed(ast::IntTy::I64), "i128" => ast::LitIntType::Signed(ast::IntTy::I128), "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize), "u8" => ast::LitIntType::Unsigned(ast::UintTy::U8), "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16), "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32), "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64), "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128), suf => { // i<digits> and u<digits> look like widths, so lets // give an error message along those lines err!(diag, |span, diag| { if looks_like_width_suffix(&['i', 'u'], suf) { let msg = format!("invalid width `{}` for integer literal", &suf[1..]); diag.struct_span_err(span, &msg) .help("valid widths are 8, 16, 32, 64 and 128") .emit(); } else { let msg = format!("invalid suffix `{}` for numeric literal", suf); diag.struct_span_err(span, &msg) .help("the suffix must be 
one of the integral types \ (`u32`, `isize`, etc)") .emit(); } }); ty } } } debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \ string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix); Some(match u128::from_str_radix(s, base) { Ok(r) => ast::LitKind::Int(r, ty), Err(_) => { // small bases are lexed as if they were base 10, e.g, the string // might be `0b10201`. This will cause the conversion above to fail, // but these cases have errors in the lexer: we don't want to emit // two errors, and we especially don't want to emit this error since // it isn't necessarily true. let already_errored = base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base)); if !already_errored { err!(diag, |span, diag| diag.span_err(span, "int literal is too large")); } ast::LitKind::Int(0, ty) } }) } /// `SeqSep` : a sequence separator (token) /// and whether a trailing separator is allowed. pub struct SeqSep { pub sep: Option<token::Token>, pub trailing_sep_allowed: bool, } impl SeqSep { pub fn trailing_allowed(t: token::Token) -> SeqSep { SeqSep { sep: Some(t), trailing_sep_allowed: true, } } pub fn none() -> SeqSep { SeqSep { sep: None, trailing_sep_allowed: false, } } } #[cfg(test)] mod tests { use super::*; use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION}; use ast::{self, Ident, PatKind}; use attr::first_attr_value_str_by_name; use parse; use print::pprust::item_to_string; use tokenstream::{self, TokenTree}; use util::parser_testing::string_to_stream; use util::parser_testing::{string_to_expr, string_to_item}; use with_globals; // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { Span::new(BytePos(a), BytePos(b), NO_EXPANSION) } #[should_panic] #[test] fn bad_path_expr_1() { with_globals(|| { string_to_expr("::abc::def::return".to_string()); }) } // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { with_globals(|| { let tts: Vec<_> = string_to_stream("macro_rules! 
zip (($a)=>($a))".to_string()).trees().collect(); let tts: &[TokenTree] = &tts[..]; match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))), Some(&TokenTree::Token(_, token::Not)), Some(&TokenTree::Token(_, token::Ident(name_zip, false))), Some(&TokenTree::Delimited(_, ref macro_delimed)), ) if name_macro_rules.name == "macro_rules" && name_zip.name == "zip" => { let tts = &macro_delimed.stream().trees().collect::<Vec<_>>(); match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, Some(&TokenTree::Delimited(_, ref first_delimed)), Some(&TokenTree::Token(_, token::FatArrow)), Some(&TokenTree::Delimited(_, ref second_delimed)), ) if macro_delimed.delim == token::Paren => { let tts = &first_delimed.stream().trees().collect::<Vec<_>>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Ident(ident, false))), ) if first_delimed.delim == token::Paren && ident.name == "a" => {}, _ => panic!("value 3: {:?}", *first_delimed), } let tts = &second_delimed.stream().trees().collect::<Vec<_>>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Ident(ident, false))), ) if second_delimed.delim == token::Paren && ident.name == "a" => {}, _ => panic!("value 4: {:?}", *second_delimed), } }, _ => panic!("value 2: {:?}", *macro_delimed), } }, _ => panic!("value: {:?}",tts), } }) } #[test] fn string_to_tts_1() { with_globals(|| { let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); let expected = TokenStream::concat(vec![ TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), TokenTree::Delimited( sp(5, 14), tokenstream::Delimited { delim: token::DelimToken::Paren, tts: TokenStream::concat(vec![ TokenTree::Token(sp(6, 7), 
token::Ident(Ident::from_str("b"), false)).into(), TokenTree::Token(sp(8, 9), token::Colon).into(), TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"), false)).into(), ]).into(), }).into(), TokenTree::Delimited( sp(15, 21), tokenstream::Delimited { delim: token::DelimToken::Brace, tts: TokenStream::concat(vec![ TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"), false)).into(), TokenTree::Token(sp(18, 19), token::Semi).into(), ]).into(), }).into() ]); assert_eq!(tts, expected); }) } #[test] fn parse_use() { with_globals(|| { let use_s = "use foo::bar::baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&vitem); assert_eq!(&vitem_s[..], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&vitem); assert_eq!(&vitem_s[..], use_s); }) } #[test] fn parse_extern_crate() { with_globals(|| { let ex_s = "extern crate foo;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&vitem); assert_eq!(&vitem_s[..], ex_s); let ex_s = "extern crate foo as bar;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&vitem); assert_eq!(&vitem_s[..], ex_s); }) } fn get_spans_of_pat_idents(src: &str) -> Vec<Span> { let item = string_to_item(src.to_string()).unwrap(); struct PatIdentVisitor { spans: Vec<Span> } impl<'a> ::visit::Visitor<'a> for PatIdentVisitor { fn visit_pat(&mut self, p: &'a ast::Pat) { match p.node { PatKind::Ident(_ , ref spannedident, _) => { self.spans.push(spannedident.span.clone()); } _ => { ::visit::walk_pat(self, p); } } } } let mut v = PatIdentVisitor { spans: Vec::new() }; ::visit::walk_item(&mut v, &item); return v.spans; } #[test] fn span_of_self_arg_pat_idents_are_correct() { with_globals(|| { let srcs = ["impl z { fn a (&self, &myarg: i32) {} }", "impl z { fn a (&mut self, &myarg: i32) {} }", "impl z { fn a (&'a self, &myarg: i32) {} }", "impl z { fn a 
(self, &myarg: i32) {} }", "impl z { fn a (self: Foo, &myarg: i32) {} }", ]; for &src in &srcs { let spans = get_spans_of_pat_idents(src); let (lo, hi) = (spans[0].lo(), spans[0].hi()); assert!("self" == &src[lo.to_usize()..hi.to_usize()], "\"{}\" != \"self\". src=\"{}\"", &src[lo.to_usize()..hi.to_usize()], src) } }) } #[test] fn parse_exprs () { with_globals(|| { // just make sure that they parse.... string_to_expr("3 + 4".to_string()); string_to_expr("a::z.froob(b,&(987+3))".to_string()); }) } #[test] fn attrs_fix_bug () { with_globals(|| { string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result<Box<Writer>, String> { #[cfg(windows)] fn wb() -> c_int { (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int } #[cfg(unix)] fn wb() -> c_int { O_WRONLY as c_int } let mut fflags: c_int = wb(); }".to_string()); }) } #[test] fn crlf_doc_comments() { with_globals(|| { let sess = ParseSess::new(FilePathMapping::empty()); let name = FileName::Custom("source".to_string()); let source = "/// doc comment\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); assert_eq!(doc, "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); let docs = item.attrs.iter().filter(|a| a.path == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[..], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); assert_eq!(doc, "/** doc comment\n * with CRLF */"); }); } #[test] fn ttdelim_span() { with_globals(|| { let sess = 
ParseSess::new(FilePathMapping::empty()); let expr = parse::parse_expr_from_source_str(PathBuf::from("foo").into(), "foo!( fn main() { body } )".to_string(), &sess).unwrap(); let tts: Vec<_> = match expr.node { ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), _ => panic!("not a macro"), }; let span = tts.iter().rev().next().unwrap().span(); match sess.source_map().span_to_snippet(span) { Ok(s) => assert_eq!(&s[..], "{ body }"), Err(_) => panic!("could not get snippet"), } }); } // This tests that when parsing a string (rather than a file) we don't try // and read in a file for a module declaration and just parse a stub. // See `recurse_into_file_modules` in the parser. #[test] fn out_of_line_mod() { with_globals(|| { let sess = ParseSess::new(FilePathMapping::empty()); let item = parse_item_from_source_str( PathBuf::from("foo").into(), "mod foo { struct S; mod this_does_not_exist; }".to_owned(), &sess, ).unwrap().unwrap(); if let ast::ItemKind::Mod(ref m) = item.node { assert!(m.items.len() == 2); } else { panic!(); } }); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/parser.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use rustc_target::spec::abi::{self, Abi}; use ast::{AngleBracketedArgs, ParenthesisedArgs, AttrStyle, BareFnTy}; use ast::{GenericBound, TraitBoundModifier}; use ast::Unsafety; use ast::{Mod, AnonConst, Arg, Arm, Attribute, BindingMode, TraitItemKind}; use ast::Block; use ast::{BlockCheckMode, CaptureBy, Movability}; use ast::{Constness, Crate}; use ast::Defaultness; use ast::EnumDef; use ast::{Expr, ExprKind, RangeLimits}; use ast::{Field, FnDecl, FnHeader}; use ast::{ForeignItem, ForeignItemKind, FunctionRetTy}; use ast::{GenericParam, GenericParamKind}; use ast::GenericArg; use ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind}; use ast::{Label, Lifetime, Lit, LitKind}; use ast::Local; use ast::MacStmtStyle; use ast::{Mac, Mac_, MacDelimiter}; use ast::{MutTy, Mutability}; use ast::{Pat, PatKind, PathSegment}; use ast::{PolyTraitRef, QSelf}; use ast::{Stmt, StmtKind}; use ast::{VariantData, StructField}; use ast::StrStyle; use ast::SelfKind; use ast::{TraitItem, TraitRef, TraitObjectSyntax}; use ast::{Ty, TyKind, TypeBinding, GenericBounds}; use ast::{Visibility, VisibilityKind, WhereClause, CrateSugar}; use ast::{UseTree, UseTreeKind}; use ast::{BinOpKind, UnOp}; use ast::{RangeEnd, RangeSyntax}; use {ast, attr}; use source_map::{self, SourceMap, Spanned, respan}; use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, edition::Edition}; use errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use parse::{self, SeqSep, classify, token}; use parse::lexer::TokenAndSpan; use 
parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; use ThinVec; use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream}; use symbol::{Symbol, keywords}; use std::borrow::Cow; use std::cmp; use std::mem; use std::path::{self, Path, PathBuf}; use std::slice; #[derive(Debug)] /// Whether the type alias or associated type is a concrete type or an existential type pub enum AliasKind { /// Just a new name for the same type Weak(P<Ty>), /// Only trait impls of the type will be usable, not the actual type itself Existential(GenericBounds), } bitflags! { struct Restrictions: u8 { const STMT_EXPR = 1 << 0; const NO_STRUCT_LITERAL = 1 << 1; } } type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>); /// How to parse a path. #[derive(Copy, Clone, PartialEq)] pub enum PathStyle { /// In some contexts, notably in expressions, paths with generic arguments are ambiguous /// with something else. For example, in expressions `segment < ....` can be interpreted /// as a comparison and `segment ( ....` can be interpreted as a function call. /// In all such contexts the non-path interpretation is preferred by default for practical /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g. /// `x<y>` - comparisons, `x::<y>` - unambiguously a path. Expr, /// In other contexts, notably in types, no ambiguity exists and paths can be written /// without the disambiguator, e.g. `x<y>` - unambiguously a path. /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too. Type, /// A path with generic arguments disallowed, e.g. `foo::bar::Baz`, used in imports, /// visibilities or attributes. /// Technically, this variant is unnecessary and e.g. 
`Expr` can be used instead /// (paths in "mod" contexts have to be checked later for absence of generic arguments /// anyway, due to macros), but it is used to avoid weird suggestions about expected /// tokens when something goes wrong. Mod, } #[derive(Clone, Copy, PartialEq, Debug)] enum SemiColonMode { Break, Ignore, } #[derive(Clone, Copy, PartialEq, Debug)] enum BlockMode { Break, Ignore, } /// Possibly accept an `token::Interpolated` expression (a pre-parsed expression /// dropped into the token stream, which happens while parsing the result of /// macro expansion). Placement of these is not as complex as I feared it would /// be. The important thing is to make sure that lookahead doesn't balk at /// `token::Interpolated` tokens. macro_rules! maybe_whole_expr { ($p:expr) => { if let token::Interpolated(nt) = $p.token.clone() { match nt.0 { token::NtExpr(ref e) | token::NtLiteral(ref e) => { $p.bump(); return Ok((*e).clone()); } token::NtPath(ref path) => { $p.bump(); let span = $p.span; let kind = ExprKind::Path(None, (*path).clone()); return Ok($p.mk_expr(span, kind, ThinVec::new())); } token::NtBlock(ref block) => { $p.bump(); let span = $p.span; let kind = ExprKind::Block((*block).clone(), None); return Ok($p.mk_expr(span, kind, ThinVec::new())); } _ => {}, }; } } } /// As maybe_whole_expr, but for things other than expressions macro_rules! 
maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { if let token::Interpolated(nt) = $p.token.clone() { if let token::$constructor($x) = nt.0.clone() { $p.bump(); return Ok($e); } } }; } fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> { if let Some(ref mut rhs) = rhs { lhs.append(rhs); } lhs } #[derive(Debug, Clone, Copy, PartialEq)] enum PrevTokenKind { DocComment, Comma, Plus, Interpolated, Eof, Ident, Other, } trait RecoverQPath: Sized { const PATH_STYLE: PathStyle = PathStyle::Expr; fn to_ty(&self) -> Option<P<Ty>>; fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self; fn to_string(&self) -> String; } impl RecoverQPath for Ty { const PATH_STYLE: PathStyle = PathStyle::Type; fn to_ty(&self) -> Option<P<Ty>> { Some(P(self.clone())) } fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { Self { span: path.span, node: TyKind::Path(qself, path), id: self.id } } fn to_string(&self) -> String { pprust::ty_to_string(self) } } impl RecoverQPath for Pat { fn to_ty(&self) -> Option<P<Ty>> { self.to_ty() } fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { Self { span: path.span, node: PatKind::Path(qself, path), id: self.id } } fn to_string(&self) -> String { pprust::pat_to_string(self) } } impl RecoverQPath for Expr { fn to_ty(&self) -> Option<P<Ty>> { self.to_ty() } fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self { Self { span: path.span, node: ExprKind::Path(qself, path), id: self.id, attrs: self.attrs.clone() } } fn to_string(&self) -> String { pprust::expr_to_string(self) } } /* ident is handled by common.rs */ #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, /// the current token: pub token: token::Token, /// the span of the current token: pub span: Span, /// the span of the previous token: meta_var_span: Option<Span>, pub prev_span: Span, /// the previous token kind prev_token_kind: PrevTokenKind, restrictions: 
Restrictions, /// Used to determine the path to externally loaded source files crate directory: Directory<'a>, /// Whether to parse sub-modules in other files. pub recurse_into_file_modules: bool, /// Name of the root module this parser originated from. If `None`, then the /// name is not known. This does not change while the parser is descending /// into modules, and sub-parsers have new values for this name. pub root_module_name: Option<String>, crate expected_tokens: Vec<TokenType>, token_cursor: TokenCursor, desugar_doc_comments: bool, /// Whether we should configure out of line modules as we parse. pub cfg_mods: bool, } #[derive(Clone)] struct TokenCursor { frame: TokenCursorFrame, stack: Vec<TokenCursorFrame>, } #[derive(Clone)] struct TokenCursorFrame { delim: token::DelimToken, span: Span, open_delim: bool, tree_cursor: tokenstream::Cursor, close_delim: bool, last_token: LastToken, } /// This is used in `TokenCursorFrame` above to track tokens that are consumed /// by the parser, and then that's transitively used to record the tokens that /// each parse AST item is created with. /// /// Right now this has two states, either collecting tokens or not collecting /// tokens. If we're collecting tokens we just save everything off into a local /// `Vec`. This should eventually though likely save tokens from the original /// token stream and just use slicing of token streams to avoid creation of a /// whole new vector. /// /// The second state is where we're passively not recording tokens, but the last /// token is still tracked for when we want to start recording tokens. This /// "last token" means that when we start recording tokens we'll want to ensure /// that this, the first token, is included in the output. /// /// You can find some more example usage of this in the `collect_tokens` method /// on the parser. 
#[derive(Clone)] enum LastToken { Collecting(Vec<TokenStream>), Was(Option<TokenStream>), } impl TokenCursorFrame { fn new(sp: Span, delimited: &Delimited) -> Self { TokenCursorFrame { delim: delimited.delim, span: sp, open_delim: delimited.delim == token::NoDelim, tree_cursor: delimited.stream().into_trees(), close_delim: delimited.delim == token::NoDelim, last_token: LastToken::Was(None), } } } impl TokenCursor { fn next(&mut self) -> TokenAndSpan { loop { let tree = if !self.frame.open_delim { self.frame.open_delim = true; Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() } .open_tt(self.frame.span) } else if let Some(tree) = self.frame.tree_cursor.next() { tree } else if !self.frame.close_delim { self.frame.close_delim = true; Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() } .close_tt(self.frame.span) } else if let Some(frame) = self.stack.pop() { self.frame = frame; continue } else { return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP } }; match self.frame.last_token { LastToken::Collecting(ref mut v) => v.push(tree.clone().into()), LastToken::Was(ref mut t) => *t = Some(tree.clone().into()), } match tree { TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp }, TokenTree::Delimited(sp, ref delimited) => { let frame = TokenCursorFrame::new(sp, delimited); self.stack.push(mem::replace(&mut self.frame, frame)); } } } } fn next_desugared(&mut self) -> TokenAndSpan { let (sp, name) = match self.next() { TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), tok => return tok, }; let stripped = strip_doc_comment_decoration(&name.as_str()); // Searches for the occurrences of `"#*` and returns the minimum number of `#`s // required to wrap the text. 
let mut num_of_hashes = 0; let mut count = 0; for ch in stripped.chars() { count = match ch { '"' => 1, '#' if count > 0 => count + 1, _ => 0, }; num_of_hashes = cmp::max(num_of_hashes, count); } let body = TokenTree::Delimited(sp, Delimited { delim: token::Bracket, tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))] .iter().cloned().collect::<TokenStream>().into(), }); self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited { delim: token::NoDelim, tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner { [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body] .iter().cloned().collect::<TokenStream>().into() } else { [TokenTree::Token(sp, token::Pound), body] .iter().cloned().collect::<TokenStream>().into() }, }))); self.next() } } #[derive(Clone, PartialEq)] crate enum TokenType { Token(token::Token), Keyword(keywords::Keyword), Operator, Lifetime, Ident, Path, Type, } impl TokenType { fn to_string(&self) -> String { match *self { TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)), TokenType::Keyword(kw) => format!("`{}`", kw.name()), TokenType::Operator => "an operator".to_string(), TokenType::Lifetime => "lifetime".to_string(), TokenType::Ident => "identifier".to_string(), TokenType::Path => "path".to_string(), TokenType::Type => "type".to_string(), } } } /// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`, /// `IDENT<<u8 as Trait>::AssocTy>`. /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that IDENT is not the ident of a fn trait fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } /// Information about the path to a module. 
pub struct ModulePath { name: String, path_exists: bool, pub result: Result<ModulePathSuccess, Error>, } pub struct ModulePathSuccess { pub path: PathBuf, pub directory_ownership: DirectoryOwnership, warn: bool, } pub enum Error { FileNotFoundForModule { mod_name: String, default_path: String, secondary_path: String, dir_path: String, }, DuplicatePaths { mod_name: String, default_path: String, secondary_path: String, }, UselessDocComment, InclusiveRangeWithNoEnd, } impl Error { fn span_err<S: Into<MultiSpan>>(self, sp: S, handler: &errors::Handler) -> DiagnosticBuilder { match self { Error::FileNotFoundForModule { ref mod_name, ref default_path, ref secondary_path, ref dir_path } => { let mut err = struct_span_err!(handler, sp, E0583, "file not found for module `{}`", mod_name); err.help(&format!("name the file either {} or {} inside the directory \"{}\"", default_path, secondary_path, dir_path)); err } Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { let mut err = struct_span_err!(handler, sp, E0584, "file for module `{}` found at both {} and {}", mod_name, default_path, secondary_path); err.help("delete or rename one of them to remove the ambiguity"); err } Error::UselessDocComment => { let mut err = struct_span_err!(handler, sp, E0585, "found a documentation comment that doesn't document anything"); err.help("doc comments must come before what they document, maybe a comment was \ intended with `//`?"); err } Error::InclusiveRangeWithNoEnd => { let mut err = struct_span_err!(handler, sp, E0586, "inclusive range with no end"); err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); err } } } } #[derive(Debug)] enum LhsExpr { NotYetParsed, AttributesParsed(ThinVec<Attribute>), AlreadyParsed(P<Expr>), } impl From<Option<ThinVec<Attribute>>> for LhsExpr { fn from(o: Option<ThinVec<Attribute>>) -> Self { if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed } } } impl 
From<P<Expr>> for LhsExpr { fn from(expr: P<Expr>) -> Self { LhsExpr::AlreadyParsed(expr) } } /// Create a placeholder argument. fn dummy_arg(span: Span) -> Arg { let ident = Ident::new(keywords::Invalid.name(), span); let pat = P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), span, }); let ty = Ty { node: TyKind::Err, span, id: ast::DUMMY_NODE_ID }; Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID } } #[derive(Copy, Clone, Debug)] enum TokenExpectType { Expect, NoExpect, } impl<'a> Parser<'a> { pub fn new(sess: &'a ParseSess, tokens: TokenStream, directory: Option<Directory<'a>>, recurse_into_file_modules: bool, desugar_doc_comments: bool) -> Self { let mut parser = Parser { sess, token: token::Whitespace, span: syntax_pos::DUMMY_SP, prev_span: syntax_pos::DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), recurse_into_file_modules, directory: Directory { path: Cow::from(PathBuf::new()), ownership: DirectoryOwnership::Owned { relative: None } }, root_module_name: None, expected_tokens: Vec::new(), token_cursor: TokenCursor { frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited { delim: token::NoDelim, tts: tokens.into(), }), stack: Vec::new(), }, desugar_doc_comments, cfg_mods: true, }; let tok = parser.next_tok(); parser.token = tok.tok; parser.span = tok.sp; if let Some(directory) = directory { parser.directory = directory; } else if !parser.span.is_dummy() { if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) { path.pop(); parser.directory.path = Cow::from(path); } } parser.process_potential_macro_variable(); parser } fn next_tok(&mut self) -> TokenAndSpan { let mut next = if self.desugar_doc_comments { self.token_cursor.next_desugared() } else { self.token_cursor.next() }; if next.sp.is_dummy() { // Tweak the location for better diagnostics, but keep syntactic context intact. 
next.sp = self.prev_span.with_ctxt(next.sp.ctxt()); } next } /// Convert the current token to a string using self's reader pub fn this_token_to_string(&self) -> String { pprust::token_to_string(&self.token) } fn token_descr(&self) -> Option<&'static str> { Some(match &self.token { t if t.is_special_ident() => "reserved identifier", t if t.is_used_keyword() => "keyword", t if t.is_unused_keyword() => "reserved keyword", _ => return None, }) } fn this_token_descr(&self) -> String { if let Some(prefix) = self.token_descr() { format!("{} `{}`", prefix, self.this_token_to_string()) } else { format!("`{}`", self.this_token_to_string()) } } fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> { let token_str = pprust::token_to_string(t); Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str))) } crate fn unexpected<T>(&mut self) -> PResult<'a, T> { match self.expect_one_of(&[], &[]) { Err(e) => Err(e), Ok(_) => unreachable!(), } } /// Expect and consume the token t. Signal an error if /// the next token is not t. pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); Ok(()) } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_to_string(); let mut err = self.fatal(&format!("expected `{}`, found `{}`", token_str, this_token_str)); let sp = if self.token == token::Token::Eof { // EOF, don't want to point at the following char, but rather the last token self.prev_span } else { self.sess.source_map().next_point(self.prev_span) }; let label_exp = format!("expected `{}`", token_str); let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content // between them is whitespace, point only at the found token. 
err.span_label(self.span, label_exp); } _ => { err.span_label(sp, label_exp); err.span_label(self.span, "unexpected token"); } } Err(err) } } else { self.expect_one_of(slice::from_ref(t), &[]) } } /// Expect next token to be edible or inedible token. If edible, /// then consume it; if inedible, then return without consuming /// anything. Signal a fatal error if next token is unexpected. fn expect_one_of(&mut self, edible: &[token::Token], inedible: &[token::Token]) -> PResult<'a, ()>{ fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. let b = i.next() .map_or("".to_string(), |t| t.to_string()); i.enumerate().fold(b, |mut b, (i, a)| { if tokens.len() > 2 && i == tokens.len() - 2 { b.push_str(", or "); } else if tokens.len() == 2 && i == tokens.len() - 2 { b.push_str(" or "); } else { b.push_str(", "); } b.push_str(&a.to_string()); b }) } if edible.contains(&self.token) { self.bump(); Ok(()) } else if inedible.contains(&self.token) { // leave it in the input Ok(()) } else { let mut expected = edible.iter() .map(|x| TokenType::Token(x.clone())) .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) .chain(self.expected_tokens.iter().cloned()) .collect::<Vec<_>>(); expected.sort_by_cached_key(|x| x.to_string()); expected.dedup(); let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { let short_expect = if expected.len() > 6 { format!("{} possible tokens", expected.len()) } else { expect.clone() }; (format!("expected one of {}, found `{}`", expect, actual), (self.sess.source_map().next_point(self.prev_span), format!("expected one of {} here", short_expect))) } else if expected.is_empty() { (format!("unexpected token: `{}`", actual), (self.prev_span, "unexpected token after this".to_string())) } else { (format!("expected {}, found `{}`", expect, actual), 
(self.sess.source_map().next_point(self.prev_span), format!("expected {} here", expect))) }; let mut err = self.fatal(&msg_exp); let sp = if self.token == token::Token::Eof { // This is EOF, don't want to point at the following char, but rather the last token self.prev_span } else { label_sp }; let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { // When the spans are in the same line, it means that the only content between // them is whitespace, point at the found token in that case: // // X | () => { syntax error }; // | ^^^^^ expected one of 8 possible tokens here // // instead of having: // // X | () => { syntax error }; // | -^^^^^ unexpected token // | | // | expected one of 8 possible tokens here err.span_label(self.span, label_exp); } _ => { err.span_label(sp, label_exp); err.span_label(self.span, "unexpected token"); } } Err(err) } } /// returns the span of expr, if it was not interpolated or the span of the interpolated token fn interpolated_or_expr_span(&self, expr: PResult<'a, P<Expr>>) -> PResult<'a, (Span, P<Expr>)> { expr.map(|e| { if self.prev_token_kind == PrevTokenKind::Interpolated { (self.prev_span, e) } else { (e.span, e) } }) } fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err(self.span, &format!("expected identifier, found {}", self.this_token_descr())); if let Some(token_descr) = self.token_descr() { err.span_label(self.span, format!("expected identifier, found {}", token_descr)); } else { err.span_label(self.span, "expected identifier"); if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { err.span_suggestion(self.span, "remove this comma", "".into()); } } err } pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { self.parse_ident_common(true) } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { match self.token { token::Ident(ident, _) => { if 
self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { err.emit(); } else { return Err(err); } } let span = self.span; self.bump(); Ok(Ident::new(ident.name, span)) } _ => { Err(if self.prev_token_kind == PrevTokenKind::DocComment { self.span_fatal_err(self.prev_span, Error::UselessDocComment) } else { self.expected_ident_found() }) } } } /// Check if the next token is `tok`, and return `true` if so. /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. crate fn check(&mut self, tok: &token::Token) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consume token 'tok' if it exists. Returns true if the given /// token was present, false otherwise. pub fn eat(&mut self, tok: &token::Token) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present } fn check_keyword(&mut self, kw: keywords::Keyword) -> bool { self.expected_tokens.push(TokenType::Keyword(kw)); self.token.is_keyword(kw) } /// If the next token is the given keyword, eat it and return /// true. Otherwise, return false. pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool { if self.check_keyword(kw) { self.bump(); true } else { false } } fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool { if self.token.is_keyword(kw) { self.bump(); true } else { false } } /// If the given word is not a keyword, signal an error. /// If the next token is not the given word, signal an error. /// Otherwise, eat it. 
fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> { if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) } } fn check_ident(&mut self) -> bool { if self.token.is_ident() { true } else { self.expected_tokens.push(TokenType::Ident); false } } fn check_path(&mut self) -> bool { if self.token.is_path_start() { true } else { self.expected_tokens.push(TokenType::Path); false } } fn check_type(&mut self) -> bool { if self.token.can_begin_type() { true } else { self.expected_tokens.push(TokenType::Type); false } } /// Expect and consume a `+`. if `+=` is seen, replace it with a `=` /// and continue. If a `+` is not seen, return false. /// /// This is using when token splitting += into +. /// See issue 47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); match self.token { token::BinOp(token::Plus) => { self.bump(); true } token::BinOpEq(token::Plus) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); self.bump_with(token::Eq, span); true } _ => false, } } /// Checks to see if the next token is either `+` or `+=`. /// Otherwise returns false. fn check_plus(&mut self) -> bool { if self.token.is_like_plus() { true } else { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); false } } /// Expect and consume an `&`. If `&&` is seen, replace it with a single /// `&` and continue. If an `&` is not seen, signal an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); match self.token { token::BinOp(token::And) => { self.bump(); Ok(()) } token::AndAnd => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::And), span)) } _ => self.unexpected() } } /// Expect and consume an `|`. If `||` is seen, replace it with a single /// `|` and continue. If an `|` is not seen, signal an error. 
fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); match self.token { token::BinOp(token::Or) => { self.bump(); Ok(()) } token::OrOr => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::Or), span)) } _ => self.unexpected() } } fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) { match suffix { None => {/* everything ok */} Some(suf) => { let text = suf.as_str(); if text.is_empty() { self.span_bug(sp, "found empty literal suffix in Some") } self.span_err(sp, &format!("{} with a suffix is invalid", kind)); } } } /// Attempt to consume a `<`. If `<<` is seen, replace it with a single /// `<` and continue. If a `<` is not seen, return false. /// /// This is meant to be used when parsing generics on a path to get the /// starting token. fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); match self.token { token::Lt => { self.bump(); true } token::BinOp(token::Shl) => { let span = self.span.with_lo(self.span.lo() + BytePos(1)); self.bump_with(token::Lt, span); true } _ => false, } } fn expect_lt(&mut self) -> PResult<'a, ()> { if !self.eat_lt() { self.unexpected() } else { Ok(()) } } /// Expect and consume a GT. if a >> is seen, replace it /// with a single > and continue. If a GT is not seen, /// signal an error. 
fn expect_gt(&mut self) -> PResult<'a, ()> {
    self.expected_tokens.push(TokenType::Token(token::Gt));
    // Decide what, if anything, remains after splitting a leading `>` off
    // the current token: `>>` -> `>`, `>>=` -> `>=`, `>=` -> `=`.
    let leftover = match self.token {
        token::Gt => None,
        token::BinOp(token::Shr) => Some(token::Gt),
        token::BinOpEq(token::Shr) => Some(token::Ge),
        token::Ge => Some(token::Eq),
        _ => return self.unexpected(),
    };
    match leftover {
        None => {
            self.bump();
            Ok(())
        }
        Some(tok) => {
            // Re-queue the remainder, whose span starts one byte in.
            let rest = self.span.with_lo(self.span.lo() + BytePos(1));
            Ok(self.bump_with(tok, rest))
        }
    }
}

/// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
    let handler = self.diagnostic();
    let outcome = self.parse_seq_to_before_tokens(
        kets,
        SeqSep::none(),
        TokenExpectType::Expect,
        |p| Ok(p.parse_token_tree()),
    );
    if let Err(ref mut err) = outcome {
        // Pure recovery: swallow the diagnostic rather than emitting it.
        handler.cancel(err);
    }
}

/// Parse a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self, ket: &token::Token, sep: SeqSep, f: F)
                              -> PResult<'a, Vec<T>>
    where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
    let items = self.parse_seq_to_before_end(ket, sep, f)?;
    self.bump(); // consume the closing delimiter
    Ok(items)
}

/// Parse a sequence, not including the closing delimiter. The function
/// f must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_before_end<T, F>(&mut self,
                                     ket: &token::Token,
                                     sep: SeqSep,
                                     f: F)
                                     -> PResult<'a, Vec<T>>
    where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
{
    self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}

/// Core sequence parser: repeatedly calls `f`, separated by `sep`, until one
/// of `kets` (or a closing delimiter/EOF) is reached. Attempts error
/// recovery on a missing or similar-looking separator.
fn parse_seq_to_before_tokens<T, F>(
    &mut self,
    kets: &[&token::Token],
    sep: SeqSep,
    expect: TokenExpectType,
    mut f: F,
) -> PResult<'a, Vec<T>>
    where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
{
    let mut first: bool = true;
    let mut v = vec![];
    while !kets.iter().any(|k| {
        // `Expect` records the ket in `expected_tokens` for diagnostics;
        // `NoExpect` is a plain comparison with no side effect.
        match expect {
            TokenExpectType::Expect => self.check(k),
            TokenExpectType::NoExpect => self.token == **k,
        }
    }) {
        match self.token {
            token::CloseDelim(..) | token::Eof => break,
            _ => {}
        };
        if let Some(ref t) = sep.sep {
            if first {
                // No separator before the first element.
                first = false;
            } else {
                if let Err(mut e) = self.expect(t) {
                    // Attempt to keep parsing if it was a similar separator
                    if let Some(ref tokens) = t.similar_tokens() {
                        if tokens.contains(&self.token) {
                            self.bump();
                        }
                    }
                    e.emit();
                    // Attempt to keep parsing if it was an omitted separator
                    match f(self) {
                        Ok(t) => {
                            v.push(t);
                            continue;
                        },
                        Err(mut e) => {
                            e.cancel();
                            break;
                        }
                    }
                }
            }
        }
        // Stop before parsing another element if a trailing separator is
        // allowed and a ket follows the separator we just ate.
        if sep.trailing_sep_allowed && kets.iter().any(|k| {
            match expect {
                TokenExpectType::Expect => self.check(k),
                TokenExpectType::NoExpect => self.token == **k,
            }
        }) {
            break;
        }

        let t = f(self)?;
        v.push(t);
    }

    Ok(v)
}

/// Parse a bracketed sequence `bra ... ket`, consuming both delimiters.
/// The function f must consume tokens until reaching the next separator
/// or closing bracket.
fn parse_unspanned_seq<T, F>(&mut self,
                             bra: &token::Token,
                             ket: &token::Token,
                             sep: SeqSep,
                             f: F)
                             -> PResult<'a, Vec<T>> where
    F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
    self.expect(bra)?;
    let result = self.parse_seq_to_before_end(ket, sep, f)?;
    // `parse_seq_to_before_end` may have stopped early on recovery, so the
    // ket is only consumed if it is actually there.
    if self.token == *ket {
        self.bump();
    }
    Ok(result)
}

/// Advance the parser by one token
pub fn bump(&mut self) {
    if self.prev_token_kind == PrevTokenKind::Eof {
        // Bumping after EOF is a bad sign, usually an infinite loop.
        self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
    }

    self.prev_span = self.meta_var_span.take().unwrap_or(self.span);

    // Record last token kind for possible error recovery.
    self.prev_token_kind = match self.token {
        token::DocComment(..) => PrevTokenKind::DocComment,
        token::Comma => PrevTokenKind::Comma,
        token::BinOp(token::Plus) => PrevTokenKind::Plus,
        token::Interpolated(..) => PrevTokenKind::Interpolated,
        token::Eof => PrevTokenKind::Eof,
        token::Ident(..) => PrevTokenKind::Ident,
        _ => PrevTokenKind::Other,
    };

    let next = self.next_tok();
    self.span = next.sp;
    self.token = next.tok;
    self.expected_tokens.clear();
    // check after each token
    self.process_potential_macro_variable();
}

/// Advance the parser using provided token as a next one. Use this when
/// consuming a part of a token. For example a single `<` from `<<`.
fn bump_with(&mut self, next: token::Token, span: Span) {
    self.prev_span = self.span.with_hi(span.lo());
    // It would be incorrect to record the kind of the current token, but
    // fortunately for tokens currently using `bump_with`, the
    // prev_token_kind will be of no use anyway.
    self.prev_token_kind = PrevTokenKind::Other;
    self.span = span;
    self.token = next;
    self.expected_tokens.clear();
}

/// Apply `f` to the token `dist` positions ahead without advancing.
/// Lookahead does not descend into token trees: a delimited group appears
/// as its opening delimiter.
pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
    F: FnOnce(&token::Token) -> R,
{
    if dist == 0 {
        return f(&self.token)
    }

    f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
        Some(tree) => match tree {
            TokenTree::Token(_, tok) => tok,
            TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
        },
        // Past the end of the current frame: report its closing delimiter.
        None => token::CloseDelim(self.token_cursor.frame.delim),
    })
}

/// Span of the token `dist` positions ahead; falls back to the nearest
/// preceding token tree's span when looking past the current frame.
fn look_ahead_span(&self, dist: usize) -> Span {
    if dist == 0 {
        return self.span
    }

    match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
        Some(TokenTree::Token(span, _)) | Some(TokenTree::Delimited(span, _)) => span,
        None => self.look_ahead_span(dist - 1),
    }
}

/// Fatal error at the current token's span.
pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
    self.sess.span_diagnostic.struct_span_fatal(self.span, m)
}

/// Fatal error at an explicit span.
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
    self.sess.span_diagnostic.struct_span_fatal(sp, m)
}

/// Fatal error built from a structured `Error` value.
fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
    err.span_err(sp, self.diagnostic())
}

/// Internal compiler error at the current token's span. Never returns.
fn bug(&self, m: &str) -> ! {
    self.sess.span_diagnostic.span_bug(self.span, m)
}

/// Non-fatal error at an explicit span.
fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
    self.sess.span_diagnostic.span_err(sp, m)
}

/// Buildable non-fatal error at an explicit span.
fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
    self.sess.span_diagnostic.struct_span_err(sp, m)
}

/// Internal compiler error at an explicit span. Never returns.
crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
    self.sess.span_diagnostic.span_bug(sp, m)
}

crate fn abort_if_errors(&self) {
    self.sess.span_diagnostic.abort_if_errors();
}

/// Cancel a diagnostic that turned out not to apply.
fn cancel(&self, err: &mut DiagnosticBuilder) {
    self.sess.span_diagnostic.cancel(err)
}

crate fn diagnostic(&self) -> &'a errors::Handler {
    &self.sess.span_diagnostic
}

/// Is the current token one of the keywords that signals a bare function
/// type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
    self.check_keyword(keywords::Fn) ||
        self.check_keyword(keywords::Unsafe) ||
        self.check_keyword(keywords::Extern) && self.is_extern_non_path()
}

/// parse a TyKind::BareFn type:
fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
    // Grammar: `[unsafe] [extern "ABI"] fn (S) -> T`
    //          (function style, then ABI, then argument types, then return type).
    let unsafety = self.parse_unsafety();
    let abi = if self.eat_keyword(keywords::Extern) {
        // `extern` with no explicit ABI string defaults to "C".
        self.parse_opt_abi()?.unwrap_or(Abi::C)
    } else {
        Abi::Rust
    };

    self.expect_keyword(keywords::Fn)?;
    let (inputs, variadic) = self.parse_fn_args(false, true)?;
    let ret_ty = self.parse_ret_ty(false)?;
    let decl = P(FnDecl {
        inputs,
        output: ret_ty,
        variadic,
    });
    Ok(TyKind::BareFn(P(BareFnTy {
        abi,
        unsafety,
        generic_params,
        decl,
    })))
}

/// Parse asyncness: `async` or nothing
fn parse_asyncness(&mut self) -> IsAsync {
    if self.eat_keyword(keywords::Async) {
        IsAsync::Async {
            closure_id: ast::DUMMY_NODE_ID,
            return_impl_trait_id: ast::DUMMY_NODE_ID,
        }
    } else {
        IsAsync::NotAsync
    }
}

/// Parse unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
    if self.eat_keyword(keywords::Unsafe) {
        Unsafety::Unsafe
    } else {
        Unsafety::Normal
    }
}

/// Parse the items in a trait declaration
pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
    maybe_whole!(self, NtTraitItem, |x| x);
    let attrs = self.parse_outer_attributes()?;
    let (mut item, tokens) = self.collect_tokens(|this| {
        this.parse_trait_item_(at_end, attrs)
    })?;
    // See `parse_item` for why this clause is here.
    if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
        item.tokens = Some(tokens);
    }
    Ok(item)
}

/// Worker for `parse_trait_item`: dispatches on the leading token to an
/// associated type, associated const, item macro, or method.
fn parse_trait_item_(&mut self,
                     at_end: &mut bool,
                     mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
    let lo = self.span;

    let (name, node, generics) = if self.eat_keyword(keywords::Type) {
        self.parse_trait_item_assoc_ty()?
    } else if self.is_const_item() {
        self.expect_keyword(keywords::Const)?;
        let ident = self.parse_ident()?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;
        let default = if self.check(&token::Eq) {
            self.bump();
            let expr = self.parse_expr()?;
            self.expect(&token::Semi)?;
            Some(expr)
        } else {
            self.expect(&token::Semi)?;
            None
        };
        (ident, TraitItemKind::Const(ty, default), ast::Generics::default())
    } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
        // trait item macro.
        (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
    } else {
        let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;

        let ident = self.parse_ident()?;
        let mut generics = self.parse_generics()?;

        let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
            // This is somewhat dubious; We don't want to allow
            // argument names to be left off if there is a
            // definition...
            p.parse_arg_general(false)
        })?;
        generics.where_clause = self.parse_where_clause()?;

        let sig = ast::MethodSig {
            header: FnHeader {
                unsafety,
                constness,
                abi,
                asyncness,
            },
            decl: d,
        };

        let body = match self.token {
            token::Semi => {
                // Required method: `fn f(...);`
                self.bump();
                *at_end = true;
                debug!("parse_trait_methods(): parsing required method");
                None
            }
            token::OpenDelim(token::Brace) => {
                // Provided method: `fn f(...) { ... }`
                debug!("parse_trait_methods(): parsing provided method");
                *at_end = true;
                let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
                attrs.extend(inner_attrs.iter().cloned());
                Some(body)
            }
            _ => {
                let token_str = self.this_token_to_string();
                let mut err = self.fatal(&format!("expected `;` or `{{`, found `{}`", token_str));
                err.span_label(self.span, "expected `;` or `{`");
                return Err(err);
            }
        };
        (ident, ast::TraitItemKind::Method(sig, body), generics)
    };

    Ok(TraitItem {
        id: ast::DUMMY_NODE_ID,
        ident: name,
        attrs,
        generics,
        node,
        span: lo.to(self.prev_span),
        tokens: None,
    })
}

/// Parse optional return type [ -> TY ] in function decl
fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
    if self.eat(&token::RArrow) {
        Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
    } else {
        // No `->`: default return type, anchored at an empty span here.
        Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
    }
}

// Parse a type
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
    self.parse_ty_common(true, true)
}

/// Parse a type in restricted contexts where `+` is not permitted.
/// Example 1: `&'a TYPE`
///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
/// Example 2: `value1 as TYPE + value2`
///     `+` is prohibited to avoid interactions with expression grammar.
fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
    self.parse_ty_common(false, true)
}

/// Main type parser. `allow_plus` controls whether a trailing `+ BOUND`
/// list may be consumed; `allow_qpath_recovery` enables recovery from
/// `[T]::Assoc`-style paths missing angle brackets.
fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool)
                   -> PResult<'a, P<Ty>> {
    maybe_whole!(self, NtTy, |x| x);

    let lo = self.span;
    let mut impl_dyn_multi = false;
    let node = if self.eat(&token::OpenDelim(token::Paren)) {
        // `(TYPE)` is a parenthesized type.
        // `(TYPE,)` is a tuple with a single field of type TYPE.
        let mut ts = vec![];
        let mut last_comma = false;
        while self.token != token::CloseDelim(token::Paren) {
            ts.push(self.parse_ty()?);
            if self.eat(&token::Comma) {
                last_comma = true;
            } else {
                last_comma = false;
                break;
            }
        }
        let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
        self.expect(&token::CloseDelim(token::Paren))?;

        if ts.len() == 1 && !last_comma {
            // Exactly one element and no trailing comma: parenthesized type.
            let ty = ts.into_iter().nth(0).unwrap().into_inner();
            let maybe_bounds = allow_plus && self.token.is_like_plus();
            match ty.node {
                // `(TY_BOUND_NOPAREN) + BOUND + ...`.
                TyKind::Path(None, ref path) if maybe_bounds => {
                    self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
                }
                TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
                        if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
                    let path = match bounds[0] {
                        GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
                        _ => self.bug("unexpected lifetime bound"),
                    };
                    self.parse_remaining_bounds(Vec::new(), path, lo, true)?
                }
                // `(TYPE)`
                _ => TyKind::Paren(P(ty))
            }
        } else {
            TyKind::Tup(ts)
        }
    } else if self.eat(&token::Not) {
        // Never type `!`
        TyKind::Never
    } else if self.eat(&token::BinOp(token::Star)) {
        // Raw pointer
        TyKind::Ptr(self.parse_ptr()?)
    } else if self.eat(&token::OpenDelim(token::Bracket)) {
        // Array or slice
        let t = self.parse_ty()?;
        // Parse optional `; EXPR` in `[TYPE; EXPR]`
        let t = match self.maybe_parse_fixed_length_of_vec()? {
            None => TyKind::Slice(t),
            Some(length) => TyKind::Array(t, AnonConst {
                id: ast::DUMMY_NODE_ID,
                value: length,
            }),
        };
        self.expect(&token::CloseDelim(token::Bracket))?;
        t
    } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
        // Reference
        self.expect_and()?;
        self.parse_borrowed_pointee()?
    } else if self.eat_keyword_noexpect(keywords::Typeof) {
        // `typeof(EXPR)`
        // In order to not be ambiguous, the type must be surrounded by parens.
        self.expect(&token::OpenDelim(token::Paren))?;
        let e = AnonConst {
            id: ast::DUMMY_NODE_ID,
            value: self.parse_expr()?,
        };
        self.expect(&token::CloseDelim(token::Paren))?;
        TyKind::Typeof(e)
    } else if self.eat_keyword(keywords::Underscore) {
        // A type to be inferred `_`
        TyKind::Infer
    } else if self.token_is_bare_fn_keyword() {
        // Function pointer type
        self.parse_ty_bare_fn(Vec::new())?
    } else if self.check_keyword(keywords::For) {
        // Function pointer type or bound list (trait object type) starting with a poly-trait.
        //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
        //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
        let lo = self.span;
        let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
        if self.token_is_bare_fn_keyword() {
            self.parse_ty_bare_fn(lifetime_defs)?
        } else {
            let path = self.parse_path(PathStyle::Type)?;
            let parse_plus = allow_plus && self.check_plus();
            self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
        }
    } else if self.eat_keyword(keywords::Impl) {
        // Always parse bounds greedily for better error recovery.
        let bounds = self.parse_generic_bounds()?;
        impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
        TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
    } else if self.check_keyword(keywords::Dyn) &&
              self.look_ahead(1, |t| t.can_begin_bound() &&
                                     !can_continue_type_after_non_fn_ident(t)) {
        self.bump(); // `dyn`
        // Always parse bounds greedily for better error recovery.
        let bounds = self.parse_generic_bounds()?;
        impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
        TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
    } else if self.check(&token::Question) ||
              self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
        // Bound list (trait object type)
        TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus)?,
                            TraitObjectSyntax::None)
    } else if self.eat_lt() {
        // Qualified path
        let (qself, path) = self.parse_qpath(PathStyle::Type)?;
        TyKind::Path(Some(qself), path)
    } else if self.token.is_path_start() {
        // Simple path
        let path = self.parse_path(PathStyle::Type)?;
        if self.eat(&token::Not) {
            // Macro invocation in type position
            let (delim, tts) = self.expect_delimited_token_tree()?;
            let node = Mac_ { path, tts, delim };
            TyKind::Mac(respan(lo.to(self.prev_span), node))
        } else {
            // Just a type path or bound list (trait object type) starting with a trait.
            //   `Type`
            //   `Trait1 + Trait2 + 'a`
            if allow_plus && self.check_plus() {
                self.parse_remaining_bounds(Vec::new(), path, lo, true)?
            } else {
                TyKind::Path(None, path)
            }
        }
    } else {
        let msg = format!("expected type, found {}", self.this_token_descr());
        return Err(self.fatal(&msg));
    };

    let span = lo.to(self.prev_span);
    let ty = Ty { node, span, id: ast::DUMMY_NODE_ID };

    // Try to recover from use of `+` with incorrect priority.
    self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
    self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
    let ty = self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)?;

    Ok(P(ty))
}

/// Having parsed `path` as the first bound, parse the rest of a `+`-separated
/// bound list into a `TraitObject` type.
fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
                          lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
    let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
    let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
    if parse_plus {
        self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
        bounds.append(&mut self.parse_generic_bounds()?);
    }
    Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}

/// Suggest parenthesizing `impl A + B` / `dyn A + B` when `+` is not
/// allowed in this position.
fn maybe_report_ambiguous_plus(&mut self, allow_plus: bool, impl_dyn_multi: bool, ty: &Ty) {
    if !allow_plus && impl_dyn_multi {
        let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
        self.struct_span_err(ty.span, "ambiguous `+` in a type")
            .span_suggestion_with_applicability(
                ty.span,
                "use parentheses to disambiguate",
                sum_with_parens,
                Applicability::MachineApplicable
            ).emit();
    }
}

/// Recover from `TYPE + BOUND` where `TYPE` is not a path (E0178),
/// consuming the bounds and emitting a suggestion.
fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> {
    // Do not add `+` to expected tokens.
    if !allow_plus || !self.token.is_like_plus() {
        return Ok(())
    }

    self.bump(); // `+`
    let bounds = self.parse_generic_bounds()?;
    let sum_span = ty.span.to(self.prev_span);

    let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
        "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));

    match ty.node {
        TyKind::Rptr(ref lifetime, ref mut_ty) => {
            // Re-print the reference with the pointee parenthesized:
            // `&'a mut (T + Bound)`.
            let sum_with_parens = pprust::to_string(|s| {
                use print::pprust::PrintState;

                s.s.word("&")?;
                s.print_opt_lifetime(lifetime)?;
                s.print_mutability(mut_ty.mutbl)?;
                s.popen()?;
                s.print_type(&mut_ty.ty)?;
                s.print_type_bounds(" +", &bounds)?;
                s.pclose()
            });
            err.span_suggestion_with_applicability(
                sum_span,
                "try adding parentheses",
                sum_with_parens,
                Applicability::MachineApplicable
            );
        }
        TyKind::Ptr(..) | TyKind::BareFn(..) => {
            err.span_label(sum_span, "perhaps you forgot parentheses?");
        }
        _ => {
            err.span_label(sum_span, "expected a path");
        },
    }
    err.emit();
    Ok(())
}

// Try to recover from associated item paths like `[T]::AssocItem`/`(T, U)::AssocItem`.
fn maybe_recover_from_bad_qpath<T: RecoverQPath>(&mut self, base: T, allow_recovery: bool)
                                                 -> PResult<'a, T> {
    // Do not add `::` to expected tokens.
    if !allow_recovery || self.token != token::ModSep {
        return Ok(base);
    }
    let ty = match base.to_ty() {
        Some(ty) => ty,
        None => return Ok(base),
    };

    self.bump(); // `::`
    let mut segments = Vec::new();
    self.parse_path_segments(&mut segments, T::PATH_STYLE, true)?;

    let span = ty.span.to(self.prev_span);
    let path_span = span.to(span); // use an empty path since `position` == 0
    let recovered = base.to_recovered(
        Some(QSelf { ty, path_span, position: 0 }),
        ast::Path { segments, span },
    );

    self.diagnostic()
        .struct_span_err(span, "missing angle brackets in associated item path")
        .span_suggestion_with_applicability( // this is a best-effort recovery
            span, "try", recovered.to_string(), Applicability::MaybeIncorrect
        ).emit();

    Ok(recovered)
}

/// Parse what follows an already-consumed `&`: optional lifetime, optional
/// `mut`, then the pointee type (with `+` forbidden — see `parse_ty_no_plus`).
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
    let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
    let mutbl = self.parse_mutability();
    let ty = self.parse_ty_no_plus()?;
    return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
}

/// Parse what follows an already-consumed `*`: `mut` or `const`, then the
/// pointee. Missing qualifier is an error but recovers as `*const`.
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
    let mutbl = if self.eat_keyword(keywords::Mut) {
        Mutability::Mutable
    } else if self.eat_keyword(keywords::Const) {
        Mutability::Immutable
    } else {
        let span = self.prev_span;
        self.span_err(span,
                      "expected mut or const in raw pointer type (use \
                       `*mut T` or `*const T` as appropriate)");
        Mutability::Immutable
    };
    let t = self.parse_ty_no_plus()?;
    Ok(MutTy { ty: t, mutbl: mutbl })
}

/// Lookahead: does the upcoming input look like `name: TYPE` (a named
/// argument), skipping over a leading `&`/`&&`/`mut`?
fn is_named_argument(&mut self) -> bool {
    let offset = match self.token {
        token::Interpolated(ref nt) => match nt.0 {
            token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
            _ => 0,
        }
        token::BinOp(token::And) | token::AndAnd => 1,
        _ if self.token.is_keyword(keywords::Mut) => 1,
        _ => 0,
    };

    self.look_ahead(offset, |t| t.is_ident()) &&
        self.look_ahead(offset + 1, |t| t == &token::Colon)
}

/// This version of parse arg doesn't necessarily require
/// identifier names.
fn parse_arg_general(&mut self, require_name: bool) -> PResult<'a, Arg> { maybe_whole!(self, NtArg, |x| x); let (pat, ty) = if require_name || self.is_named_argument() { debug!("parse_arg_general parse_pat (require_name:{})", require_name); let pat = self.parse_pat()?; self.expect(&token::Colon)?; (pat, self.parse_ty()?) } else { debug!("parse_arg_general ident_to_pat"); let parser_snapshot_before_pat = self.clone(); // We're going to try parsing the argument as a pattern (even though it's not // allowed). This way we can provide better errors to the user. let pat_arg: PResult<'a, _> = { let pat = self.parse_pat()?; self.expect(&token::Colon)?; Ok((pat, self.parse_ty()?)) }; match pat_arg { Ok((pat, ty)) => { let mut err = self.diagnostic().struct_span_err_with_code( pat.span, "patterns aren't allowed in methods without bodies", DiagnosticId::Error("E0642".into()), ); err.span_suggestion_short_with_applicability( pat.span, "give this argument a name or use an underscore to ignore it", "_".to_owned(), Applicability::MachineApplicable, ); err.emit(); // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. let pat = P(Pat { node: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID }); (pat, ty) } Err(mut err) => { err.cancel(); // Recover from attempting to parse the argument as a pattern. This means // the type is alone, with no name, e.g. `fn foo(u32)`. mem::replace(self, parser_snapshot_before_pat); debug!("parse_arg_general ident_to_pat"); let ident = Ident::new(keywords::Invalid.name(), self.prev_span); let ty = self.parse_ty()?; let pat = P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident( BindingMode::ByValue(Mutability::Immutable), ident, None), span: ty.span, }); (pat, ty) } } }; Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID }) } /// Parse a single function argument crate fn parse_arg(&mut self) -> PResult<'a, Arg> { self.parse_arg_general(true) } /// Parse an argument in a lambda header e.g. 
|arg, arg| fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { let pat = self.parse_pat()?; let t = if self.eat(&token::Colon) { self.parse_ty()? } else { P(Ty { id: ast::DUMMY_NODE_ID, node: TyKind::Infer, span: self.span, }) }; Ok(Arg { ty: t, pat, id: ast::DUMMY_NODE_ID }) } fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> { if self.eat(&token::Semi) { Ok(Some(self.parse_expr()?)) } else { Ok(None) } } /// Matches token_lit = LIT_INTEGER | ... fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } }, _ => { return self.unexpected_last(&self.token); } }, token::Literal(lit, suf) => { let diag = Some((self.span, &self.sess.span_diagnostic)); let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); if suffix_illegal { let sp = self.span; self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf) } result.unwrap() } _ => { return self.unexpected_last(&self.token); } }; self.bump(); Ok(out) } /// Matches lit = true | false | token_lit crate fn parse_lit(&mut self) -> PResult<'a, Lit> { let lo = self.span; let lit = if self.eat_keyword(keywords::True) { LitKind::Bool(true) } else if self.eat_keyword(keywords::False) { LitKind::Bool(false) } else { let lit = self.parse_lit_token()?; lit }; Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// matches '-' lit | lit (cf. 
ast_validation::AstValidator::check_expr_within_pat) crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); let minus_lo = self.span; let minus_present = self.eat(&token::BinOp(token::Minus)); let lo = self.span; let literal = P(self.parse_lit()?); let hi = self.prev_span; let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); if minus_present { let minus_hi = self.prev_span; let unary = self.mk_unary(UnOp::Neg, expr); Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) } else { Ok(expr) } } fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token { token::Ident(ident, _) if self.token.is_path_segment_keyword() => { let span = self.span; self.bump(); Ok(Ident::new(ident.name, span)) } _ => self.parse_ident(), } } /// Parses qualified path. /// Assumes that the leading `<` has been parsed already. /// /// `qualified_path = <type [as trait_ref]>::path` /// /// # Examples /// `<T>::default` /// `<T as U>::a` /// `<T as U>::F::a<S>` (without disambiguator) /// `<T as U>::F::a::<S>` (with disambiguator) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> { let lo = self.prev_span; let ty = self.parse_ty()?; // `path` will contain the prefix of the path up to the `>`, // if any (e.g., `U` in the `<T as U>::*` examples // above). `path_span` has the span of that path, or an empty // span in the case of something like `<T>::Bar`. 
    let (mut path, path_span);
    if self.eat_keyword(keywords::As) {
        let path_lo = self.span;
        path = self.parse_path(PathStyle::Type)?;
        path_span = path_lo.to(self.prev_span);
    } else {
        // No `as Trait` part: start from an empty path rooted at the `>`.
        path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
        path_span = self.span.to(self.span);
    }
    self.expect(&token::Gt)?;
    self.expect(&token::ModSep)?;

    // `position` records how many segments belong to the qualified prefix.
    let qself = QSelf { ty, path_span, position: path.segments.len() };
    self.parse_path_segments(&mut path.segments, style, true)?;

    Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
}

/// Parses simple paths.
///
/// `path = [::] segment+`
/// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
///
/// # Examples
/// `a::b::C<D>` (without disambiguator)
/// `a::b::C::<D>` (with disambiguator)
/// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator)
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
    self.parse_path_common(style, true)
}

/// Worker for `parse_path`; `enable_warning` controls the "unnecessary path
/// disambiguator" lint emitted further down in `parse_path_segment`.
crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
                           -> PResult<'a, ast::Path> {
    // An interpolated `NtPath` short-circuits parsing; mod-style paths must
    // not carry generic arguments.
    maybe_whole!(self, NtPath, |path| {
        if style == PathStyle::Mod &&
           path.segments.iter().any(|segment| segment.args.is_some()) {
            self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
        }
        path
    });

    let lo = self.meta_var_span.unwrap_or(self.span);
    let mut segments = Vec::new();
    if self.eat(&token::ModSep) {
        // Leading `::` — record the crate root as the first segment.
        segments.push(PathSegment::crate_root(lo.shrink_to_lo()));
    }
    self.parse_path_segments(&mut segments, style, enable_warning)?;

    Ok(ast::Path { segments, span: lo.to(self.prev_span) })
}

/// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
/// This is used when parsing derive macro paths in `#[derive]` attributes.
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
    // A bare `Word` meta item interpolated from a macro is accepted as a path.
    let meta_ident = match self.token {
        token::Interpolated(ref nt) => match nt.0 {
            token::NtMeta(ref meta) => match meta.node {
                ast::MetaItemKind::Word => Some(meta.ident.clone()),
                _ => None,
            },
            _ => None,
        },
        _ => None,
    };
    if let Some(path) = meta_ident {
        self.bump();
        return Ok(path);
    }
    self.parse_path(style)
}

/// Appends `::`-separated segments to `segments` until the path ends
/// (import coupler `::{`/`::*` or no further `::`).
fn parse_path_segments(&mut self,
                       segments: &mut Vec<PathSegment>,
                       style: PathStyle,
                       enable_warning: bool)
                       -> PResult<'a, ()> {
    loop {
        segments.push(self.parse_path_segment(style, enable_warning)?);

        if self.is_import_coupler() || !self.eat(&token::ModSep) {
            return Ok(());
        }
    }
}

/// Parses one path segment: an identifier optionally followed by generic
/// arguments (`<...>` / `(...) -> T`), depending on `style`.
fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
                      -> PResult<'a, PathSegment> {
    let ident = self.parse_path_segment_ident()?;

    // Tokens that can begin a generic-argument list.
    let is_args_start = |token: &token::Token| match *token {
        token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true,
        _ => false,
    };
    let check_args_start = |this: &mut Self| {
        this.expected_tokens.extend_from_slice(
            &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
        );
        is_args_start(&this.token)
    };

    Ok(if style == PathStyle::Type && check_args_start(self) ||
          style != PathStyle::Mod && self.check(&token::ModSep)
                                  && self.look_ahead(1, |t| is_args_start(t)) {
        // Generic arguments are found - `<`, `(`, `::<` or `::(`.
        let lo = self.span;

        // `::<` in type position is legal but redundant — warn and continue.
        if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
            self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator")
                             .span_label(self.prev_span, "try removing `::`").emit();
        }

        let args = if self.eat_lt() {
            // `<'a, T, A = U>`
            let (args, bindings) = self.parse_generic_args()?;
            self.expect_gt()?;
            let span = lo.to(self.prev_span);
            AngleBracketedArgs { args, bindings, span }.into()
        } else {
            // `(T, U) -> R`
            self.bump(); // `(`
            let inputs = self.parse_seq_to_before_tokens(
                &[&token::CloseDelim(token::Paren)],
                SeqSep::trailing_allowed(token::Comma),
                TokenExpectType::Expect,
                |p| p.parse_ty())?;
            self.bump(); // `)`
            let span = lo.to(self.prev_span);
            let output = if self.eat(&token::RArrow) {
                Some(self.parse_ty_common(false, false)?)
            } else {
                None
            };
            ParenthesisedArgs { inputs, output, span }.into()
        };

        PathSegment { ident, args }
    } else {
        // Generic arguments are not found.
        PathSegment::from_ident(ident)
    })
}

/// Returns whether the current token is a lifetime, registering the
/// expectation for diagnostics either way.
crate fn check_lifetime(&mut self) -> bool {
    self.expected_tokens.push(TokenType::Lifetime);
    self.token.is_lifetime()
}

/// Parse single lifetime 'a or panic.
crate fn expect_lifetime(&mut self) -> Lifetime {
    if let Some(ident) = self.token.lifetime() {
        let span = self.span;
        self.bump();
        Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
    } else {
        // Caller contract violated: only call after `check_lifetime`.
        self.span_bug(self.span, "not a lifetime")
    }
}

/// Consumes a loop/block label (`'label`) if one is present.
fn eat_label(&mut self) -> Option<Label> {
    if let Some(ident) = self.token.lifetime() {
        let span = self.span;
        self.bump();
        Some(Label { ident: Ident::new(ident.name, span) })
    } else {
        None
    }
}

/// Parse mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
    if self.eat_keyword(keywords::Mut) {
        Mutability::Mutable
    } else {
        Mutability::Immutable
    }
}

/// Parses a struct-field name: either a tuple index (`0`, `1`, …, accepted
/// only without a suffix) or a plain identifier.
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
    if let token::Literal(token::Integer(name), None) = self.token {
        self.bump();
        Ok(Ident::new(name, self.prev_span))
    } else {
        self.parse_ident_common(false)
    }
}

/// Parse ident (COLON expr)?
fn parse_field(&mut self) -> PResult<'a, Field> {
    let attrs = self.parse_outer_attributes()?;
    let lo = self.span;

    // Check if a colon exists one ahead. This means we're parsing a fieldname.
    let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
        let fieldname = self.parse_field_name()?;
        self.bump(); // `:`
        (fieldname, self.parse_expr()?, false)
    } else {
        let fieldname = self.parse_ident_common(false)?;

        // Mimic `x: x` for the `x` field shorthand.
        let path = ast::Path::from_ident(fieldname);
        let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
        (fieldname, expr, true)
    };
    Ok(ast::Field {
        ident: fieldname,
        span: lo.to(expr.span),
        expr,
        is_shorthand,
        attrs: attrs.into(),
    })
}

// --- Small AST-construction helpers; all nodes get DUMMY_NODE_ID and are
// --- renumbered later by the AST id assignment pass.

fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
    P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
}

fn mk_unary(&mut self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
    ExprKind::Unary(unop, expr)
}

fn mk_binary(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
    ExprKind::Binary(binop, lhs, rhs)
}

fn mk_call(&mut self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
    ExprKind::Call(f, args)
}

fn mk_index(&mut self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
    ExprKind::Index(expr, idx)
}

/// Builds a range expression; `a..=` (closed range with no end) is an error.
fn mk_range(&mut self,
            start: Option<P<Expr>>,
            end: Option<P<Expr>>,
            limits: RangeLimits)
            -> PResult<'a, ast::ExprKind> {
    if end.is_none() && limits == RangeLimits::Closed {
        Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
    } else {
        Ok(ExprKind::Range(start, end, limits))
    }
}

fn
mk_assign_op(&mut self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
    ExprKind::AssignOp(binop, lhs, rhs)
}

/// Wraps a macro invocation `m` in an expression node carrying `attrs`.
pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> {
    P(Expr {
        id: ast::DUMMY_NODE_ID,
        node: ExprKind::Mac(source_map::Spanned {node: m, span: span}),
        span,
        attrs,
    })
}

/// Expects a delimited token tree (`(...)`, `[...]` or `{...}`) — e.g. macro
/// arguments — and returns the delimiter kind plus the inner token stream.
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
    let delim = match self.token {
        token::OpenDelim(delim) => delim,
        _ => {
            let msg = "expected open delimiter";
            let mut err = self.fatal(msg);
            err.span_label(self.span, msg);
            return Err(err)
        }
    };
    let delimited = match self.parse_token_tree() {
        TokenTree::Delimited(_, delimited) => delimited,
        // parse_token_tree on an OpenDelim always yields Delimited.
        _ => unreachable!(),
    };
    let delim = match delim {
        token::Paren => MacDelimiter::Parenthesis,
        token::Bracket => MacDelimiter::Bracket,
        token::Brace => MacDelimiter::Brace,
        token::NoDelim => self.bug("unexpected no delimiter"),
    };
    Ok((delim, delimited.stream().into()))
}

/// At the bottom (top?) of the precedence hierarchy,
/// parse things like parenthesized exprs,
/// macros, return, etc.
///
/// NB: This does not parse outer attributes,
/// and is private because it only works
/// correctly if called from parse_dot_or_call_expr().
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
    maybe_whole_expr!(self);

    // Outer attributes are already parsed and will be
    // added to the return value after the fact.
    //
    // Therefore, prevent sub-parser from parsing
    // attributes by giving them a empty "already parsed" list.
    let mut attrs = ThinVec::new();

    let lo = self.span;
    let mut hi = self.span;

    let ex: ExprKind;

    // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
    match self.token {
        token::OpenDelim(token::Paren) => {
            self.bump();

            attrs.extend(self.parse_inner_attributes()?);

            // (e) is parenthesized e
            // (e,) is a tuple with only one field, e
            let mut es = vec![];
            let mut trailing_comma = false;
            while self.token != token::CloseDelim(token::Paren) {
                es.push(self.parse_expr()?);
                self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
                if self.check(&token::Comma) {
                    trailing_comma = true;
                    self.bump();
                } else {
                    trailing_comma = false;
                    break;
                }
            }
            self.bump();

            hi = self.prev_span;
            // `(e)` without a trailing comma is a parenthesized expression,
            // not a one-element tuple.
            ex = if es.len() == 1 && !trailing_comma {
                ExprKind::Paren(es.into_iter().nth(0).unwrap())
            } else {
                ExprKind::Tup(es)
            };
        }
        token::OpenDelim(token::Brace) => {
            return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
        }
        token::BinOp(token::Or) | token::OrOr => {
            return self.parse_lambda_expr(attrs);
        }
        token::OpenDelim(token::Bracket) => {
            self.bump();

            attrs.extend(self.parse_inner_attributes()?);

            if self.check(&token::CloseDelim(token::Bracket)) {
                // Empty vector.
                self.bump();
                ex = ExprKind::Array(Vec::new());
            } else {
                // Nonempty vector.
                let first_expr = self.parse_expr()?;
                if self.check(&token::Semi) {
                    // Repeating array syntax: [ 0; 512 ]
                    self.bump();
                    let count = AnonConst {
                        id: ast::DUMMY_NODE_ID,
                        value: self.parse_expr()?,
                    };
                    self.expect(&token::CloseDelim(token::Bracket))?;
                    ex = ExprKind::Repeat(first_expr, count);
                } else if self.check(&token::Comma) {
                    // Vector with two or more elements.
                    self.bump();
                    let remaining_exprs = self.parse_seq_to_end(
                        &token::CloseDelim(token::Bracket),
                        SeqSep::trailing_allowed(token::Comma),
                        |p| Ok(p.parse_expr()?)
                    )?;
                    let mut exprs = vec![first_expr];
                    exprs.extend(remaining_exprs);
                    ex = ExprKind::Array(exprs);
                } else {
                    // Vector with one element.
                    self.expect(&token::CloseDelim(token::Bracket))?;
                    ex = ExprKind::Array(vec![first_expr]);
                }
            }
            hi = self.prev_span;
        }
        _ => {
            if self.eat_lt() {
                // Qualified path expression: `<T as Trait>::path`.
                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                hi = path.span;
                return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
            }
            // `async` expressions are only recognized in the 2018 edition.
            if self.span.edition() >= Edition::Edition2018 &&
               self.check_keyword(keywords::Async)
            {
                if self.is_async_block() { // check for `async {` and `async move {`
                    return self.parse_async_block(attrs);
                } else {
                    return self.parse_lambda_expr(attrs);
                }
            }
            if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
                return self.parse_lambda_expr(attrs);
            }
            if self.eat_keyword(keywords::If) {
                return self.parse_if_expr(attrs);
            }
            if self.eat_keyword(keywords::For) {
                let lo = self.prev_span;
                return self.parse_for_expr(None, lo, attrs);
            }
            if self.eat_keyword(keywords::While) {
                let lo = self.prev_span;
                return self.parse_while_expr(None, lo, attrs);
            }
            // A label (`'a:`) must be followed by a loop form or a block.
            if let Some(label) = self.eat_label() {
                let lo = label.ident.span;
                self.expect(&token::Colon)?;
                if self.eat_keyword(keywords::While) {
                    return self.parse_while_expr(Some(label), lo, attrs)
                }
                if self.eat_keyword(keywords::For) {
                    return self.parse_for_expr(Some(label), lo, attrs)
                }
                if self.eat_keyword(keywords::Loop) {
                    return self.parse_loop_expr(Some(label), lo, attrs)
                }
                if self.token == token::OpenDelim(token::Brace) {
                    return self.parse_block_expr(Some(label),
                                                 lo,
                                                 BlockCheckMode::Default,
                                                 attrs);
                }
                let msg = "expected `while`, `for`, `loop` or `{` after a label";
                let mut err = self.fatal(msg);
                err.span_label(self.span, msg);
                return Err(err);
            }
            if self.eat_keyword(keywords::Loop) {
                let lo = self.prev_span;
                return self.parse_loop_expr(None, lo, attrs);
            }
            if self.eat_keyword(keywords::Continue) {
                let label = self.eat_label();
                let ex = ExprKind::Continue(label);
                let hi = self.prev_span;
                return Ok(self.mk_expr(lo.to(hi), ex, attrs));
            }
            if self.eat_keyword(keywords::Match) {
                return self.parse_match_expr(attrs);
            }
            if
            self.eat_keyword(keywords::Unsafe) {
                return self.parse_block_expr(
                    None,
                    lo,
                    BlockCheckMode::Unsafe(ast::UserProvided),
                    attrs);
            }
            if self.is_catch_expr() {
                let lo = self.span;
                // is_catch_expr guaranteed both keywords are next.
                assert!(self.eat_keyword(keywords::Do));
                assert!(self.eat_keyword(keywords::Catch));
                return self.parse_catch_expr(lo, attrs);
            }
            if self.eat_keyword(keywords::Return) {
                if self.token.can_begin_expr() {
                    let e = self.parse_expr()?;
                    hi = e.span;
                    ex = ExprKind::Ret(Some(e));
                } else {
                    ex = ExprKind::Ret(None);
                }
            } else if self.eat_keyword(keywords::Break) {
                let label = self.eat_label();
                // `break {..}` is ambiguous with a struct-literal-free context;
                // only take a value expression when struct literals are allowed.
                let e = if self.token.can_begin_expr()
                           && !(self.token == token::OpenDelim(token::Brace)
                                && self.restrictions.contains(
                                       Restrictions::NO_STRUCT_LITERAL)) {
                    Some(self.parse_expr()?)
                } else {
                    None
                };
                ex = ExprKind::Break(label, e);
                hi = self.prev_span;
            } else if self.eat_keyword(keywords::Yield) {
                if self.token.can_begin_expr() {
                    let e = self.parse_expr()?;
                    hi = e.span;
                    ex = ExprKind::Yield(Some(e));
                } else {
                    ex = ExprKind::Yield(None);
                }
            } else if self.token.is_keyword(keywords::Let) {
                // Catch this syntax error here, instead of in `parse_ident`, so
                // that we can explicitly mention that let is not to be used as an expression
                let mut db = self.fatal("expected expression, found statement (`let`)");
                db.span_label(self.span, "expected expression");
                db.note("variable declaration using `let` is a statement");
                return Err(db);
            } else if self.token.is_path_start() {
                let pth = self.parse_path(PathStyle::Expr)?;

                // `!`, as an operator, is prefix, so we know this isn't that
                if self.eat(&token::Not) {
                    // MACRO INVOCATION expression
                    let (delim, tts) = self.expect_delimited_token_tree()?;
                    let hi = self.prev_span;
                    let node = Mac_ { path: pth, tts, delim };
                    return Ok(self.mk_mac_expr(lo.to(hi), node, attrs))
                }
                if self.check(&token::OpenDelim(token::Brace)) {
                    // This is a struct literal, unless we're prohibited
                    // from parsing struct literals here.
                    let prohibited = self.restrictions.contains(
                        Restrictions::NO_STRUCT_LITERAL
                    );
                    if !prohibited {
                        return self.parse_struct_expr(lo, pth, attrs);
                    }
                }

                hi = pth.span;
                ex = ExprKind::Path(None, pth);
            } else {
                // Last resort: a (possibly negated) literal.
                match self.parse_literal_maybe_minus() {
                    Ok(expr) => {
                        hi = expr.span;
                        ex = expr.node.clone();
                    }
                    Err(mut err) => {
                        self.cancel(&mut err);
                        let msg = format!("expected expression, found {}",
                                          self.this_token_descr());
                        let mut err = self.fatal(&msg);
                        err.span_label(self.span, "expected expression");
                        return Err(err);
                    }
                }
            }
        }
    }

    let expr = Expr { node: ex, span: lo.to(hi), id: ast::DUMMY_NODE_ID, attrs };
    let expr = self.maybe_recover_from_bad_qpath(expr, true)?;

    return Ok(P(expr));
}

/// Parses the `{ field: expr, .., ..base }` body of a struct literal whose
/// path `pth` has already been parsed; `lo` is the start of the whole literal.
fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
                     -> PResult<'a, P<Expr>> {
    let struct_sp = lo.to(self.prev_span);
    self.bump();
    let mut fields = Vec::new();
    let mut base = None;

    attrs.extend(self.parse_inner_attributes()?);

    while self.token != token::CloseDelim(token::Brace) {
        if self.eat(&token::DotDot) {
            let exp_span = self.prev_span;
            match self.parse_expr() {
                Ok(e) => {
                    base = Some(e);
                }
                Err(mut e) => {
                    e.emit();
                    self.recover_stmt();
                }
            }
            // `..base` must be last — a following comma is recovered from.
            if self.token == token::Comma {
                let mut err = self.sess.span_diagnostic.mut_span_err(
                    exp_span.to(self.prev_span),
                    "cannot use a comma after the base struct",
                );
                err.span_suggestion_short_with_applicability(
                    self.span,
                    "remove this comma",
                    "".to_owned(),
                    Applicability::MachineApplicable
                );
                err.note("the base struct must always be the last field");
                err.emit();
                self.recover_stmt();
            }
            break;
        }

        match self.parse_field() {
            Ok(f) => fields.push(f),
            Err(mut e) => {
                e.span_label(struct_sp, "while parsing this struct");
                e.emit();

                // If the next token is a comma, then try to parse
                // what comes next as additional fields, rather than
                // bailing out until next `}`.
                if self.token != token::Comma {
                    self.recover_stmt();
                    break;
                }
            }
        }

        match self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) {
            Ok(()) => {}
            Err(mut e) => {
                e.emit();
                self.recover_stmt();
                break;
            }
        }
    }

    let span = lo.to(self.span);
    self.expect(&token::CloseDelim(token::Brace))?;
    return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
}

/// Returns `already_parsed_attrs` if supplied, otherwise parses outer
/// attributes from the token stream.
fn parse_or_use_outer_attributes(&mut self,
                                 already_parsed_attrs: Option<ThinVec<Attribute>>)
                                 -> PResult<'a, ThinVec<Attribute>> {
    if let Some(attrs) = already_parsed_attrs {
        Ok(attrs)
    } else {
        self.parse_outer_attributes().map(|a| a.into())
    }
}

/// Parse a block or unsafe block
fn parse_block_expr(&mut self,
                    opt_label: Option<Label>,
                    lo: Span,
                    blk_mode: BlockCheckMode,
                    outer_attrs: ThinVec<Attribute>)
                    -> PResult<'a, P<Expr>> {
    self.expect(&token::OpenDelim(token::Brace))?;

    let mut attrs = outer_attrs;
    attrs.extend(self.parse_inner_attributes()?);

    let blk = self.parse_block_tail(lo, blk_mode)?;
    return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
}

/// parse a.b or a(13) or a[4] or just a
fn parse_dot_or_call_expr(&mut self,
                          already_parsed_attrs: Option<ThinVec<Attribute>>)
                          -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;

    let b = self.parse_bottom_expr();
    let (span, b) = self.interpolated_or_expr_span(b)?;
    self.parse_dot_or_call_expr_with(b, span, attrs)
}

/// Continues parsing `.field` / call / index suffixes onto an already-parsed
/// base expression `e0`, then attaches `attrs` to the final expression.
fn parse_dot_or_call_expr_with(&mut self,
                               e0: P<Expr>,
                               lo: Span,
                               mut attrs: ThinVec<Attribute>)
                               -> PResult<'a, P<Expr>> {
    // Stitch the list of outer attributes onto the return value.
    // A little bit ugly, but the best way given the current code
    // structure
    self.parse_dot_or_call_expr_with_(e0, lo)
        .map(|expr|
            expr.map(|mut expr| {
                attrs.extend::<Vec<_>>(expr.attrs.into());
                expr.attrs = attrs;
                match expr.node {
                    ExprKind::If(..) | ExprKind::IfLet(..) => {
                        if !expr.attrs.is_empty() {
                            // Just point to the first attribute in there...
                            let span = expr.attrs[0].span;

                            self.span_err(span,
                                          "attributes are not yet allowed on `if` \
                                          expressions");
                        }
                    }
                    _ => {}
                }
                expr
            })
        )
}

// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
    let segment = self.parse_path_segment(PathStyle::Expr, true)?;
    Ok(match self.token {
        token::OpenDelim(token::Paren) => {
            // Method call `expr.f()`
            let mut args = self.parse_unspanned_seq(
                &token::OpenDelim(token::Paren),
                &token::CloseDelim(token::Paren),
                SeqSep::trailing_allowed(token::Comma),
                |p| Ok(p.parse_expr()?)
            )?;
            // The receiver becomes the first argument of the MethodCall node.
            args.insert(0, self_arg);

            let span = lo.to(self.prev_span);
            self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
        }
        _ => {
            // Field access `expr.f`
            if let Some(args) = segment.args {
                self.span_err(args.span(),
                              "field expressions may not have generic arguments");
            }

            let span = lo.to(self.prev_span);
            self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
        }
    })
}

/// Worker loop for `parse_dot_or_call_expr_with`: repeatedly consumes `?`,
/// `.suffix`, `(args)` and `[index]` postfix forms.
fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
    let mut e = e0;
    let mut hi;
    loop {
        // expr?
        while self.eat(&token::Question) {
            let hi = self.prev_span;
            e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
        }

        // expr.f
        if self.eat(&token::Dot) {
            match self.token {
                token::Ident(..)
                => {
                    e = self.parse_dot_suffix(e, lo)?;
                }
                token::Literal(token::Integer(name), _) => {
                    // Tuple-field access `expr.0`.
                    let span = self.span;
                    self.bump();
                    let field = ExprKind::Field(e, Ident::new(name, span));
                    e = self.mk_expr(lo.to(span), field, ThinVec::new());
                }
                token::Literal(token::Float(n), _suf) => {
                    // `x.1.0` lexes as a float literal `1.0` — diagnose and
                    // suggest parenthesizing the first index.
                    self.bump();
                    let fstr = n.as_str();
                    let mut err = self.diagnostic()
                        .struct_span_err(self.prev_span,
                                         &format!("unexpected token: `{}`", n));
                    err.span_label(self.prev_span, "unexpected token");
                    if fstr.chars().all(|x| "0123456789.".contains(x)) {
                        let float = match fstr.parse::<f64>().ok() {
                            Some(f) => f,
                            None => continue,
                        };
                        let sugg = pprust::to_string(|s| {
                            use print::pprust::PrintState;
                            s.popen()?;
                            s.print_expr(&e)?;
                            s.s.word( ".")?;
                            s.print_usize(float.trunc() as usize)?;
                            s.pclose()?;
                            s.s.word(".")?;
                            s.s.word(fstr.splitn(2, ".").last().unwrap())
                        });
                        err.span_suggestion_with_applicability(
                            lo.to(self.prev_span),
                            "try parenthesizing the first index",
                            sugg,
                            Applicability::MachineApplicable
                        );
                    }
                    return Err(err);
                }
                _ => {
                    // FIXME Could factor this out into non_fatal_unexpected or something.
                    let actual = self.this_token_to_string();
                    self.span_err(self.span, &format!("unexpected token: `{}`", actual));
                }
            }
            continue;
        }
        if self.expr_is_complete(&e) { break; }
        match self.token {
            // expr(...)
            token::OpenDelim(token::Paren) => {
                let es = self.parse_unspanned_seq(
                    &token::OpenDelim(token::Paren),
                    &token::CloseDelim(token::Paren),
                    SeqSep::trailing_allowed(token::Comma),
                    |p| Ok(p.parse_expr()?)
                )?;
                hi = self.prev_span;

                let nd = self.mk_call(e, es);
                e = self.mk_expr(lo.to(hi), nd, ThinVec::new());
            }

            // expr[...]
            // Could be either an index expression or a slicing expression.
            token::OpenDelim(token::Bracket) => {
                self.bump();
                let ix = self.parse_expr()?;
                hi = self.span;
                self.expect(&token::CloseDelim(token::Bracket))?;
                let index = self.mk_index(e, ix);
                e = self.mk_expr(lo.to(hi), index, ThinVec::new())
            }
            _ => return Ok(e)
        }
    }
    return Ok(e);
}

/// Replaces a `$ident` macro variable or interpolated ident/lifetime token
/// with its concrete token form; unknown `$name` variables are an error.
crate fn process_potential_macro_variable(&mut self) {
    let (token, span) = match self.token {
        token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
                         self.look_ahead(1, |t| t.is_ident()) => {
            self.bump();
            let name = match self.token {
                token::Ident(ident, _) => ident,
                _ => unreachable!()
            };
            let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
            err.span_label(self.span, "unknown macro variable");
            err.emit();
            return
        }
        token::Interpolated(ref nt) => {
            self.meta_var_span = Some(self.span);
            // Interpolated identifier and lifetime tokens are replaced with usual identifier
            // and lifetime tokens, so the former are never encountered during normal parsing.
            match nt.0 {
                token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
                token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
                _ => return,
            }
        }
        _ => return,
    };
    self.token = token;
    self.span = span;
}

/// parse a single token tree from the input.
crate fn parse_token_tree(&mut self) -> TokenTree {
    match self.token {
        token::OpenDelim(..) => {
            // Pop the whole delimited frame off the token cursor in one step.
            let frame = mem::replace(&mut self.token_cursor.frame,
                                     self.token_cursor.stack.pop().unwrap());
            self.span = frame.span;
            self.bump();
            TokenTree::Delimited(frame.span, Delimited {
                delim: frame.delim,
                tts: frame.tree_cursor.original_stream().into(),
            })
        },
        token::CloseDelim(_) | token::Eof => unreachable!(),
        _ => {
            let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
            self.bump();
            TokenTree::Token(span, token)
        }
    }
}

// parse a stream of tokens into a list of TokenTree's,
// up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
    let mut tts = Vec::new();
    while self.token != token::Eof {
        tts.push(self.parse_token_tree());
    }
    Ok(tts)
}

/// Collects token trees into a stream until EOF or a closing delimiter.
pub fn parse_tokens(&mut self) -> TokenStream {
    let mut result = Vec::new();
    loop {
        match self.token {
            token::Eof | token::CloseDelim(..) => break,
            _ => result.push(self.parse_token_tree().into()),
        }
    }
    TokenStream::concat(result)
}

/// Parse a prefix-unary-operator expr
fn parse_prefix_expr(&mut self,
                     already_parsed_attrs: Option<ThinVec<Attribute>>)
                     -> PResult<'a, P<Expr>> {
    let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
    let lo = self.span;
    // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
    let (hi, ex) = match self.token {
        token::Not => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Not, e))
        }
        // Suggest `!` for bitwise negation when encountering a `~`
        token::Tilde => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            let span_of_tilde = lo;
            let mut err = self.diagnostic()
                .struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
            err.span_suggestion_short_with_applicability(
                span_of_tilde,
                "use `!` to perform bitwise negation",
                "!".to_owned(),
                Applicability::MachineApplicable
            );
            err.emit();
            // Recover by treating `~e` as `!e`.
            (lo.to(span), self.mk_unary(UnOp::Not, e))
        }
        token::BinOp(token::Minus) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Neg, e))
        }
        token::BinOp(token::Star) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), self.mk_unary(UnOp::Deref, e))
        }
        token::BinOp(token::And) | token::AndAnd => {
            self.expect_and()?;
            let m = self.parse_mutability();
            let e = self.parse_prefix_expr(None);
            let (span, e) =
            self.interpolated_or_expr_span(e)?;
            (lo.to(span), ExprKind::AddrOf(m, e))
        }
        token::Ident(..) if self.token.is_keyword(keywords::In) => {
            // Obsolete `in PLACE { BLOCK }` placement syntax.
            self.bump();
            let place = self.parse_expr_res(
                Restrictions::NO_STRUCT_LITERAL,
                None,
            )?;
            let blk = self.parse_block()?;
            let span = blk.span;
            let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
            (lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
        }
        token::Ident(..) if self.token.is_keyword(keywords::Box) => {
            self.bump();
            let e = self.parse_prefix_expr(None);
            let (span, e) = self.interpolated_or_expr_span(e)?;
            (lo.to(span), ExprKind::Box(e))
        }
        token::Ident(..) if self.token.is_ident_named("not") => {
            // `not` is just an ordinary identifier in Rust-the-language,
            // but as `rustc`-the-compiler, we can issue clever diagnostics
            // for confused users who really want to say `!`
            let token_cannot_continue_expr = |t: &token::Token| match *t {
                // These tokens can start an expression after `!`, but
                // can't continue an expression after an ident
                token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
                token::Literal(..) | token::Pound => true,
                token::Interpolated(ref nt) => match nt.0 {
                    token::NtIdent(..) | token::NtExpr(..) |
                    token::NtBlock(..) | token::NtPath(..) => true,
                    _ => false,
                },
                _ => false
            };
            let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
            if cannot_continue_expr {
                self.bump();
                // Emit the error ...
                let mut err = self.diagnostic()
                    .struct_span_err(self.span,
                                     &format!("unexpected {} after identifier",
                                              self.this_token_descr()));
                // span the `not` plus trailing whitespace to avoid
                // trailing whitespace after the `!` in our suggestion
                let to_replace = self.sess.source_map()
                                     .span_until_non_whitespace(lo.to(self.span));
                err.span_suggestion_short_with_applicability(
                    to_replace,
                    "use `!` to perform logical negation",
                    "!".to_owned(),
                    Applicability::MachineApplicable
                );
                err.emit();
                // —and recover!
                // (just as if we were in the block
                // for the `token::Not` arm)
                let e = self.parse_prefix_expr(None);
                let (span, e) = self.interpolated_or_expr_span(e)?;
                (lo.to(span), self.mk_unary(UnOp::Not, e))
            } else {
                return self.parse_dot_or_call_expr(Some(attrs));
            }
        }
        _ => { return self.parse_dot_or_call_expr(Some(attrs)); }
    };
    return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}

/// Parse an associative expression
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
fn parse_assoc_expr(&mut self,
                    already_parsed_attrs: Option<ThinVec<Attribute>>)
                    -> PResult<'a, P<Expr>> {
    self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}

/// Parse an associative expression with operators of at least `min_prec` precedence
fn parse_assoc_expr_with(&mut self,
                         min_prec: usize,
                         lhs: LhsExpr)
                         -> PResult<'a, P<Expr>> {
    let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
        expr
    } else {
        let attrs = match lhs {
            LhsExpr::AttributesParsed(attrs) => Some(attrs),
            _ => None,
        };
        // A leading range operator means there is no LHS at all.
        if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
            return self.parse_prefix_range_expr(attrs);
        } else {
            self.parse_prefix_expr(attrs)?
        }
    };

    if self.expr_is_complete(&lhs) {
        // Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
        return Ok(lhs);
    }
    self.expected_tokens.push(TokenType::Operator);
    while let Some(op) = AssocOp::from_token(&self.token) {
        // Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
        // it refers to. Interpolated identifiers are unwrapped early and never show up here
        // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
        // it as "interpolated", it doesn't change the answer for non-interpolated idents.
        let lhs_span = match (self.prev_token_kind, &lhs.node) {
            (PrevTokenKind::Interpolated, _) => self.prev_span,
            (PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
                if path.segments.len() == 1 => self.prev_span,
            _ => lhs.span,
        };

        let cur_op_span = self.span;
        // Assignment-like operators forbid struct literals in their RHS
        // (e.g. `x = S {}` inside an `if` condition).
        let restrictions = if op.is_assign_like() {
            self.restrictions & Restrictions::NO_STRUCT_LITERAL
        } else {
            self.restrictions
        };
        if op.precedence() < min_prec {
            break;
        }
        // Check for deprecated `...` syntax
        if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
            self.err_dotdotdot_syntax(self.span);
        }

        self.bump();
        if op.is_comparison() {
            self.check_no_chained_comparison(&lhs, &op);
        }
        // Special cases:
        if op == AssocOp::As {
            lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
            continue
        } else if op == AssocOp::Colon {
            // Type ascription `expr: Ty`.
            lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
                Ok(lhs) => lhs,
                Err(mut err) => {
                    err.span_label(self.span,
                                   "expecting a type here because of type ascription");
                    let cm = self.sess.source_map();
                    let cur_pos = cm.lookup_char_pos(self.span.lo());
                    let op_pos = cm.lookup_char_pos(cur_op_span.hi());
                    // A `:` at end-of-line is probably a typo'd `;`.
                    if cur_pos.line != op_pos.line {
                        err.span_suggestion_with_applicability(
                            cur_op_span,
                            "try using a semicolon",
                            ";".to_string(),
                            Applicability::MaybeIncorrect // speculative
                        );
                    }
                    return Err(err);
                }
            };
            continue
        } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
            // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
            // generalise it to the Fixity::None code.
            //
            // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other
            // two variants are handled with `parse_prefix_range_expr` call above.
            let rhs = if self.is_at_start_of_range_notation_rhs() {
                Some(self.parse_assoc_expr_with(op.precedence() + 1,
                                                LhsExpr::NotYetParsed)?)
} else { None }; let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs { x.span } else { cur_op_span }); let limits = if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed }; let r = try!(self.mk_range(Some(lhs), rhs, limits)); lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); break } let rhs = match op.fixity() { Fixity::Right => self.with_res( restrictions - Restrictions::STMT_EXPR, |this| { this.parse_assoc_expr_with(op.precedence(), LhsExpr::NotYetParsed) }), Fixity::Left => self.with_res( restrictions - Restrictions::STMT_EXPR, |this| { this.parse_assoc_expr_with(op.precedence() + 1, LhsExpr::NotYetParsed) }), // We currently have no non-associative operators that are not handled above by // the special cases. The code is here only for future convenience. Fixity::None => self.with_res( restrictions - Restrictions::STMT_EXPR, |this| { this.parse_assoc_expr_with(op.precedence() + 1, LhsExpr::NotYetParsed) }), }?; let span = lhs_span.to(rhs.span); lhs = match op { AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight | AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); self.mk_expr(span, binary, ThinVec::new()) } AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), AssocOp::ObsoleteInPlace => self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, token::Minus => BinOpKind::Sub, token::Star => BinOpKind::Mul, token::Slash => BinOpKind::Div, token::Percent => BinOpKind::Rem, token::Caret => BinOpKind::BitXor, token::And => BinOpKind::BitAnd, token::Or 
                    => BinOpKind::BitOr,
                    token::Shl => BinOpKind::Shl,
                    token::Shr => BinOpKind::Shr,
                };
                let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                self.mk_expr(span, aopexpr, ThinVec::new())
            }
            AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
                self.bug("AssocOp should have been handled by special case")
            }
        };

        if op.fixity() == Fixity::None { break }
    }
    Ok(lhs)
}

/// Parses the RHS of `expr as Ty` / `expr: Ty`; `expr_kind` selects whether a
/// Cast or Type-ascription node is built. Recovers from `x as usize < y`
/// being misparsed as a generic type.
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
                       expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
                       -> PResult<'a, P<Expr>> {
    let mk_expr = |this: &mut Self, rhs: P<Ty>| {
        this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
    };

    // Save the state of the parser before parsing type normally, in case there is a
    // LessThan comparison after this cast.
    let parser_snapshot_before_type = self.clone();
    match self.parse_ty_no_plus() {
        Ok(rhs) => {
            Ok(mk_expr(self, rhs))
        }
        Err(mut type_err) => {
            // Rewind to before attempting to parse the type with generics, to recover
            // from situations like `x as usize < y` in which we first tried to parse
            // `usize < y` as a type with generic arguments.
            let parser_snapshot_after_type = self.clone();
            mem::replace(self, parser_snapshot_before_type);

            match self.parse_path(PathStyle::Expr) {
                Ok(path) => {
                    let (op_noun, op_verb) = match self.token {
                        token::Lt => ("comparison", "comparing"),
                        token::BinOp(token::Shl) => ("shift", "shifting"),
                        _ => {
                            // We can end up here even without `<` being the next token, for
                            // example because `parse_ty_no_plus` returns `Err` on keywords,
                            // but `parse_path` returns `Ok` on them due to error recovery.
                            // Return original error and parser state.
                            mem::replace(self, parser_snapshot_after_type);
                            return Err(type_err);
                        }
                    };

                    // Successfully parsed the type path leaving a `<` yet to parse.
                    type_err.cancel();

                    // Report non-fatal diagnostics, keep `x as usize` as an expression
                    // in AST and continue parsing.
let msg = format!("`<` is interpreted as a start of generic \ arguments for `{}`, not a {}", path, op_noun); let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg); err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span), "interpreted as generic arguments"); err.span_label(self.span, format!("not interpreted as {}", op_noun)); let expr = mk_expr(self, P(Ty { span: path.span, node: TyKind::Path(None, path), id: ast::DUMMY_NODE_ID })); let expr_str = self.sess.source_map().span_to_snippet(expr.span) .unwrap_or(pprust::expr_to_string(&expr)); err.span_suggestion_with_applicability( expr.span, &format!("try {} the cast value", op_verb), format!("({})", expr_str), Applicability::MachineApplicable ); err.emit(); Ok(expr) } Err(mut path_err) => { // Couldn't parse as a path, return original error and parser state. path_err.cancel(); mem::replace(self, parser_snapshot_after_type); Err(type_err) } } } } } /// Produce an error if comparison operators are chained (RFC #558). 
/// We only need to check lhs, not rhs, because all comparison ops
/// have same precedence and are left-associative
fn check_no_chained_comparison(&mut self, lhs: &Expr, outer_op: &AssocOp) {
    debug_assert!(outer_op.is_comparison(),
                  "check_no_chained_comparison: {:?} is not comparison",
                  outer_op);
    match lhs.node {
        ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
            // respan to include both operators
            let op_span = op.span.to(self.span);
            let mut err = self.diagnostic().struct_span_err(op_span,
                "chained comparison operators require parentheses");
            if op.node == BinOpKind::Lt &&
                *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
                *outer_op == AssocOp::Greater  // even in a case like the following:
            {                                  //     Foo<Bar<Baz<Qux, ()>>>
                err.help(
                    "use `::<...>` instead of `<...>` if you meant to specify type arguments");
                err.help("or use `(...)` if you meant to specify fn arguments");
            }
            err.emit();
        }
        _ => {}
    }
}

/// Parse prefix-forms of range notation: `..expr`, `..`, `..=expr`
fn parse_prefix_range_expr(&mut self,
                           already_parsed_attrs: Option<ThinVec<Attribute>>)
                           -> PResult<'a, P<Expr>> {
    // Check for deprecated `...` syntax
    if self.token == token::DotDotDot {
        self.err_dotdotdot_syntax(self.span);
    }

    debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
                  "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                  self.token);
    let tok = self.token.clone();
    let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
    let lo = self.span;
    let mut hi = self.span;
    self.bump();
    let opt_end = if self.is_at_start_of_range_notation_rhs() {
        // RHS must be parsed with more associativity than the dots.
        let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
        Some(self.parse_assoc_expr_with(next_prec,
                                        LhsExpr::NotYetParsed)
            .map(|x| {
                // Extend the range's span to cover the parsed end expression.
                hi = x.span;
                x
            })?)
    } else {
        None
    };
    let limits = if tok == token::DotDot {
        RangeLimits::HalfOpen
    } else {
        RangeLimits::Closed
    };

    let r = try!(self.mk_range(None, opt_end, limits));
    Ok(self.mk_expr(lo.to(hi), r, attrs))
}

/// Whether the current token can begin the right-hand side of range notation.
fn is_at_start_of_range_notation_rhs(&self) -> bool {
    if self.token.can_begin_expr() {
        // parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
        if self.token == token::OpenDelim(token::Brace) {
            return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
        }
        true
    } else {
        false
    }
}

/// Parse an 'if' or 'if let' expression ('if' token already eaten)
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    if self.check_keyword(keywords::Let) {
        return self.parse_if_let_expr(attrs);
    }
    let lo = self.prev_span;
    let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;

    // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
    // verify that the last statement is either an implicit return (no `;`) or an explicit
    // return. This won't catch blocks with an explicit `return`, but that would be caught by
    // the dead code lint.
// Recovery for a missing `if` condition: the user wrote `if {} else {}` (the
// block was consumed as the "condition") or `if else`. Fix applied here: the
// diagnostic string previously read "statemement" (typo); corrected to
// "statement".
if self.eat_keyword(keywords::Else) || !cond.returns() {
    let sp = self.sess.source_map().next_point(lo);
    let mut err = self.diagnostic()
        .struct_span_err(sp, "missing condition for `if` statement");
    err.span_label(sp, "expected if condition here");
    return Err(err)
}
let not_block = self.token != token::OpenDelim(token::Brace);
let thn = self.parse_block().map_err(|mut err| {
    if not_block {
        err.span_label(lo, "this `if` statement has a condition, but no block");
    }
    err
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
if self.eat_keyword(keywords::Else) {
    let elexpr = self.parse_else_expr()?;
    hi = elexpr.span;
    els = Some(elexpr);
}
Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
}

/// Parse an 'if let' expression ('if' token already eaten)
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
                     -> PResult<'a, P<Expr>> {
    let lo = self.prev_span;
    self.expect_keyword(keywords::Let)?;
    let pats = self.parse_pats()?;
    self.expect(&token::Eq)?;
    let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    let thn = self.parse_block()?;
    let (hi, els) = if self.eat_keyword(keywords::Else) {
        let expr = self.parse_else_expr()?;
        (expr.span, Some(expr))
    } else {
        (thn.span, None)
    };
    Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
}

/// Parse a closure: `move |args| expr`, optionally `static` and/or `async`.
// `move |args| expr`
fn parse_lambda_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    let lo = self.span;
    let movability = if self.eat_keyword(keywords::Static) {
        Movability::Static
    } else {
        Movability::Movable
    };
    // `async` closures are only recognized on the 2018 edition.
    let asyncness = if self.span.edition() >= Edition::Edition2018 {
        self.parse_asyncness()
    } else {
        IsAsync::NotAsync
    };
    let capture_clause = if self.eat_keyword(keywords::Move) {
        CaptureBy::Value
    } else {
        CaptureBy::Ref
    };
    let decl = self.parse_fn_block_decl()?;
    let decl_hi = self.prev_span;
    let body = match decl.output {
        FunctionRetTy::Default(_) => {
            let restrictions = self.restrictions - Restrictions::STMT_EXPR;
            self.parse_expr_res(restrictions, None)?
        },
        _ => {
            // If an explicit return type is given, require a
            // block to appear (RFC 968).
            let body_lo = self.span;
            self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
        }
    };

    Ok(self.mk_expr(
        lo.to(body.span),
        ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
        attrs))
}

// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
    if self.eat_keyword(keywords::If) {
        return self.parse_if_expr(ThinVec::new());
    } else {
        let blk = self.parse_block()?;
        return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()));
    }
}

/// Parse a 'for' .. 'in' expression ('for' token already eaten)
fn parse_for_expr(&mut self, opt_label: Option<Label>,
                  span_lo: Span,
                  mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    // Parse: `for <src_pat> in <src_expr> <src_loop_block>`

    let pat = self.parse_top_level_pat()?;
    if !self.eat_keyword(keywords::In) {
        // Recovery: suggest the missing `in` and keep parsing the iterable.
        let in_span = self.prev_span.between(self.span);
        let mut err = self.sess.span_diagnostic
            .struct_span_err(in_span, "missing `in` in `for` loop");
        err.span_suggestion_short_with_applicability(
            in_span, "try adding `in` here", " in ".into(),
            // has been misleading, at least in the past (closed Issue #48492)
            Applicability::MaybeIncorrect
        );
        err.emit();
    }
    let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);

    let hi = self.prev_span;
    Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}

/// Parse a 'while' or 'while let' expression ('while' token already eaten)
fn parse_while_expr(&mut self, opt_label: Option<Label>,
                    span_lo: Span,
                    mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    if self.token.is_keyword(keywords::Let) {
        return self.parse_while_let_expr(opt_label, span_lo, attrs);
    }
    let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    let (iattrs, body) =
self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
}

/// Parse a 'while let' expression ('while' token already eaten)
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
                        span_lo: Span,
                        mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    self.expect_keyword(keywords::Let)?;
    let pats = self.parse_pats()?;
    self.expect(&token::Eq)?;
    let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    let span = span_lo.to(body.span);
    return Ok(self.mk_expr(span, ExprKind::WhileLet(pats, expr, body, opt_label), attrs));
}

// parse `loop {...}`, `loop` token already eaten
fn parse_loop_expr(&mut self, opt_label: Option<Label>,
                   span_lo: Span,
                   mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    let span = span_lo.to(body.span);
    Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
}

/// Parse an `async move {...}` expression
pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
    -> PResult<'a, P<Expr>>
{
    let span_lo = self.span;
    self.expect_keyword(keywords::Async)?;
    let capture_clause = if self.eat_keyword(keywords::Move) {
        CaptureBy::Value
    } else {
        CaptureBy::Ref
    };
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    Ok(self.mk_expr(
        span_lo.to(body.span),
        ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
}

/// Parse a `do catch {...}` expression (`do catch` token already eaten)
fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
    -> PResult<'a, P<Expr>>
{
    let (iattrs, body) = self.parse_inner_attrs_and_block()?;
    attrs.extend(iattrs);
    Ok(self.mk_expr(span_lo.to(body.span), ExprKind::Catch(body), attrs))
}

// `match` token already eaten
fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
    let match_span = self.prev_span;
    let lo = self.prev_span;
    let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                           None)?;
    if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
        // `match expr;` — the `match` keyword was likely spurious.
        if self.token == token::Token::Semi {
            e.span_suggestion_short_with_applicability(
                match_span,
                "try removing this `match`",
                "".to_owned(),
                Applicability::MaybeIncorrect // speculative
            );
        }
        return Err(e)
    }
    attrs.extend(self.parse_inner_attributes()?);

    let mut arms: Vec<Arm> = Vec::new();
    while self.token != token::CloseDelim(token::Brace) {
        match self.parse_arm() {
            Ok(arm) => arms.push(arm),
            Err(mut e) => {
                // Recover by skipping to the end of the block.
                e.emit();
                self.recover_stmt();
                let span = lo.to(self.span);
                if self.token == token::CloseDelim(token::Brace) {
                    self.bump();
                }
                return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
            }
        }
    }
    let hi = self.span;
    self.bump();
    return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
}

/// Parse a single `match` arm: `| pats (if guard)? => body ,?`.
crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
    maybe_whole!(self, NtArm, |x| x);

    let attrs = self.parse_outer_attributes()?;
    // Allow a '|' before the pats (RFC 1925)
    self.eat(&token::BinOp(token::Or));
    let pats = self.parse_pats()?;
    let guard = if self.eat_keyword(keywords::If) {
        Some(self.parse_expr()?)
    } else {
        None
    };
    let arrow_span = self.span;
    self.expect(&token::FatArrow)?;
    let arm_start_span = self.span;

    let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
        .map_err(|mut err| {
            err.span_label(arrow_span, "while parsing the `match` arm starting here");
            err
        })?;

    // A comma is required after a non-block arm body, unless this is the
    // final arm before the closing brace.
    let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
        && self.token != token::CloseDelim(token::Brace);

    if require_comma {
        let cm = self.sess.source_map();
        self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
            .map_err(|mut err| {
                match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
                    (Ok(ref expr_lines), Ok(ref arm_start_lines))
                    if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
                        && expr_lines.lines.len() == 2
                        && self.token == token::FatArrow => {
                        // We check whether there's any trailing code in the parse span,
                        // if there isn't, we very likely have the following:
                        //
                        // X |     &Y => "y"
                        //   |        --    - missing comma
                        //   |        |
                        //   |        arrow_span
                        // X |     &X => "x"
                        //   |      - ^^ self.span
                        //   |      |
                        //   |      parsed until here as `"y" & X`
                        err.span_suggestion_short_with_applicability(
                            cm.next_point(arm_start_span),
                            "missing a comma here to end this `match` arm",
                            ",".to_owned(),
                            Applicability::MachineApplicable
                        );
                    }
                    _ => {
                        err.span_label(arrow_span,
                                       "while parsing the `match` arm starting here");
                    }
                }
                err
            })?;
    } else {
        self.eat(&token::Comma);
    }

    Ok(ast::Arm {
        attrs,
        pats,
        guard,
        body: expr,
    })
}

/// Parse an expression
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
    self.parse_expr_res(Restrictions::empty(), None)
}

/// Evaluate the closure with restrictions in place.
///
/// After the closure is evaluated, restrictions are reset.
fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
    where F: FnOnce(&mut Self) -> T
{
    // Save, override, run, restore — the restrictions apply only inside `f`.
    let old = self.restrictions;
    self.restrictions = r;
    let r = f(self);
    self.restrictions = old;
    return r;

}

/// Parse an expression, subject to the given restrictions
fn parse_expr_res(&mut self, r: Restrictions,
                  already_parsed_attrs: Option<ThinVec<Attribute>>)
                  -> PResult<'a, P<Expr>> {
    self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}

/// Parse the RHS of a local variable declaration (e.g. '= 14;')
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
    if self.check(&token::Eq) {
        self.bump();
        Ok(Some(self.parse_expr()?))
    } else if skip_eq {
        // The caller already diagnosed a bad/missing `=` (e.g. `let x: = e`);
        // parse the initializer anyway so recovery can continue.
        Ok(Some(self.parse_expr()?))
    } else {
        Ok(None)
    }
}

/// Parse patterns, separated by '|' s
fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
    let mut pats = Vec::new();
    loop {
        pats.push(self.parse_top_level_pat()?);

        if self.token == token::OrOr {
            // `||` never separates patterns; suggest a single `|` and recover.
            let mut err = self.struct_span_err(self.span,
                                               "unexpected token `||` after pattern");
            err.span_suggestion_with_applicability(
                self.span,
                "use a single `|` to specify multiple patterns",
                "|".to_owned(),
                Applicability::MachineApplicable
            );
            err.emit();
            self.bump();
        } else if self.check(&token::BinOp(token::Or)) {
            self.bump();
        } else {
            return Ok(pats);
        }
    };
}

// Parses a parenthesized list of patterns like
// `()`, `(p)`, `(p,)`, `(p, q)`, or `(p, .., q)`. Returns:
// - a vector of the patterns that were parsed
// - an option indicating the index of the `..` element
// - a boolean indicating whether a trailing comma was present.
// Trailing commas are significant because (p) and (p,) are different patterns.
fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
    self.expect(&token::OpenDelim(token::Paren))?;
    let result = self.parse_pat_list()?;
    self.expect(&token::CloseDelim(token::Paren))?;
    Ok(result)
}

/// Parse a comma-separated pattern list with at most one `..` element;
/// see `parse_parenthesized_pat_list` above for the meaning of the result.
fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
    let mut fields = Vec::new();
    let mut ddpos = None;
    let mut trailing_comma = false;
    loop {
        if self.eat(&token::DotDot) {
            if ddpos.is_none() {
                ddpos = Some(fields.len());
            } else {
                // Emit a friendly error, ignore `..` and continue parsing
                self.span_err(self.prev_span,
                              "`..` can only be used once per tuple or tuple struct pattern");
            }
        } else if !self.check(&token::CloseDelim(token::Paren)) {
            fields.push(self.parse_pat()?);
        } else {
            break
        }

        trailing_comma = self.eat(&token::Comma);
        if !trailing_comma {
            break
        }
    }

    if ddpos == Some(fields.len()) && trailing_comma {
        // `..` needs to be followed by `)` or `, pat`, `..,)` is disallowed.
        self.span_err(self.prev_span, "trailing comma is not permitted after `..`");
    }

    Ok((fields, ddpos, trailing_comma))
}

/// Parse the elements of a slice pattern `[before.., slice?, ..after]`,
/// returning the patterns before the `..`, the optional slice binding, and
/// the patterns after it.
fn parse_pat_vec_elements(
    &mut self,
) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
    let mut before = Vec::new();
    let mut slice = None;
    let mut after = Vec::new();
    let mut first = true;
    let mut before_slice = true;

    while self.token != token::CloseDelim(token::Bracket) {
        if first {
            first = false;
        } else {
            self.expect(&token::Comma)?;

            if self.token == token::CloseDelim(token::Bracket)
                    && (before_slice || !after.is_empty()) {
                break
            }
        }

        if before_slice {
            if self.eat(&token::DotDot) {
                // Bare `..` (not `pat..`) acts as a wildcard slice binding.
                if self.check(&token::Comma) ||
                        self.check(&token::CloseDelim(token::Bracket)) {
                    slice = Some(P(Pat {
                        id: ast::DUMMY_NODE_ID,
                        node: PatKind::Wild,
                        span: self.prev_span,
                    }));
                    before_slice = false;
                }
                continue
            }
        }

        let subpat = self.parse_pat()?;
        if before_slice && self.eat(&token::DotDot) {
            // `pat..` — this pattern binds the sliced middle section.
            slice = Some(subpat);
            before_slice = false;
        } else if before_slice {
            before.push(subpat);
        } else {
            after.push(subpat);
        }
    }

    Ok((before, slice, after))
}

/// Parse a single field of a struct pattern: either `fieldname: pat` or the
/// shorthand `(box) (ref) (mut) fieldname`.
fn parse_pat_field(
    &mut self,
    lo: Span,
    attrs: Vec<Attribute>
) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
    // Check if a colon exists one ahead. This means we're parsing a fieldname.
    let hi;
    let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
        // Parsing a pattern of the form "fieldname: pat"
        let fieldname = self.parse_field_name()?;
        self.bump();
        let pat = self.parse_pat()?;
        hi = pat.span;
        (pat, fieldname, false)
    } else {
        // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
        let is_box = self.eat_keyword(keywords::Box);
        let boxed_span = self.span;
        let is_ref = self.eat_keyword(keywords::Ref);
        let is_mut = self.eat_keyword(keywords::Mut);
        let fieldname = self.parse_ident()?;
        hi = self.prev_span;

        let bind_type = match (is_ref, is_mut) {
            (true, true) => BindingMode::ByRef(Mutability::Mutable),
            (true, false) => BindingMode::ByRef(Mutability::Immutable),
            (false, true) => BindingMode::ByValue(Mutability::Mutable),
            (false, false) => BindingMode::ByValue(Mutability::Immutable),
        };
        let fieldpat = P(Pat {
            id: ast::DUMMY_NODE_ID,
            node: PatKind::Ident(bind_type, fieldname, None),
            span: boxed_span.to(hi),
        });

        let subpat = if is_box {
            P(Pat {
                id: ast::DUMMY_NODE_ID,
                node: PatKind::Box(fieldpat),
                span: lo.to(hi),
            })
        } else {
            fieldpat
        };
        (subpat, fieldname, true)
    };

    Ok(source_map::Spanned {
        span: lo.to(hi),
        node: ast::FieldPat {
            ident: fieldname,
            pat: subpat,
            is_shorthand,
            attrs: attrs.into(),
       }
    })
}

/// Parse the fields of a struct-like pattern
fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
    let mut fields = Vec::new();
    let mut etc = false;
    let mut ate_comma = true;
    let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
    let mut etc_span = None;

    while self.token != token::CloseDelim(token::Brace) {
        let attrs = self.parse_outer_attributes()?;
        let lo = self.span;

        // check that a comma comes after every field
        if !ate_comma {
            let err = self.struct_span_err(self.prev_span, "expected `,`");
            return Err(err);
        }
        ate_comma = false;

        if self.check(&token::DotDot) || self.token == token::DotDotDot {
            etc = true;
            let mut etc_sp = self.span;

            if self.token == token::DotDotDot { // Issue #46718
                // Accept `...` as if it were `..` to avoid further errors
                let mut err = self.struct_span_err(self.span,
                                                   "expected field pattern, found `...`");
                err.span_suggestion_with_applicability(
                    self.span,
                    "to omit remaining fields, use one fewer `.`",
                    "..".to_owned(),
                    Applicability::MachineApplicable
                );
                err.emit();
            }
            self.bump();  // `..` || `...`

            if self.token == token::CloseDelim(token::Brace) {
                etc_span = Some(etc_sp);
                break;
            }
            // `..` was not at the end of the field list: report it, but try to
            // recover if the rest of the pattern looks well formed.
            let token_str = self.this_token_to_string();
            let mut err = self.fatal(&format!("expected `}}`, found `{}`", token_str));

            err.span_label(self.span, "expected `}`");
            let mut comma_sp = None;
            if self.token == token::Comma { // Issue #49257
                etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
                err.span_label(etc_sp,
                               "`..` must be at the end and cannot have a trailing comma");
                comma_sp = Some(self.span);
                self.bump();
                ate_comma = true;
            }

            etc_span = Some(etc_sp);
            if self.token == token::CloseDelim(token::Brace) {
                // If the struct looks otherwise well formed, recover and continue.
                if let Some(sp) = comma_sp {
                    err.span_suggestion_short(sp, "remove this comma", "".into());
                }
                err.emit();
                break;
            } else if self.token.is_ident() && ate_comma {
                // Accept fields coming after `..,`.
                // This way we avoid "pattern missing fields" errors afterwards.
                // We delay this error until the end in order to have a span for a
                // suggested fix.
                if let Some(mut delayed_err) = delayed_err {
                    delayed_err.emit();
                    return Err(err);
                } else {
                    delayed_err = Some(err);
                }
            } else {
                if let Some(mut err) = delayed_err {
                    err.emit();
                }
                return Err(err);
            }
        }

        fields.push(match self.parse_pat_field(lo, attrs) {
            Ok(field) => field,
            Err(err) => {
                if let Some(mut delayed_err) = delayed_err {
                    delayed_err.emit();
                }
                return Err(err);
            }
        });
        ate_comma = self.eat(&token::Comma);
    }

    if let Some(mut err) = delayed_err {
        // Emit the delayed "misplaced `..`" error, now with a whole-list fix.
        if let Some(etc_span) = etc_span {
            err.multipart_suggestion(
                "move the `..` to the end of the field list",
                vec![
                    (etc_span, "".into()),
                    (self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
                ],
            );
        }
        err.emit();
    }
    return Ok((fields, etc));
}

/// Parse the end bound of a range pattern: a (possibly qualified) path or a
/// literal with optional leading minus.
fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
    if self.token.is_path_start() {
        let lo = self.span;
        let (qself, path) = if self.eat_lt() {
            // Parse a qualified path
            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
            (Some(qself), path)
        } else {
            // Parse an unqualified path
            (None, self.parse_path(PathStyle::Expr)?)
        };
        let hi = self.prev_span;
        Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
    } else {
        self.parse_literal_maybe_minus()
    }
}

// helper function to decide whether to parse as ident binding or to try to do
// something more complex like range patterns
fn parse_as_ident(&mut self) -> bool {
    self.look_ahead(1, |t| match *t {
        token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
        token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
        // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
        // range pattern branch
        token::DotDot => None,
        _ => Some(true),
    }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
        token::Comma | token::CloseDelim(token::Bracket) => true,
        _ => false,
    }))
}

/// A wrapper around `parse_pat` with some special error handling for the
/// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
/// to subpatterns within such).
fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
    let pat = self.parse_pat()?;
    if self.token == token::Comma {
        // An unexpected comma after a top-level pattern is a clue that the
        // user (perhaps more accustomed to some other language) forgot the
        // parentheses in what should have been a tuple pattern; return a
        // suggestion-enhanced error here rather than choking on the comma
        // later.
        let comma_span = self.span;
        self.bump();
        if let Err(mut err) = self.parse_pat_list() {
            // We didn't expect this to work anyway; we just wanted
            // to advance to the end of the comma-sequence so we know
            // the span to suggest parenthesizing
            err.cancel();
        }
        let seq_span = pat.span.to(self.prev_span);
        let mut err = self.struct_span_err(comma_span,
                                           "unexpected `,` in pattern");
        if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
            err.span_suggestion_with_applicability(
                seq_span,
                "try adding parentheses",
                format!("({})", seq_snippet),
                Applicability::MachineApplicable
            );
        }
        return Err(err);
    }
    Ok(pat)
}

/// Parse a pattern.
pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
    self.parse_pat_with_range_pat(true)
}

/// Parse a pattern, with a setting whether modern range patterns e.g. `a..=b`, `a..b` are
/// allowed.
fn parse_pat_with_range_pat(&mut self, allow_range_pat: bool) -> PResult<'a, P<Pat>> {
    maybe_whole!(self, NtPat, |x| x);

    let lo = self.span;
    let pat;
    match self.token {
        token::BinOp(token::And) | token::AndAnd => {
            // Parse &pat / &mut pat
            self.expect_and()?;
            let mutbl = self.parse_mutability();
            if let token::Lifetime(ident) = self.token {
                // `&'a pat` is never valid — lifetimes do not occur in patterns.
                let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
                                                  ident));
                err.span_label(self.span, "unexpected lifetime");
                return Err(err);
            }
            let subpat = self.parse_pat_with_range_pat(false)?;
            pat = PatKind::Ref(subpat, mutbl);
        }
        token::OpenDelim(token::Paren) => {
            // Parse (pat,pat,pat,...) as tuple pattern
            let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?;
            // `(pat)` with no `..` and no trailing comma is a parenthesized
            // pattern, not a one-element tuple.
            pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma {
                PatKind::Paren(fields.into_iter().nth(0).unwrap())
            } else {
                PatKind::Tuple(fields, ddpos)
            };
        }
        token::OpenDelim(token::Bracket) => {
            // Parse [pat,pat,...] as slice pattern
            self.bump();
            let (before, slice, after) = self.parse_pat_vec_elements()?;
            self.expect(&token::CloseDelim(token::Bracket))?;
            pat = PatKind::Slice(before, slice, after);
        }
        // At this point, token != &, &&, (, [
        _ => if self.eat_keyword(keywords::Underscore) {
            // Parse _
            pat = PatKind::Wild;
        } else if self.eat_keyword(keywords::Mut) {
            // Parse mut ident @ pat / mut ref ident @ pat
            let mutref_span = self.prev_span.to(self.span);
            let binding_mode = if self.eat_keyword(keywords::Ref) {
                // `mut ref` is backwards; diagnose and parse as `ref mut`.
                self.diagnostic()
                    .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
                    .span_suggestion_with_applicability(
                        mutref_span,
                        "try switching the order",
                        "ref mut".into(),
                        Applicability::MachineApplicable
                    ).emit();
                BindingMode::ByRef(Mutability::Mutable)
            } else {
                BindingMode::ByValue(Mutability::Mutable)
            };
            pat = self.parse_pat_ident(binding_mode)?;
        } else if self.eat_keyword(keywords::Ref) {
            // Parse ref ident @ pat / ref mut ident @ pat
            let mutbl = self.parse_mutability();
            pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
        } else if self.eat_keyword(keywords::Box) {
            // Parse box pat
            let subpat = self.parse_pat_with_range_pat(false)?;
            pat = PatKind::Box(subpat);
        } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                  self.parse_as_ident() {
            // Parse ident @ pat
            // This can give false positives and parse nullary enums,
            // they are dealt with later in resolve
            let binding_mode = BindingMode::ByValue(Mutability::Immutable);
            pat = self.parse_pat_ident(binding_mode)?;
        } else if self.token.is_path_start() {
            // Parse pattern starting with a path
            let (qself, path) = if self.eat_lt() {
                // Parse a qualified path
                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                (Some(qself), path)
            } else {
                // Parse an unqualified path
                (None, self.parse_path(PathStyle::Expr)?)
            };
            match self.token {
                token::Not if qself.is_none() => {
                    // Parse macro invocation
                    self.bump();
                    let (delim, tts) = self.expect_delimited_token_tree()?;
                    let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim });
                    pat = PatKind::Mac(mac);
                }
                token::DotDotDot | token::DotDotEq | token::DotDot => {
                    let end_kind = match self.token {
                        token::DotDot => RangeEnd::Excluded,
                        token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
                        token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
                        _ => panic!("can only parse `..`/`...`/`..=` for ranges \
                                     (checked above)"),
                    };
                    let op_span = self.span;
                    // Parse range
                    let span = lo.to(self.prev_span);
                    let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
                    self.bump();
                    let end = self.parse_pat_range_end()?;
                    let op = Spanned { span: op_span, node: end_kind };
                    pat = PatKind::Range(begin, end, op);
                }
                token::OpenDelim(token::Brace) => {
                    if qself.is_some() {
                        let msg = "unexpected `{` after qualified path";
                        let mut err = self.fatal(msg);
                        err.span_label(self.span, msg);
                        return Err(err);
                    }
                    // Parse struct pattern
                    self.bump();
                    let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
                        // Recover with an empty field list rather than aborting.
                        e.emit();
                        self.recover_stmt();
                        (vec![], false)
                    });
                    self.bump();
                    pat = PatKind::Struct(path, fields, etc);
                }
                token::OpenDelim(token::Paren) => {
                    if qself.is_some() {
                        let msg = "unexpected `(` after qualified path";
                        let mut err = self.fatal(msg);
                        err.span_label(self.span, msg);
                        return Err(err);
                    }
                    // Parse tuple struct or enum pattern
                    let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?;
                    pat = PatKind::TupleStruct(path, fields, ddpos)
                }
                _ => pat = PatKind::Path(qself, path),
            }
        } else {
            // Try to parse everything else as literal with optional minus
            match self.parse_literal_maybe_minus() {
                Ok(begin) => {
                    let op_span = self.span;
                    if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
                            self.check(&token::DotDotDot) {
                        let end_kind = if self.eat(&token::DotDotDot) {
                            RangeEnd::Included(RangeSyntax::DotDotDot)
                        } else if self.eat(&token::DotDotEq) {
                            RangeEnd::Included(RangeSyntax::DotDotEq)
                        } else if self.eat(&token::DotDot) {
                            RangeEnd::Excluded
                        } else {
                            panic!("impossible case: we already matched \
                                    on a range-operator token")
                        };
                        let end = self.parse_pat_range_end()?;
                        let op = Spanned { span: op_span, node: end_kind };
                        pat = PatKind::Range(begin, end, op);
                    } else {
                        pat = PatKind::Lit(begin);
                    }
                }
                Err(mut err) => {
                    self.cancel(&mut err);
                    let msg = format!("expected pattern, found {}", self.this_token_descr());
                    let mut err = self.fatal(&msg);
                    err.span_label(self.span, "expected pattern");
                    return Err(err);
                }
            }
        }
    }

    let pat = Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID };
    let pat = self.maybe_recover_from_bad_qpath(pat, true)?;

    if !allow_range_pat {
        // In sub-pattern position (e.g. behind `&`), range patterns other than
        // the legacy `...` form are ambiguous and must be parenthesized.
        match pat.node {
            PatKind::Range(
                _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
            ) => {},
            PatKind::Range(..) => {
                let mut err = self.struct_span_err(
                    pat.span,
                    "the range pattern here has ambiguous interpretation",
                );
                err.span_suggestion_with_applicability(
                    pat.span,
                    "add parentheses to clarify the precedence",
                    format!("({})", pprust::pat_to_string(&pat)),
                    // "ambiguous interpretation" implies that we have to be guessing
                    Applicability::MaybeIncorrect
                );
                return Err(err);
            }
            _ => {}
        }
    }

    Ok(P(pat))
}

/// Parse ident or ident @ pat
/// used by the copy foo and ref foo patterns to give a good
/// error message when parsing mistakes like ref foo(a,b)
fn parse_pat_ident(&mut self,
                   binding_mode: ast::BindingMode)
                   -> PResult<'a, PatKind> {
    let ident = self.parse_ident()?;
    let sub = if self.eat(&token::At) {
        Some(self.parse_pat()?)
    } else {
        None
    };

    // just to be friendly, if they write something like
    //   ref Some(i)
    // we end up here with ( as the current token. This shortly
    // leads to a parse error. Note that if there is no explicit
    // binding mode then we do not end up here, because the lookahead
    // will direct us over to parse_enum_variant()
    if self.token == token::OpenDelim(token::Paren) {
        return Err(self.span_fatal(
            self.prev_span,
            "expected identifier, found enum pattern"))
    }

    Ok(PatKind::Ident(binding_mode, ident, sub))
}

/// Parse a local variable declaration
fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
    let lo = self.prev_span;
    let pat = self.parse_top_level_pat()?;

    let (err, ty) = if self.eat(&token::Colon) {
        // Save the state of the parser before parsing type normally, in case there is a `:`
        // instead of an `=` typo.
        let parser_snapshot_before_type = self.clone();
        let colon_sp = self.prev_span;
        match self.parse_ty() {
            Ok(ty) => (None, Some(ty)),
            Err(mut err) => {
                // Rewind to before attempting to parse the type and continue parsing
                let parser_snapshot_after_type = self.clone();
                mem::replace(self, parser_snapshot_before_type);

                let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
                err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
                (Some((parser_snapshot_after_type, colon_sp, err)), None)
            }
        }
    } else {
        (None, None)
    };

    let init = match (self.parse_initializer(err.is_some()), err) {
        (Ok(init), None) => {  // init parsed, ty parsed
            init
        }
        (Ok(init), Some((_, colon_sp, mut err))) => {  // init parsed, ty error
            // Could parse the type as if it were the initializer, it is likely there was a
            // typo in the code: `:` instead of `=`. Add suggestion and emit the error.
            err.span_suggestion_short_with_applicability(
                colon_sp,
                "use `=` if you meant to assign",
                "=".to_string(),
                Applicability::MachineApplicable
            );
            err.emit();
            // As this was parsed successfully, continue as if the code has been fixed for the
            // rest of the file. It will still fail due to the emitted error, but we avoid
            // extra noise.
            init
        }
        (Err(mut init_err), Some((snapshot, _, ty_err))) => {  // init error, ty error
            init_err.cancel();
            // Couldn't parse the type nor the initializer, only raise the type error and
            // return to the parser state before parsing the type as the initializer.
            // let x: <parse_error>;
            mem::replace(self, snapshot);
            return Err(ty_err);
        }
        (Err(err), None) => {  // init error, ty parsed
            // Couldn't parse the initializer and we're not attempting to recover a failed
            // parse of the type, return the error.
            return Err(err);
        }
    };
    let hi = if self.token == token::Semi {
        self.span
    } else {
        self.prev_span
    };
    Ok(P(ast::Local {
        ty,
        pat,
        init,
        id: ast::DUMMY_NODE_ID,
        span: lo.to(hi),
        attrs,
    }))
}

/// Parse a structure field
fn parse_name_and_ty(&mut self,
                     lo: Span,
                     vis: Visibility,
                     attrs: Vec<Attribute>)
                     -> PResult<'a, StructField> {
    let name = self.parse_ident()?;
    self.expect(&token::Colon)?;
    let ty = self.parse_ty()?;
    Ok(StructField {
        span: lo.to(self.prev_span),
        ident: Some(name),
        vis,
        id: ast::DUMMY_NODE_ID,
        ty,
        attrs,
    })
}

/// Emit an expected item after attributes error.
fn expected_item_err(&self, attrs: &[Attribute]) {
    let message = match attrs.last() {
        Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
        _ => "expected item after attributes",
    };

    self.span_err(self.prev_span, message);
}

/// Parse a statement. This stops just before trailing semicolons on everything but items.
/// e.g. a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
    Ok(self.parse_stmt_(true))
}

// Eat tokens until we can be relatively sure we reached the end of the
// statement. This is something of a best-effort heuristic.
//
// We terminate when we find an unmatched `}` (without consuming it).
fn recover_stmt(&mut self) { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) } // If `break_on_semi` is `Break`, then we will stop consuming tokens after // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is // approximate - it can mean we break too early due to macros, but that // should only lead to sub-optimal recovery, not inaccurate parsing). // // If `break_on_block` is `Break`, then we will stop consuming tokens // after finding (and consuming) a brace-delimited block. fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) { let mut brace_depth = 0; let mut bracket_depth = 0; let mut in_block = false; debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block); loop { debug!("recover_stmt_ loop {:?}", self.token); match self.token { token::OpenDelim(token::DelimToken::Brace) => { brace_depth += 1; self.bump(); if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0 { in_block = true; } } token::OpenDelim(token::DelimToken::Bracket) => { bracket_depth += 1; self.bump(); } token::CloseDelim(token::DelimToken::Brace) => { if brace_depth == 0 { debug!("recover_stmt_ return - close delim {:?}", self.token); return; } brace_depth -= 1; self.bump(); if in_block && bracket_depth == 0 && brace_depth == 0 { debug!("recover_stmt_ return - block end {:?}", self.token); return; } } token::CloseDelim(token::DelimToken::Bracket) => { bracket_depth -= 1; if bracket_depth < 0 { bracket_depth = 0; } self.bump(); } token::Eof => { debug!("recover_stmt_ return - Eof"); return; } token::Semi => { self.bump(); if break_on_semi == SemiColonMode::Break && brace_depth == 0 && bracket_depth == 0 { debug!("recover_stmt_ return - Semi"); return; } } _ => { self.bump() } } } } fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> { self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| { e.emit(); 
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore); None }) } fn is_async_block(&mut self) -> bool { self.token.is_keyword(keywords::Async) && ( ( // `async move {` self.look_ahead(1, |t| t.is_keyword(keywords::Move)) && self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) ) || ( // `async {` self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) ) ) } fn is_catch_expr(&mut self) -> bool { self.token.is_keyword(keywords::Do) && self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) && self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) && // prevent `while catch {} {}`, `if catch {} {} else {}`, etc. !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) } fn is_union_item(&self) -> bool { self.token.is_keyword(keywords::Union) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) } fn is_crate_vis(&self) -> bool { self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep) } fn is_extern_non_path(&self) -> bool { self.token.is_keyword(keywords::Extern) && self.look_ahead(1, |t| t != &token::ModSep) } fn is_existential_type_decl(&self) -> bool { self.token.is_keyword(keywords::Existential) && self.look_ahead(1, |t| t.is_keyword(keywords::Type)) } fn is_auto_trait_item(&mut self) -> bool { // auto trait (self.token.is_keyword(keywords::Auto) && self.look_ahead(1, |t| t.is_keyword(keywords::Trait))) || // unsafe auto trait (self.token.is_keyword(keywords::Unsafe) && self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) && self.look_ahead(2, |t| t.is_keyword(keywords::Trait))) } fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) -> PResult<'a, Option<P<Item>>> { let token_lo = self.span; let (ident, def) = match self.token { token::Ident(ident, false) if ident.name == keywords::Macro.name() => { self.bump(); let ident = self.parse_ident()?; let tokens = if self.check(&token::OpenDelim(token::Brace)) { match self.parse_token_tree() { TokenTree::Delimited(_, ref 
delimited) => delimited.stream(), _ => unreachable!(), } } else if self.check(&token::OpenDelim(token::Paren)) { let args = self.parse_token_tree(); let body = if self.check(&token::OpenDelim(token::Brace)) { self.parse_token_tree() } else { self.unexpected()?; unreachable!() }; TokenStream::concat(vec![ args.into(), TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(), body.into(), ]) } else { self.unexpected()?; unreachable!() }; (ident, ast::MacroDef { tokens: tokens.into(), legacy: false }) } token::Ident(ident, _) if ident.name == "macro_rules" && self.look_ahead(1, |t| *t == token::Not) => { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis.node, prev_span); self.bump(); self.bump(); let ident = self.parse_ident()?; let (delim, tokens) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { if !self.eat(&token::Semi) { let msg = "macros that expand to items must either \ be surrounded with braces or followed by a semicolon"; self.span_err(self.prev_span, msg); } } (ident, ast::MacroDef { tokens: tokens, legacy: true }) } _ => return Ok(None), }; let span = lo.to(self.prev_span); Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec()))) } fn parse_stmt_without_recovery(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { maybe_whole!(self, NtStmt, |x| Some(x)); let attrs = self.parse_outer_attributes()?; let lo = self.span; Ok(Some(if self.eat_keyword(keywords::Let) { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Local(self.parse_local(attrs.into())?), span: lo.to(self.prev_span), } } else if let Some(macro_def) = self.eat_macro_def( &attrs, &source_map::respan(lo, VisibilityKind::Inherited), lo, )? { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Item(macro_def), span: lo.to(self.prev_span), } // Starts like a simple path, being careful to avoid contextual keywords // such as a union items, item with `crate` visibility or auto trait items. 
// Our goal here is to parse an arbitrary path `a::b::c` but not something that starts // like a path (1 token), but it fact not a path. // `union::b::c` - path, `union U { ... }` - not a path. // `crate::b::c` - path, `crate struct S;` - not a path. // `extern::b::c` - path, `extern crate c;` - not a path. } else if self.token.is_path_start() && !self.token.is_qpath_start() && !self.is_union_item() && !self.is_crate_vis() && !self.is_extern_non_path() && !self.is_existential_type_decl() && !self.is_auto_trait_item() { let pth = self.parse_path(PathStyle::Expr)?; if !self.eat(&token::Not) { let expr = if self.check(&token::OpenDelim(token::Brace)) { self.parse_struct_expr(lo, pth, ThinVec::new())? } else { let hi = self.prev_span; self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new()) }; let expr = self.with_res(Restrictions::STMT_EXPR, |this| { let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?; this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr)) })?; return Ok(Some(Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Expr(expr), span: lo.to(self.prev_span), })); } // it's a macro invocation let id = match self.token { token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier _ => self.parse_ident()?, }; // check that we're pointing at delimiters (need to check // again after the `if`, because of `parse_ident` // consuming more tokens). match self.token { token::OpenDelim(_) => {} _ => { // we only expect an ident if we didn't parse one // above. 
let ident_str = if id.name == keywords::Invalid.name() { "identifier, " } else { "" }; let tok_str = self.this_token_to_string(); let mut err = self.fatal(&format!("expected {}`(` or `{{`, found `{}`", ident_str, tok_str)); err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str)); return Err(err) }, } let (delim, tts) = self.expect_delimited_token_tree()?; let hi = self.prev_span; let style = if delim == MacDelimiter::Brace { MacStmtStyle::Braces } else { MacStmtStyle::NoBraces }; if id.name == keywords::Invalid.name() { let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim }); let node = if delim == MacDelimiter::Brace || self.token == token::Semi || self.token == token::Eof { StmtKind::Mac(P((mac, style, attrs.into()))) } // We used to incorrectly stop parsing macro-expanded statements here. // If the next token will be an error anyway but could have parsed with the // earlier behavior, stop parsing here and emit a warning to avoid breakage. else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token { // These can continue an expression, so we can't stop parsing and warn. token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) | token::BinOp(token::Minus) | token::BinOp(token::Star) | token::BinOp(token::And) | token::BinOp(token::Or) | token::AndAnd | token::OrOr | token::DotDot | token::DotDotDot | token::DotDotEq => false, _ => true, } { self.warn_missing_semicolon(); StmtKind::Mac(P((mac, style, attrs.into()))) } else { let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new()); let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; StmtKind::Expr(e) }; Stmt { id: ast::DUMMY_NODE_ID, span: lo.to(hi), node, } } else { // if it has a special ident, it's definitely an item // // Require a semicolon or braces. 
if style != MacStmtStyle::Braces { if !self.eat(&token::Semi) { self.span_err(self.prev_span, "macros that expand to items must \ either be surrounded with braces or \ followed by a semicolon"); } } let span = lo.to(hi); Stmt { id: ast::DUMMY_NODE_ID, span, node: StmtKind::Item({ self.mk_item( span, id /*id is good here*/, ItemKind::Mac(respan(span, Mac_ { path: pth, tts, delim })), respan(lo, VisibilityKind::Inherited), attrs) }), } } } else { // FIXME: Bad copy of attrs let old_directory_ownership = mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock); let item = self.parse_item_(attrs.clone(), false, true)?; self.directory.ownership = old_directory_ownership; match item { Some(i) => Stmt { id: ast::DUMMY_NODE_ID, span: lo.to(i.span), node: StmtKind::Item(i), }, None => { let unused_attrs = |attrs: &[Attribute], s: &mut Self| { if !attrs.is_empty() { if s.prev_token_kind == PrevTokenKind::DocComment { s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit(); } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) { s.span_err(s.span, "expected statement after outer attribute"); } } }; // Do not attempt to parse an expression if we're done here. if self.token == token::Semi { unused_attrs(&attrs, self); self.bump(); return Ok(None); } if self.token == token::CloseDelim(token::Brace) { unused_attrs(&attrs, self); return Ok(None); } // Remainder are line-expr stmts. let e = self.parse_expr_res( Restrictions::STMT_EXPR, Some(attrs.into()))?; Stmt { id: ast::DUMMY_NODE_ID, span: lo.to(e.span), node: StmtKind::Expr(e), } } } })) } /// Is this expression a successfully-parsed statement? fn expr_is_complete(&mut self, e: &Expr) -> bool { self.restrictions.contains(Restrictions::STMT_EXPR) && !classify::expr_requires_semi_to_be_stmt(e) } /// Parse a block. No inner attrs are allowed. 
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
    // If the parser is sitting on an already-interpolated `NtBlock` fragment
    // (e.g. a `$b:block` macro argument), return it directly.
    maybe_whole!(self, NtBlock, |x| x);

    let lo = self.span;

    if !self.eat(&token::OpenDelim(token::Brace)) {
        // No `{` where a block is required: build a fatal error, then try to
        // produce a helpful suggestion before returning it.
        let sp = self.span;
        let tok = self.this_token_to_string();
        let mut do_not_suggest_help = false;
        let mut e = self.span_fatal(sp, &format!("expected `{{`, found `{}`", tok));
        // NOTE(review): the `in`/`:` check presumably targets for-loop /
        // type-ascription-like typos where the block-suggestion below would be
        // misleading; in that case only a plain label is attached.
        if self.token.is_keyword(keywords::In) || self.token == token::Colon {
            do_not_suggest_help = true;
            e.span_label(sp, "expected `{`");
        }

        // Check to see if the user has written something like
        //
        //     if (cond)
        //         bar;
        //
        // Which is valid in other languages, but not Rust.
        //
        // Speculatively parse one statement (without recovery) so we can
        // suggest wrapping it in braces.
        match self.parse_stmt_without_recovery(false) {
            Ok(Some(stmt)) => {
                if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
                    || do_not_suggest_help {
                    // if the next token is an open brace (e.g., `if a b {`), the place-
                    // inside-a-block suggestion would be more likely wrong than right
                    return Err(e);
                }
                let mut stmt_span = stmt.span;
                // expand the span to include the semicolon, if it exists
                if self.eat(&token::Semi) {
                    stmt_span = stmt_span.with_hi(self.prev_span.hi());
                }
                // Pretty-print the parsed statement wrapped in `{ ... }` and
                // offer that text as the replacement suggestion.
                let sugg = pprust::to_string(|s| {
                    use print::pprust::{PrintState, INDENT_UNIT};
                    s.ibox(INDENT_UNIT)?;
                    s.bopen()?;
                    s.print_stmt(&stmt)?;
                    s.bclose_maybe_open(stmt.span, INDENT_UNIT, false)
                });
                e.span_suggestion_with_applicability(
                    stmt_span,
                    "try placing this code inside a block",
                    sugg,
                    // speculative, has been misleading in the past (closed Issue #46836)
                    Applicability::MaybeIncorrect
                );
            }
            Err(mut e) => {
                // The speculative statement parse failed too: skip ahead to a
                // plausible statement boundary and cancel the secondary error
                // so only the original "expected `{`" diagnostic is reported.
                self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
                self.cancel(&mut e);
            }
            _ => ()
        }
        return Err(e);
    }

    // Opening brace consumed; parse statements up to the matching `}`.
    self.parse_block_tail(lo, BlockCheckMode::Default)
}

/// Parse a block. Inner attrs are allowed.
fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); let lo = self.span; self.expect(&token::OpenDelim(token::Brace))?; Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, BlockCheckMode::Default)?)) } /// Parse the rest of a block expression or function body /// Precondition: already parsed the '{'. fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> { let mut stmts = vec![]; let mut recovered = false; while !self.eat(&token::CloseDelim(token::Brace)) { let stmt = match self.parse_full_stmt(false) { Err(mut err) => { err.emit(); self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); self.eat(&token::CloseDelim(token::Brace)); recovered = true; break; } Ok(stmt) => stmt, }; if let Some(stmt) = stmt { stmts.push(stmt); } else if self.token == token::Eof { break; } else { // Found only `;` or `}`. continue; }; } Ok(P(ast::Block { stmts, id: ast::DUMMY_NODE_ID, rules: s, span: lo.to(self.prev_span), recovered, })) } /// Parse a statement, including the trailing semicolon. crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { // skip looking for a trailing semicolon when we have an interpolated statement maybe_whole!(self, NtStmt, |x| Some(x)); let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? { Some(stmt) => stmt, None => return Ok(None), }; match stmt.node { StmtKind::Expr(ref expr) if self.token != token::Eof => { // expression without semicolon if classify::expr_requires_semi_to_be_stmt(expr) { // Just check for errors and recover; do not eat semicolon yet. if let Err(mut e) = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)]) { e.emit(); self.recover_stmt(); } } } StmtKind::Local(..) => { // We used to incorrectly allow a macro-expanded let statement to lack a semicolon. 
if macro_legacy_warnings && self.token != token::Semi { self.warn_missing_semicolon(); } else { self.expect_one_of(&[], &[token::Semi])?; } } _ => {} } if self.eat(&token::Semi) { stmt = stmt.add_trailing_semicolon(); } stmt.span = stmt.span.with_hi(self.prev_span.hi()); Ok(Some(stmt)) } fn warn_missing_semicolon(&self) { self.diagnostic().struct_span_warn(self.span, { &format!("expected `;`, found `{}`", self.this_token_to_string()) }).note({ "This was erroneously allowed and will become a hard error in a future release" }).emit(); } fn err_dotdotdot_syntax(&self, span: Span) { self.diagnostic().struct_span_err(span, { "unexpected token: `...`" }).span_suggestion_with_applicability( span, "use `..` for an exclusive range", "..".to_owned(), Applicability::MaybeIncorrect ).span_suggestion_with_applicability( span, "or `..=` for an inclusive range", "..=".to_owned(), Applicability::MaybeIncorrect ).emit(); } // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. // BOUND = TY_BOUND | LT_BOUND // LT_BOUND = LIFETIME (e.g. `'a`) // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g. `?for<'a: 'b> m::Trait<'a>`) fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> { let mut bounds = Vec::new(); loop { // This needs to be synchronized with `Token::can_begin_bound`. 
let is_bound_start = self.check_path() || self.check_lifetime() || self.check(&token::Question) || self.check_keyword(keywords::For) || self.check(&token::OpenDelim(token::Paren)); if is_bound_start { let lo = self.span; let has_parens = self.eat(&token::OpenDelim(token::Paren)); let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None }; if self.token.is_lifetime() { if let Some(question_span) = question { self.span_err(question_span, "`?` may only modify trait bounds, not lifetime bounds"); } bounds.push(GenericBound::Outlives(self.expect_lifetime())); if has_parens { self.expect(&token::CloseDelim(token::Paren))?; self.span_err(self.prev_span, "parenthesized lifetime bounds are not supported"); } } else { let lifetime_defs = self.parse_late_bound_lifetime_defs()?; let path = self.parse_path(PathStyle::Type)?; if has_parens { self.expect(&token::CloseDelim(token::Paren))?; } let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let modifier = if question.is_some() { TraitBoundModifier::Maybe } else { TraitBoundModifier::None }; bounds.push(GenericBound::Trait(poly_trait, modifier)); } } else { break } if !allow_plus || !self.eat_plus() { break } } return Ok(bounds); } fn parse_generic_bounds(&mut self) -> PResult<'a, GenericBounds> { self.parse_generic_bounds_common(true) } // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. // BOUND = LT_BOUND (e.g. `'a`) fn parse_lt_param_bounds(&mut self) -> GenericBounds { let mut lifetimes = Vec::new(); while self.check_lifetime() { lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime())); if !self.eat_plus() { break } } lifetimes } /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )? fn parse_ty_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> { let ident = self.parse_ident()?; // Parse optional colon and param bounds. 
let bounds = if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() }; let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None }; Ok(GenericParam { ident, id: ast::DUMMY_NODE_ID, attrs: preceding_attrs.into(), bounds, kind: GenericParamKind::Type { default, } }) } /// Parses the following grammar: /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty] fn parse_trait_item_assoc_ty(&mut self) -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> { let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; // Parse optional colon and param bounds. let bounds = if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() }; generics.where_clause = self.parse_where_clause()?; let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None }; self.expect(&token::Semi)?; Ok((ident, TraitItemKind::Type(bounds, default), generics)) } /// Parses (possibly empty) list of lifetime and type parameters, possibly including /// trailing comma and erroneous trailing attributes. crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { let mut params = Vec::new(); let mut seen_ty_param = false; loop { let attrs = self.parse_outer_attributes()?; if self.check_lifetime() { let lifetime = self.expect_lifetime(); // Parse lifetime parameter. let bounds = if self.eat(&token::Colon) { self.parse_lt_param_bounds() } else { Vec::new() }; params.push(ast::GenericParam { ident: lifetime.ident, id: lifetime.id, attrs: attrs.into(), bounds, kind: ast::GenericParamKind::Lifetime, }); if seen_ty_param { self.span_err(self.prev_span, "lifetime parameters must be declared prior to type parameters"); } } else if self.check_ident() { // Parse type parameter. params.push(self.parse_ty_param(attrs)?); seen_ty_param = true; } else { // Check for trailing attributes and stop parsing. 
if !attrs.is_empty() { let param_kind = if seen_ty_param { "type" } else { "lifetime" }; self.span_err(attrs[0].span, &format!("trailing attribute after {} parameters", param_kind)); } break } if !self.eat(&token::Comma) { break } } Ok(params) } /// Parse a set of optional generic type parameter declarations. Where /// clauses are not parsed here, and must be added later via /// `parse_where_clause()`. /// /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > ) /// | ( < lifetimes , typaramseq ( , )? > ) /// where typaramseq = ( typaram ) | ( typaram , typaramseq ) fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { maybe_whole!(self, NtGenerics, |x| x); let span_lo = self.span; if self.eat_lt() { let params = self.parse_generic_params()?; self.expect_gt()?; Ok(ast::Generics { params, where_clause: WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), span: syntax_pos::DUMMY_SP, }, span: span_lo.to(self.prev_span), }) } else { Ok(ast::Generics::default()) } } /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings, /// possibly including trailing comma. fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> { let mut args = Vec::new(); let mut bindings = Vec::new(); let mut seen_type = false; let mut seen_binding = false; loop { if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) { // Parse lifetime argument. args.push(GenericArg::Lifetime(self.expect_lifetime())); if seen_type || seen_binding { self.span_err(self.prev_span, "lifetime parameters must be declared prior to type parameters"); } } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { // Parse associated type binding. 
let lo = self.span; let ident = self.parse_ident()?; self.bump(); let ty = self.parse_ty()?; bindings.push(TypeBinding { id: ast::DUMMY_NODE_ID, ident, ty, span: lo.to(self.prev_span), }); seen_binding = true; } else if self.check_type() { // Parse type argument. let ty_param = self.parse_ty()?; if seen_binding { self.span_err(ty_param.span, "type parameters must be declared prior to associated type bindings"); } args.push(GenericArg::Type(ty_param)); seen_type = true; } else { break } if !self.eat(&token::Comma) { break } } Ok((args, bindings)) } /// Parses an optional `where` clause and places it in `generics`. /// /// ```ignore (only-for-syntax-highlight) /// where T : Trait<U, V> + 'b, 'a : 'b /// ``` fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> { maybe_whole!(self, NtWhereClause, |x| x); let mut where_clause = WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), span: syntax_pos::DUMMY_SP, }; if !self.eat_keyword(keywords::Where) { return Ok(where_clause); } let lo = self.prev_span; // We are considering adding generics to the `where` keyword as an alternative higher-rank // parameter syntax (as in `where<'a>` or `where<T>`. To avoid that being a breaking // change we parse those generics now, but report an error. if self.choose_generics_over_qpath() { let generics = self.parse_generics()?; self.span_err(generics.span, "generic parameters on `where` clauses are reserved for future use"); } loop { let lo = self.span; if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) { let lifetime = self.expect_lifetime(); // Bounds starting with a colon are mandatory, but possibly empty. self.expect(&token::Colon)?; let bounds = self.parse_lt_param_bounds(); where_clause.predicates.push(ast::WherePredicate::RegionPredicate( ast::WhereRegionPredicate { span: lo.to(self.prev_span), lifetime, bounds, } )); } else if self.check_type() { // Parse optional `for<'a, 'b>`. 
// This `for` is parsed greedily and applies to the whole predicate, // the bounded type can have its own `for` applying only to it. // Example 1: for<'a> Trait1<'a>: Trait2<'a /*ok*/> // Example 2: (for<'a> Trait1<'a>): Trait2<'a /*not ok*/> // Example 3: for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /*ok*/, 'b /*not ok*/> let lifetime_defs = self.parse_late_bound_lifetime_defs()?; // Parse type with mandatory colon and (possibly empty) bounds, // or with mandatory equality sign and the second type. let ty = self.parse_ty()?; if self.eat(&token::Colon) { let bounds = self.parse_generic_bounds()?; where_clause.predicates.push(ast::WherePredicate::BoundPredicate( ast::WhereBoundPredicate { span: lo.to(self.prev_span), bound_generic_params: lifetime_defs, bounded_ty: ty, bounds, } )); // FIXME: Decide what should be used here, `=` or `==`. // FIXME: We are just dropping the binders in lifetime_defs on the floor here. } else if self.eat(&token::Eq) || self.eat(&token::EqEq) { let rhs_ty = self.parse_ty()?; where_clause.predicates.push(ast::WherePredicate::EqPredicate( ast::WhereEqPredicate { span: lo.to(self.prev_span), lhs_ty: ty, rhs_ty, id: ast::DUMMY_NODE_ID, } )); } else { return self.unexpected(); } } else { break } if !self.eat(&token::Comma) { break } } where_clause.span = lo.to(self.prev_span); Ok(where_clause) } fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool) -> PResult<'a, (Vec<Arg> , bool)> { let sp = self.span; let mut variadic = false; let args: Vec<Option<Arg>> = self.parse_unspanned_seq( &token::OpenDelim(token::Paren), &token::CloseDelim(token::Paren), SeqSep::trailing_allowed(token::Comma), |p| { if p.token == token::DotDotDot { p.bump(); variadic = true; if allow_variadic { if p.token != token::CloseDelim(token::Paren) { let span = p.span; p.span_err(span, "`...` must be last in argument list for variadic function"); } Ok(None) } else { let span = p.prev_span; if p.token == token::CloseDelim(token::Paren) { // continue parsing to 
present any further errors p.struct_span_err( span, "only foreign functions are allowed to be variadic" ).emit(); Ok(Some(dummy_arg(span))) } else { // this function definition looks beyond recovery, stop parsing p.span_err(span, "only foreign functions are allowed to be variadic"); Ok(None) } } } else { match p.parse_arg_general(named_args) { Ok(arg) => Ok(Some(arg)), Err(mut e) => { e.emit(); let lo = p.prev_span; // Skip every token until next possible arg or end. p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); // Create a placeholder argument for proper arg count (#34264). let span = lo.to(p.prev_span); Ok(Some(dummy_arg(span))) } } } } )?; let args: Vec<_> = args.into_iter().filter_map(|x| x).collect(); if variadic && args.is_empty() { self.span_err(sp, "variadic function must be declared with at least one named argument"); } Ok((args, variadic)) } /// Parse the argument list and result type of a function declaration fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> { let (args, variadic) = self.parse_fn_args(true, allow_variadic)?; let ret_ty = self.parse_ret_ty(true)?; Ok(P(FnDecl { inputs: args, output: ret_ty, variadic, })) } /// Returns the parsed optional self argument and whether a self shortcut was used. fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> { let expect_ident = |this: &mut Self| match this.token { // Preserve hygienic context. token::Ident(ident, _) => { let span = this.span; this.bump(); Ident::new(ident.name, span) } _ => unreachable!() }; let isolated_self = |this: &mut Self, n| { this.look_ahead(n, |t| t.is_keyword(keywords::SelfValue)) && this.look_ahead(n + 1, |t| t != &token::ModSep) }; // Parse optional self parameter of a method. // Only a limited set of initial token sequences is considered self parameters, anything // else is parsed as a normal function parameter list, so some lookahead is required. 
let eself_lo = self.span; let (eself, eself_ident, eself_hi) = match self.token { token::BinOp(token::And) => { // &self // &mut self // &'lt self // &'lt mut self // &not_self (if isolated_self(self, 1) { self.bump(); SelfKind::Region(None, Mutability::Immutable) } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) && isolated_self(self, 2) { self.bump(); self.bump(); SelfKind::Region(None, Mutability::Mutable) } else if self.look_ahead(1, |t| t.is_lifetime()) && isolated_self(self, 2) { self.bump(); let lt = self.expect_lifetime(); SelfKind::Region(Some(lt), Mutability::Immutable) } else if self.look_ahead(1, |t| t.is_lifetime()) && self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) && isolated_self(self, 3) { self.bump(); let lt = self.expect_lifetime(); self.bump(); SelfKind::Region(Some(lt), Mutability::Mutable) } else { return Ok(None); }, expect_ident(self), self.prev_span) } token::BinOp(token::Star) => { // *self // *const self // *mut self // *not_self // Emit special error for `self` cases. (if isolated_self(self, 1) { self.bump(); self.span_err(self.span, "cannot pass `self` by raw pointer"); SelfKind::Value(Mutability::Immutable) } else if self.look_ahead(1, |t| t.is_mutability()) && isolated_self(self, 2) { self.bump(); self.bump(); self.span_err(self.span, "cannot pass `self` by raw pointer"); SelfKind::Value(Mutability::Immutable) } else { return Ok(None); }, expect_ident(self), self.prev_span) } token::Ident(..) 
=> { if isolated_self(self, 0) { // self // self: TYPE let eself_ident = expect_ident(self); let eself_hi = self.prev_span; (if self.eat(&token::Colon) { let ty = self.parse_ty()?; SelfKind::Explicit(ty, Mutability::Immutable) } else { SelfKind::Value(Mutability::Immutable) }, eself_ident, eself_hi) } else if self.token.is_keyword(keywords::Mut) && isolated_self(self, 1) { // mut self // mut self: TYPE self.bump(); let eself_ident = expect_ident(self); let eself_hi = self.prev_span; (if self.eat(&token::Colon) { let ty = self.parse_ty()?; SelfKind::Explicit(ty, Mutability::Mutable) } else { SelfKind::Value(Mutability::Mutable) }, eself_ident, eself_hi) } else { return Ok(None); } } _ => return Ok(None), }; let eself = source_map::respan(eself_lo.to(eself_hi), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } /// Parse the parameter list and result type of a function that may have a `self` parameter. fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>, { self.expect(&token::OpenDelim(token::Paren))?; // Parse optional self argument let self_arg = self.parse_self_arg()?; // Parse the rest of the function parameter list. let sep = SeqSep::trailing_allowed(token::Comma); let fn_inputs = if let Some(self_arg) = self_arg { if self.check(&token::CloseDelim(token::Paren)) { vec![self_arg] } else if self.eat(&token::Comma) { let mut fn_inputs = vec![self_arg]; fn_inputs.append(&mut self.parse_seq_to_before_end( &token::CloseDelim(token::Paren), sep, parse_arg_fn)? ); fn_inputs } else { return self.unexpected(); } } else { self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)? }; // Parse closing paren and return type. 
self.expect(&token::CloseDelim(token::Paren))?; Ok(P(FnDecl { inputs: fn_inputs, output: self.parse_ret_ty(true)?, variadic: false })) } // parse the |arg, arg| header on a lambda fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> { let inputs_captures = { if self.eat(&token::OrOr) { Vec::new() } else { self.expect(&token::BinOp(token::Or))?; let args = self.parse_seq_to_before_tokens( &[&token::BinOp(token::Or), &token::OrOr], SeqSep::trailing_allowed(token::Comma), TokenExpectType::NoExpect, |p| p.parse_fn_block_arg() )?; self.expect_or()?; args } }; let output = self.parse_ret_ty(true)?; Ok(P(FnDecl { inputs: inputs_captures, output, variadic: false })) } /// Parse the name and optional generic types of a function header. fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> { let id = self.parse_ident()?; let generics = self.parse_generics()?; Ok((id, generics)) } fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility, attrs: Vec<Attribute>) -> P<Item> { P(Item { ident, attrs, id: ast::DUMMY_NODE_ID, node, vis, span, tokens: None, }) } /// Parse an item-position function declaration. 
/// Parse an item-position function declaration.
///
/// The "front matter" (`const`/`unsafe`/`async`/`extern ...` and the `fn`
/// keyword itself) has already been consumed by the caller; this parses the
/// name, generics, argument list, where-clause and body.
fn parse_item_fn(&mut self,
                 unsafety: Unsafety,
                 asyncness: IsAsync,
                 constness: Spanned<Constness>,
                 abi: Abi)
                 -> PResult<'a, ItemInfo> {
    let (ident, mut generics) = self.parse_fn_header()?;
    let decl = self.parse_fn_decl(false)?;
    // The where-clause follows the declaration but is stored on `generics`.
    generics.where_clause = self.parse_where_clause()?;
    let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
    let header = FnHeader { unsafety, asyncness, constness, abi };
    Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
}

/// true if we are looking at `const ID`, false for things like `const fn` etc
fn is_const_item(&mut self) -> bool {
    self.token.is_keyword(keywords::Const) &&
        !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
        !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
}

/// parses all the "front matter" for a `fn` declaration, up to
/// and including the `fn` keyword:
///
/// - `const fn`
/// - `unsafe fn`
/// - `const unsafe fn`
/// - `extern fn`
/// - etc
fn parse_fn_front_matter(&mut self)
    -> PResult<'a, (
        Spanned<Constness>,
        Unsafety,
        IsAsync,
        Abi
    )>
{
    let is_const_fn = self.eat_keyword(keywords::Const);
    let const_span = self.prev_span;
    let unsafety = self.parse_unsafety();
    let asyncness = self.parse_asyncness();
    // A `const fn` never takes an explicit ABI here: it is always `Abi::Rust`.
    // Only the non-const path looks for an `extern "..."` qualifier.
    let (constness, unsafety, abi) = if is_const_fn {
        (respan(const_span, Constness::Const), unsafety, Abi::Rust)
    } else {
        let abi = if self.eat_keyword(keywords::Extern) {
            // `extern` with no ABI string defaults to `"C"`.
            self.parse_opt_abi()?.unwrap_or(Abi::C)
        } else {
            Abi::Rust
        };
        (respan(self.prev_span, Constness::NotConst), unsafety, abi)
    };
    self.expect_keyword(keywords::Fn)?;
    Ok((constness, unsafety, asyncness, abi))
}

/// Parse an impl item.
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
    maybe_whole!(self, NtImplItem, |x| x);
    let attrs = self.parse_outer_attributes()?;
    let (mut item, tokens) = self.collect_tokens(|this| {
        this.parse_impl_item_(at_end, attrs)
    })?;
    // See `parse_item` for why this clause is here.
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { item.tokens = Some(tokens); } Ok(item) } fn parse_impl_item_(&mut self, at_end: &mut bool, mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> { let lo = self.span; let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness(); let (name, node, generics) = if let Some(type_) = self.eat_type() { let (name, alias, generics) = type_?; let kind = match alias { AliasKind::Weak(typ) => ast::ImplItemKind::Type(typ), AliasKind::Existential(bounds) => ast::ImplItemKind::Existential(bounds), }; (name, kind, generics) } else if self.is_const_item() { // This parses the grammar: // ImplItemConst = "const" Ident ":" Ty "=" Expr ";" self.expect_keyword(keywords::Const)?; let name = self.parse_ident()?; self.expect(&token::Colon)?; let typ = self.parse_ty()?; self.expect(&token::Eq)?; let expr = self.parse_expr()?; self.expect(&token::Semi)?; (name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default()) } else { let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?; attrs.extend(inner_attrs); (name, node, generics) }; Ok(ImplItem { id: ast::DUMMY_NODE_ID, span: lo.to(self.prev_span), ident: name, vis, defaultness, attrs, generics, node, tokens: None, }) } fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) { if let Err(mut err) = self.complain_if_pub_macro_diag(vis, sp) { err.emit(); } } fn complain_if_pub_macro_diag(&mut self, vis: &VisibilityKind, sp: Span) -> PResult<'a, ()> { match *vis { VisibilityKind::Inherited => Ok(()), _ => { let is_macro_rules: bool = match self.token { token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"), _ => false, }; if is_macro_rules { let mut err = self.diagnostic() .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`"); err.span_suggestion_with_applicability( sp, "try exporting the macro", "#[macro_export]".to_owned(), Applicability::MaybeIncorrect // speculative ); Err(err) } 
else { let mut err = self.diagnostic() .struct_span_err(sp, "can't qualify macro invocation with `pub`"); err.help("try adjusting the macro to put `pub` inside the invocation"); Err(err) } } } } fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span) -> DiagnosticBuilder<'a> { let expected_kinds = if item_type == "extern" { "missing `fn`, `type`, or `static`" } else { "missing `fn`, `type`, or `const`" }; // Given this code `path(`, it seems like this is not // setting the visibility of a macro invocation, but rather // a mistyped method declaration. // Create a diagnostic pointing out that `fn` is missing. // // x | pub path(&self) { // | ^ missing `fn`, `type`, or `const` // pub path( // ^^ `sp` below will point to this let sp = prev_span.between(self.prev_span); let mut err = self.diagnostic().struct_span_err( sp, &format!("{} for {}-item declaration", expected_kinds, item_type)); err.span_label(sp, expected_kinds); err } /// Parse a method or a macro invocation in a trait impl. fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool) -> PResult<'a, (Ident, Vec<Attribute>, ast::Generics, ast::ImplItemKind)> { // code copied from parse_macro_use_or_failure... abstraction! if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? { // Method macro. Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(), ast::ImplItemKind::Macro(mac))) } else { let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?; let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; let decl = self.parse_fn_decl_with_self(|p| p.parse_arg())?; generics.where_clause = self.parse_where_clause()?; *at_end = true; let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; let header = ast::FnHeader { abi, unsafety, constness, asyncness }; Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method( ast::MethodSig { header, decl }, body ))) } } /// Parse `trait Foo { ... 
/// Parses `trait Foo { ... }` or the trait-alias form `trait Foo = Bar;`.
fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
    let ident = self.parse_ident()?;
    let mut tps = self.parse_generics()?;

    // Parse optional colon and supertrait bounds.
    let bounds = if self.eat(&token::Colon) {
        self.parse_generic_bounds()?
    } else {
        Vec::new()
    };

    if self.eat(&token::Eq) {
        // it's a trait alias
        let bounds = self.parse_generic_bounds()?;
        tps.where_clause = self.parse_where_clause()?;
        self.expect(&token::Semi)?;
        // `unsafe trait Foo = Bar;` is rejected here (after parsing, for recovery).
        if unsafety != Unsafety::Normal {
            self.span_err(self.prev_span, "trait aliases cannot be unsafe");
        }
        Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
    } else {
        // it's a normal trait
        tps.where_clause = self.parse_where_clause()?;
        self.expect(&token::OpenDelim(token::Brace))?;
        let mut trait_items = vec![];
        while !self.eat(&token::CloseDelim(token::Brace)) {
            let mut at_end = false;
            match self.parse_trait_item(&mut at_end) {
                Ok(item) => trait_items.push(item),
                Err(mut e) => {
                    // Report the error but keep going so later items are still
                    // parsed; only re-synchronize if the item parser stopped
                    // partway through an item.
                    e.emit();
                    if !at_end {
                        self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
                    }
                }
            }
        }
        Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
    }
}

fn choose_generics_over_qpath(&self) -> bool {
    // There's an ambiguity between generic parameters and qualified paths in impls.
    // If we see `<` it may start both, so we have to inspect some following tokens.
    // The following combinations can only start generics,
    // but not qualified paths (with one exception):
    //     `<` `>` - empty generic parameters
    //     `<` `#` - generic parameters with attributes
    //     `<` (LIFETIME|IDENT) `>` - single generic parameter
    //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
    //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
    //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
    // The only truly ambiguous case is
    //     `<` IDENT `>` `::` IDENT ...
    // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ...
}`) // because this is what almost always expected in practice, qualified paths in impls // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment. self.token == token::Lt && (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) || self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) && self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma || t == &token::Colon || t == &token::Eq)) } fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> { self.expect(&token::OpenDelim(token::Brace))?; let attrs = self.parse_inner_attributes()?; let mut impl_items = Vec::new(); while !self.eat(&token::CloseDelim(token::Brace)) { let mut at_end = false; match self.parse_impl_item(&mut at_end) { Ok(impl_item) => impl_items.push(impl_item), Err(mut err) => { err.emit(); if !at_end { self.recover_stmt_(SemiColonMode::Break, BlockMode::Break); } } } } Ok((impl_items, attrs)) } /// Parses an implementation item, `impl` keyword is already parsed. /// impl<'a, T> TYPE { /* impl items */ } /// impl<'a, T> TRAIT for TYPE { /* impl items */ } /// impl<'a, T> !TRAIT for TYPE { /* impl items */ } /// We actually parse slightly more relaxed grammar for better error reporting and recovery. /// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}` /// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}` fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness) -> PResult<'a, ItemInfo> { // First, parse generic parameters if necessary. let mut generics = if self.choose_generics_over_qpath() { self.parse_generics()? } else { ast::Generics::default() }; // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type. 
let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) { self.bump(); // `!` ast::ImplPolarity::Negative } else { ast::ImplPolarity::Positive }; // Parse both types and traits as a type, then reinterpret if necessary. let ty_first = self.parse_ty()?; // If `for` is missing we try to recover. let has_for = self.eat_keyword(keywords::For); let missing_for_span = self.prev_span.between(self.span); let ty_second = if self.token == token::DotDot { // We need to report this error after `cfg` expansion for compatibility reasons self.bump(); // `..`, do not add it to expected tokens Some(P(Ty { node: TyKind::Err, span: self.prev_span, id: ast::DUMMY_NODE_ID })) } else if has_for || self.token.can_begin_type() { Some(self.parse_ty()?) } else { None }; generics.where_clause = self.parse_where_clause()?; let (impl_items, attrs) = self.parse_impl_body()?; let item_kind = match ty_second { Some(ty_second) => { // impl Trait for Type if !has_for { self.span_err(missing_for_span, "missing `for` in a trait impl"); } let ty_first = ty_first.into_inner(); let path = match ty_first.node { // This notably includes paths passed through `ty` macro fragments (#46438). 
TyKind::Path(None, path) => path, _ => { self.span_err(ty_first.span, "expected a trait, found type"); ast::Path::from_ident(Ident::new(keywords::Invalid.name(), ty_first.span)) } }; let trait_ref = TraitRef { path, ref_id: ty_first.id }; ItemKind::Impl(unsafety, polarity, defaultness, generics, Some(trait_ref), ty_second, impl_items) } None => { // impl Type ItemKind::Impl(unsafety, polarity, defaultness, generics, None, ty_first, impl_items) } }; Ok((keywords::Invalid.ident(), item_kind, Some(attrs))) } fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> { if self.eat_keyword(keywords::For) { self.expect_lt()?; let params = self.parse_generic_params()?; self.expect_gt()?; // We rely on AST validation to rule out invalid cases: There must not be type // parameters, and the lifetime parameters must not have bounds. Ok(params) } else { Ok(Vec::new()) } } /// Parse struct Foo { ... } fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { let class_name = self.parse_ident()?; let mut generics = self.parse_generics()?; // There is a special case worth noting here, as reported in issue #17904. // If we are parsing a tuple struct it is the case that the where clause // should follow the field list. Like so: // // struct Foo<T>(T) where T: Copy; // // If we are parsing a normal record-style struct it is the case // that the where clause comes before the body, and after the generics. // So if we look ahead and see a brace or a where-clause we begin // parsing a record style struct. // // Otherwise if we look ahead and see a paren we parse a tuple-style // struct. let vdata = if self.token.is_keyword(keywords::Where) { generics.where_clause = self.parse_where_clause()?; if self.eat(&token::Semi) { // If we see a: `struct Foo<T> where T: Copy;` style decl. VariantData::Unit(ast::DUMMY_NODE_ID) } else { // If we see: `struct Foo<T> where T: Copy { ... 
}` VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID) } // No `where` so: `struct Foo<T>;` } else if self.eat(&token::Semi) { VariantData::Unit(ast::DUMMY_NODE_ID) // Record-style struct definition } else if self.token == token::OpenDelim(token::Brace) { VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID) // Tuple-style struct definition with optional where-clause. } else if self.token == token::OpenDelim(token::Paren) { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID); generics.where_clause = self.parse_where_clause()?; self.expect(&token::Semi)?; body } else { let token_str = self.this_token_to_string(); let mut err = self.fatal(&format!( "expected `where`, `{{`, `(`, or `;` after struct name, found `{}`", token_str )); err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name"); return Err(err); }; Ok((class_name, ItemKind::Struct(vdata, generics), None)) } /// Parse union Foo { ... } fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> { let class_name = self.parse_ident()?; let mut generics = self.parse_generics()?; let vdata = if self.token.is_keyword(keywords::Where) { generics.where_clause = self.parse_where_clause()?; VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID) } else if self.token == token::OpenDelim(token::Brace) { VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID) } else { let token_str = self.this_token_to_string(); let mut err = self.fatal(&format!( "expected `where` or `{{` after union name, found `{}`", token_str)); err.span_label(self.span, "expected `where` or `{` after union name"); return Err(err); }; Ok((class_name, ItemKind::Union(vdata, generics), None)) } fn consume_block(&mut self, delim: token::DelimToken) { let mut brace_depth = 0; if !self.eat(&token::OpenDelim(delim)) { return; } loop { if self.eat(&token::OpenDelim(delim)) { brace_depth += 1; } else if 
self.eat(&token::CloseDelim(delim)) { if brace_depth == 0 { return; } else { brace_depth -= 1; continue; } } else if self.eat(&token::Eof) || self.eat(&token::CloseDelim(token::NoDelim)) { return; } else { self.bump(); } } } fn parse_record_struct_body(&mut self) -> PResult<'a, Vec<StructField>> { let mut fields = Vec::new(); if self.eat(&token::OpenDelim(token::Brace)) { while self.token != token::CloseDelim(token::Brace) { let field = self.parse_struct_decl_field().map_err(|e| { self.recover_stmt(); e }); match field { Ok(field) => fields.push(field), Err(mut err) => { err.emit(); } } } self.eat(&token::CloseDelim(token::Brace)); } else { let token_str = self.this_token_to_string(); let mut err = self.fatal(&format!( "expected `where`, or `{{` after struct name, found `{}`", token_str)); err.span_label(self.span, "expected `where`, or `{` after struct name"); return Err(err); } Ok(fields) } fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> { // This is the case where we find `struct Foo<T>(T) where T: Copy;` // Unit like structs are handled in parse_item_struct function let fields = self.parse_unspanned_seq( &token::OpenDelim(token::Paren), &token::CloseDelim(token::Paren), SeqSep::trailing_allowed(token::Comma), |p| { let attrs = p.parse_outer_attributes()?; let lo = p.span; let vis = p.parse_visibility(true)?; let ty = p.parse_ty()?; Ok(StructField { span: lo.to(ty.span), vis, ident: None, id: ast::DUMMY_NODE_ID, ty, attrs, }) })?; Ok(fields) } /// Parse a structure field declaration fn parse_single_struct_field(&mut self, lo: Span, vis: Visibility, attrs: Vec<Attribute> ) -> PResult<'a, StructField> { let mut seen_comma: bool = false; let a_var = self.parse_name_and_ty(lo, vis, attrs)?; if self.token == token::Comma { seen_comma = true; } match self.token { token::Comma => { self.bump(); } token::CloseDelim(token::Brace) => {} token::DocComment(_) => { let previous_span = self.prev_span; let mut err = self.span_fatal_err(self.span, 
Error::UselessDocComment); self.bump(); // consume the doc comment let comma_after_doc_seen = self.eat(&token::Comma); // `seen_comma` is always false, because we are inside doc block // condition is here to make code more readable if seen_comma == false && comma_after_doc_seen == true { seen_comma = true; } if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) { err.emit(); } else { if seen_comma == false { let sp = self.sess.source_map().next_point(previous_span); err.span_suggestion_with_applicability( sp, "missing comma here", ",".into(), Applicability::MachineApplicable ); } return Err(err); } } _ => { let sp = self.sess.source_map().next_point(self.prev_span); let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found `{}`", self.this_token_to_string())); if self.token.is_ident() { // This is likely another field; emit the diagnostic and keep going err.span_suggestion(sp, "try adding a comma", ",".into()); err.emit(); } else { return Err(err) } } } Ok(a_var) } /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; let lo = self.span; let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `pub(self)` for `pub(in self)` /// and `pub(super)` for `pub(in super)`. If the following element can't be a tuple (i.e. it's /// a function definition, it's not a tuple struct field) and the contents within the parens /// isn't valid, emit a proper diagnostic. 
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
    maybe_whole!(self, NtVis, |x| x);

    self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
    if self.is_crate_vis() {
        self.bump(); // `crate`
        return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
    }

    if !self.eat_keyword(keywords::Pub) {
        // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
        // keyword to grab a span from for inherited visibility; an empty span at the
        // beginning of the current token would seem to be the "Schelling span".
        return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
    }
    let lo = self.prev_span;

    if self.check(&token::OpenDelim(token::Paren)) {
        // We don't `self.bump()` the `(` yet because this might be a struct definition where
        // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
        // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
        // by the following tokens.
        if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
            // `pub(crate)`
            self.bump(); // `(`
            self.bump(); // `crate`
            self.expect(&token::CloseDelim(token::Paren))?; // `)`
            let vis = respan(
                lo.to(self.prev_span),
                VisibilityKind::Crate(CrateSugar::PubCrate),
            );
            return Ok(vis)
        } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
            // `pub(in path)`
            self.bump(); // `(`
            self.bump(); // `in`
            let path = self.parse_path(PathStyle::Mod)?; // `path`
            self.expect(&token::CloseDelim(token::Paren))?; // `)`
            let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                path: P(path),
                id: ast::DUMMY_NODE_ID,
            });
            return Ok(vis)
        } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
                  self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
                                         t.is_keyword(keywords::SelfValue)) {
            // `pub(self)` or `pub(super)` — note the `)` is checked first (at
            // lookahead 2) so that `pub (super::foo)` falls through to the
            // diagnostic branch below instead.
            self.bump(); // `(`
            let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
            self.expect(&token::CloseDelim(token::Paren))?; // `)`
            let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                path: P(path),
                id: ast::DUMMY_NODE_ID,
            });
            return Ok(vis)
        } else if !can_take_tuple {
            // Provide this diagnostic if this is not a tuple struct
            // `pub(something) fn ...` or `struct X { pub(something) y: Z }`
            self.bump(); // `(`
            let msg = "incorrect visibility restriction";
            let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;
            let path = self.parse_path(PathStyle::Mod)?;
            let sp = self.prev_span;
            let help_msg = format!("make this visible only to module `{}` with `in`", path);
            self.expect(&token::CloseDelim(token::Paren))?; // `)`
            let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
            err.help(suggestion);
            err.span_suggestion_with_applicability(
                sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
            );
            err.emit(); // emit diagnostic, but continue with public visibility
        }
    }

    Ok(respan(lo, VisibilityKind::Public))
}

/// Parse defaultness: `default` or nothing.
fn parse_defaultness(&mut self) -> Defaultness {
    // `pub` is included for better error messages
    if self.check_keyword(keywords::Default) &&
       self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
                              t.is_keyword(keywords::Const) ||
                              t.is_keyword(keywords::Fn) ||
                              t.is_keyword(keywords::Unsafe) ||
                              t.is_keyword(keywords::Extern) ||
                              t.is_keyword(keywords::Type) ||
                              t.is_keyword(keywords::Pub)) {
        self.bump(); // `default`
        Defaultness::Default
    } else {
        Defaultness::Final
    }
}

/// Given a termination token, parse all of the items in a module
fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
    let mut items = vec![];
    while let Some(item) = self.parse_item()? {
        items.push(item);
    }

    if !self.eat(term) {
        let token_str = self.this_token_to_string();
        let mut err = self.fatal(&format!("expected item, found `{}`", token_str));
        if token_str == ";" {
            // A stray `;` after an item is a common mistake; suggest removing it.
            let msg = "consider removing this semicolon";
            err.span_suggestion_short_with_applicability(
                self.span, msg, "".to_string(), Applicability::MachineApplicable
            );
            if !items.is_empty() {
                // Issue #51603
                let previous_item = &items[items.len()-1];
                let previous_item_kind_name = match previous_item.node {
                    // say "braced struct" because tuple-structs and
                    // braceless-empty-struct declarations do take a semicolon
                    ItemKind::Struct(..) => Some("braced struct"),
                    ItemKind::Enum(..) => Some("enum"),
                    ItemKind::Trait(..) => Some("trait"),
                    ItemKind::Union(..)
=> Some("union"), _ => None, }; if let Some(name) = previous_item_kind_name { err.help(&format!("{} declarations are not followed by a semicolon", name)); } } } else { err.span_label(self.span, "expected item"); } return Err(err); } let hi = if self.span.is_dummy() { inner_lo } else { self.prev_span }; Ok(ast::Mod { inner: inner_lo.to(hi), items, }) } fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> { let id = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; self.expect(&token::Eq)?; let e = self.parse_expr()?; self.expect(&token::Semi)?; let item = match m { Some(m) => ItemKind::Static(ty, m, e), None => ItemKind::Const(ty, e), }; Ok((id, item, None)) } /// Parse a `mod <foo> { ... }` or `mod <foo>;` item fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> { let (in_cfg, outer_attrs) = { let mut strip_unconfigured = ::config::StripUnconfigured { sess: self.sess, should_test: false, // irrelevant features: None, // don't perform gated feature checking }; let outer_attrs = strip_unconfigured.process_cfg_attrs(outer_attrs.to_owned()); (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs) }; let id_span = self.span; let id = self.parse_ident()?; if self.check(&token::Semi) { self.bump(); if in_cfg && self.recurse_into_file_modules { // This mod is in an external file. Let's go get it! 
let ModulePathSuccess { path, directory_ownership, warn } = self.submod_path(id, &outer_attrs, id_span)?; let (module, mut attrs) = self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?; if warn { let attr = Attribute { id: attr::mk_attr_id(), style: ast::AttrStyle::Outer, path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")), tokens: TokenStream::empty(), is_sugared_doc: false, span: syntax_pos::DUMMY_SP, }; attr::mark_known(&attr); attrs.push(attr); } Ok((id, module, Some(attrs))) } else { let placeholder = ast::Mod { inner: syntax_pos::DUMMY_SP, items: Vec::new() }; Ok((id, ItemKind::Mod(placeholder), None)) } } else { let old_directory = self.directory.clone(); self.push_directory(id, &outer_attrs); self.expect(&token::OpenDelim(token::Brace))?; let mod_inner_lo = self.span; let attrs = self.parse_inner_attributes()?; let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; self.directory = old_directory; Ok((id, ItemKind::Mod(module), Some(attrs))) } } fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") { self.directory.path.to_mut().push(&path.as_str()); self.directory.ownership = DirectoryOwnership::Owned { relative: None }; } else { self.directory.path.to_mut().push(&id.as_str()); } } pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { if let Some(s) = attr::first_attr_value_str_by_name(attrs, "path") { let s = s.as_str(); // On windows, the base path might have the form // `\\?\foo\bar` in which case it does not tolerate // mixed `/` and `\` separators, so canonicalize // `/` to `\`. #[cfg(windows)] let s = s.replace("/", "\\"); Some(dir_path.join(s)) } else { None } } /// Returns either a path to a module, or . 
// Resolves the on-disk location for `mod <id>;`: either `<dir>/<id>.rs` or
// `<dir>/<id>/mod.rs` (prefixed with the parent module's directory when the
// parent itself lives in a non-mod.rs file). Exactly one of the two candidate
// files must exist; both or neither produces an error in `result`.
pub fn default_submod_path(
    id: ast::Ident,
    relative: Option<ast::Ident>,
    dir_path: &Path,
    source_map: &SourceMap) -> ModulePath
{
    // If we're in a foo.rs file instead of a mod.rs file,
    // we need to look for submodules in
    // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
    // `./<id>.rs` and `./<id>/mod.rs`.
    let relative_prefix_string;
    let relative_prefix = if let Some(ident) = relative {
        relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
        &relative_prefix_string
    } else {
        ""
    };

    let mod_name = id.to_string();
    let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
    let secondary_path_str = format!("{}{}{}mod.rs",
                                     relative_prefix, mod_name, path::MAIN_SEPARATOR);
    let default_path = dir_path.join(&default_path_str);
    let secondary_path = dir_path.join(&secondary_path_str);
    let default_exists = source_map.file_exists(&default_path);
    let secondary_exists = source_map.file_exists(&secondary_path);

    let result = match (default_exists, secondary_exists) {
        (true, false) => Ok(ModulePathSuccess {
            path: default_path,
            // A `foo.rs` module owns its directory "relatively": submodules of
            // `foo` are looked up under `./foo/`.
            directory_ownership: DirectoryOwnership::Owned {
                relative: Some(id),
            },
            warn: false,
        }),
        (false, true) => Ok(ModulePathSuccess {
            path: secondary_path,
            directory_ownership: DirectoryOwnership::Owned {
                relative: None,
            },
            warn: false,
        }),
        (false, false) => Err(Error::FileNotFoundForModule {
            mod_name: mod_name.clone(),
            default_path: default_path_str,
            secondary_path: secondary_path_str,
            dir_path: dir_path.display().to_string(),
        }),
        (true, true) => Err(Error::DuplicatePaths {
            mod_name: mod_name.clone(),
            default_path: default_path_str,
            secondary_path: secondary_path_str,
        }),
    };

    ModulePath {
        name: mod_name,
        path_exists: default_exists || secondary_exists,
        result,
    }
}

// Computes the path/ownership for an out-of-line `mod <id>;`; an explicit
// `#[path]` attribute overrides the default file-system lookup.
fn submod_path(&mut self,
               id: ast::Ident,
               outer_attrs: &[Attribute],
               id_sp: Span) -> PResult<'a, ModulePathSuccess> {
    if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
        return Ok(ModulePathSuccess {
            directory_ownership: match
path.file_name().and_then(|s| s.to_str()) { // All `#[path]` files are treated as though they are a `mod.rs` file. // This means that `mod foo;` declarations inside `#[path]`-included // files are siblings, // // Note that this will produce weirdness when a file named `foo.rs` is // `#[path]` included and contains a `mod foo;` declaration. // If you encounter this, it's your own darn fault :P Some(_) => DirectoryOwnership::Owned { relative: None }, _ => DirectoryOwnership::UnownedViaMod(true), }, path, warn: false, }); } let relative = match self.directory.ownership { DirectoryOwnership::Owned { relative } => { // Push the usage onto the list of non-mod.rs mod uses. // This is used later for feature-gate error reporting. if let Some(cur_file_ident) = relative { self.sess .non_modrs_mods.borrow_mut() .push((cur_file_ident, id_sp)); } relative }, DirectoryOwnership::UnownedViaBlock | DirectoryOwnership::UnownedViaMod(_) => None, }; let paths = Parser::default_submod_path( id, relative, &self.directory.path, self.sess.source_map()); match self.directory.ownership { DirectoryOwnership::Owned { .. 
} => { paths.result.map_err(|err| self.span_fatal_err(id_sp, err)) }, DirectoryOwnership::UnownedViaBlock => { let msg = "Cannot declare a non-inline module inside a block \ unless it has a path attribute"; let mut err = self.diagnostic().struct_span_err(id_sp, msg); if paths.path_exists { let msg = format!("Maybe `use` the module `{}` instead of redeclaring it", paths.name); err.span_note(id_sp, &msg); } Err(err) } DirectoryOwnership::UnownedViaMod(warn) => { if warn { if let Ok(result) = paths.result { return Ok(ModulePathSuccess { warn: true, ..result }); } } let mut err = self.diagnostic().struct_span_err(id_sp, "cannot declare a new module at this location"); if !id_sp.is_dummy() { let src_path = self.sess.source_map().span_to_filename(id_sp); if let FileName::Real(src_path) = src_path { if let Some(stem) = src_path.file_stem() { let mut dest_path = src_path.clone(); dest_path.set_file_name(stem); dest_path.push("mod.rs"); err.span_note(id_sp, &format!("maybe move this module `{}` to its own \ directory via `{}`", src_path.display(), dest_path.display())); } } } if paths.path_exists { err.span_note(id_sp, &format!("... or maybe `use` the module `{}` instead \ of possibly redeclaring it", paths.name)); } Err(err) } } } /// Read a module from a source file. fn eval_src_mod(&mut self, path: PathBuf, directory_ownership: DirectoryOwnership, name: String, id_sp: Span) -> PResult<'a, (ast::ItemKind, Vec<Attribute> )> { let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut(); if let Some(i) = included_mod_stack.iter().position(|p| *p == path) { let mut err = String::from("circular modules: "); let len = included_mod_stack.len(); for p in &included_mod_stack[i.. 
len] { err.push_str(&p.to_string_lossy()); err.push_str(" -> "); } err.push_str(&path.to_string_lossy()); return Err(self.span_fatal(id_sp, &err[..])); } included_mod_stack.push(path.clone()); drop(included_mod_stack); let mut p0 = new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); p0.cfg_mods = self.cfg_mods; let mod_inner_lo = p0.span; let mod_attrs = p0.parse_inner_attributes()?; let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; self.sess.included_mod_stack.borrow_mut().pop(); Ok((ast::ItemKind::Mod(m0), mod_attrs)) } /// Parse a function declaration from a foreign module fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Fn)?; let (ident, mut generics) = self.parse_fn_header()?; let decl = self.parse_fn_decl(true)?; generics.where_clause = self.parse_where_clause()?; let hi = self.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { ident, attrs, node: ForeignItemKind::Fn(decl, generics), id: ast::DUMMY_NODE_ID, span: lo.to(hi), vis, }) } /// Parse a static item from a foreign module. /// Assumes that the `static` keyword is already parsed. 
fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { let mutbl = self.eat_keyword(keywords::Mut); let ident = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; let hi = self.span; self.expect(&token::Semi)?; Ok(ForeignItem { ident, attrs, node: ForeignItemKind::Static(ty, mutbl), id: ast::DUMMY_NODE_ID, span: lo.to(hi), vis, }) } /// Parse a type from a foreign module fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Type)?; let ident = self.parse_ident()?; let hi = self.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Ty, id: ast::DUMMY_NODE_ID, span: lo.to(hi), vis: vis }) } fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> { let error_msg = "crate name using dashes are not valid in `extern crate` statements"; let suggestion_msg = "if the original crate name uses dashes you need to use underscores \ in the code"; let mut ident = self.parse_ident()?; let mut idents = vec![]; let mut replacement = vec![]; let mut fixed_crate_name = false; // Accept `extern crate name-like-this` for better diagnostics let dash = token::Token::BinOp(token::BinOpToken::Minus); if self.token == dash { // Do not include `-` as part of the expected tokens list while self.eat(&dash) { fixed_crate_name = true; replacement.push((self.prev_span, "_".to_string())); idents.push(self.parse_ident()?); } } if fixed_crate_name { let fixed_name_sp = ident.span.to(idents.last().unwrap().span); let mut fixed_name = format!("{}", ident.name); for part in idents { fixed_name.push_str(&format!("_{}", part.name)); } ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp); let mut err = self.struct_span_err(fixed_name_sp, error_msg); err.span_label(fixed_name_sp, "dash-separated idents are not valid"); 
err.multipart_suggestion(suggestion_msg, replacement); err.emit(); } Ok(ident) } /// Parse extern crate links /// /// # Examples /// /// extern crate foo; /// extern crate bar as foo; fn parse_item_extern_crate(&mut self, lo: Span, visibility: Visibility, attrs: Vec<Attribute>) -> PResult<'a, P<Item>> { // Accept `extern crate name-like-this` for better diagnostics let orig_name = self.parse_crate_name_with_dashes()?; let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? { (rename, Some(orig_name.name)) } else { (orig_name, None) }; self.expect(&token::Semi)?; let span = lo.to(self.prev_span); Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs)) } /// Parse `extern` for foreign ABIs /// modules. /// /// `extern` is expected to have been /// consumed before calling this method /// /// # Examples: /// /// extern "C" {} /// extern {} fn parse_item_foreign_mod(&mut self, lo: Span, opt_abi: Option<Abi>, visibility: Visibility, mut attrs: Vec<Attribute>) -> PResult<'a, P<Item>> { self.expect(&token::OpenDelim(token::Brace))?; let abi = opt_abi.unwrap_or(Abi::C); attrs.extend(self.parse_inner_attributes()?); let mut foreign_items = vec![]; while let Some(item) = self.parse_foreign_item()? { foreign_items.push(item); } self.expect(&token::CloseDelim(token::Brace))?; let prev_span = self.prev_span; let m = ast::ForeignMod { abi, items: foreign_items }; let invalid = keywords::Invalid.ident(); Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) } /// Parse type Foo = Bar; /// or /// existential type Foo: Bar; /// or /// return None without modifying the parser state fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> { // This parses the grammar: // Ident ["<"...">"] ["where" ...] 
("=" | ":") Ty ";" if self.check_keyword(keywords::Type) || self.check_keyword(keywords::Existential) && self.look_ahead(1, |t| t.is_keyword(keywords::Type)) { let existential = self.eat_keyword(keywords::Existential); assert!(self.eat_keyword(keywords::Type)); Some(self.parse_existential_or_alias(existential)) } else { None } } /// Parse type alias or existential type fn parse_existential_or_alias( &mut self, existential: bool, ) -> PResult<'a, (Ident, AliasKind, ast::Generics)> { let ident = self.parse_ident()?; let mut tps = self.parse_generics()?; tps.where_clause = self.parse_where_clause()?; let alias = if existential { self.expect(&token::Colon)?; let bounds = self.parse_generic_bounds()?; AliasKind::Existential(bounds) } else { self.expect(&token::Eq)?; let ty = self.parse_ty()?; AliasKind::Weak(ty) }; self.expect(&token::Semi)?; Ok((ident, alias, tps)) } /// Parse the part of an "enum" decl following the '{' fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> { let mut variants = Vec::new(); let mut all_nullary = true; let mut any_disr = None; while self.token != token::CloseDelim(token::Brace) { let variant_attrs = self.parse_outer_attributes()?; let vlo = self.span; let struct_def; let mut disr_expr = None; let ident = self.parse_ident()?; if self.check(&token::OpenDelim(token::Brace)) { // Parse a struct variant. 
all_nullary = false; struct_def = VariantData::Struct(self.parse_record_struct_body()?, ast::DUMMY_NODE_ID); } else if self.check(&token::OpenDelim(token::Paren)) { all_nullary = false; struct_def = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID); } else if self.eat(&token::Eq) { disr_expr = Some(AnonConst { id: ast::DUMMY_NODE_ID, value: self.parse_expr()?, }); any_disr = disr_expr.as_ref().map(|c| c.value.span); struct_def = VariantData::Unit(ast::DUMMY_NODE_ID); } else { struct_def = VariantData::Unit(ast::DUMMY_NODE_ID); } let vr = ast::Variant_ { ident, attrs: variant_attrs, data: struct_def, disr_expr, }; variants.push(respan(vlo.to(self.prev_span), vr)); if !self.eat(&token::Comma) { break; } } self.expect(&token::CloseDelim(token::Brace))?; match any_disr { Some(disr_span) if !all_nullary => self.span_err(disr_span, "discriminator values can only be used with a field-less enum"), _ => () } Ok(ast::EnumDef { variants: variants }) } /// Parse an "enum" declaration fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { let id = self.parse_ident()?; let mut generics = self.parse_generics()?; generics.where_clause = self.parse_where_clause()?; self.expect(&token::OpenDelim(token::Brace))?; let enum_definition = self.parse_enum_def(&generics).map_err(|e| { self.recover_stmt(); self.eat(&token::CloseDelim(token::Brace)); e })?; Ok((id, ItemKind::Enum(enum_definition, generics), None)) } /// Parses a string as an ABI spec on an extern type or module. Consumes /// the `extern` keyword, if one is found. 
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> { match self.token { token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => { let sp = self.span; self.expect_no_suffix(sp, "ABI spec", suf); self.bump(); match abi::lookup(&s.as_str()) { Some(abi) => Ok(Some(abi)), None => { let prev_span = self.prev_span; let mut err = struct_span_err!( self.sess.span_diagnostic, prev_span, E0703, "invalid ABI: found `{}`", s); err.span_label(prev_span, "invalid ABI"); err.help(&format!("valid ABIs: {}", abi::all_names().join(", "))); err.emit(); Ok(None) } } } _ => Ok(None), } } fn is_static_global(&mut self) -> bool { if self.check_keyword(keywords::Static) { // Check if this could be a closure !self.look_ahead(1, |token| { if token.is_keyword(keywords::Move) { return true; } match *token { token::BinOp(token::Or) | token::OrOr => true, _ => false, } }) } else { false } } fn parse_item_( &mut self, attrs: Vec<Attribute>, macros_allowed: bool, attributes_allowed: bool, ) -> PResult<'a, Option<P<Item>>> { let (ret, tokens) = self.collect_tokens(|this| { this.parse_item_implementation(attrs, macros_allowed, attributes_allowed) })?; // Once we've parsed an item and recorded the tokens we got while // parsing we may want to store `tokens` into the item we're about to // return. Note, though, that we specifically didn't capture tokens // related to outer attributes. The `tokens` field here may later be // used with procedural macros to convert this item back into a token // stream, but during expansion we may be removing attributes as we go // along. // // If we've got inner attributes then the `tokens` we've got above holds // these inner attributes. If an inner attribute is expanded we won't // actually remove it from the token stream, so we'll just keep yielding // it (bad!). To work around this case for now we just avoid recording // `tokens` if we detect any inner attributes. 
This should help keep // expansion correct, but we should fix this bug one day! Ok(ret.map(|item| { item.map(|mut i| { if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { i.tokens = Some(tokens); } i }) })) } /// Parse one of the items allowed by the flags. fn parse_item_implementation( &mut self, attrs: Vec<Attribute>, macros_allowed: bool, attributes_allowed: bool, ) -> PResult<'a, Option<P<Item>>> { maybe_whole!(self, NtItem, |item| { let mut item = item.into_inner(); let mut attrs = attrs; mem::swap(&mut item.attrs, &mut attrs); item.attrs.extend(attrs); Some(P(item)) }); let lo = self.span; let visibility = self.parse_visibility(false)?; if self.eat_keyword(keywords::Use) { // USE ITEM let item_ = ItemKind::Use(P(self.parse_use_tree()?)); self.expect(&token::Semi)?; let span = lo.to(self.prev_span); let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs); return Ok(Some(item)); } if self.check_keyword(keywords::Extern) && self.is_extern_non_path() { self.bump(); // `extern` if self.eat_keyword(keywords::Crate) { return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?)); } let opt_abi = self.parse_opt_abi()?; if self.eat_keyword(keywords::Fn) { // EXTERN FUNCTION ITEM let fn_span = self.prev_span; let abi = opt_abi.unwrap_or(Abi::C); let (ident, item_, extra_attrs) = self.parse_item_fn(Unsafety::Normal, IsAsync::NotAsync, respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } else if self.check(&token::OpenDelim(token::Brace)) { return Ok(Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)?)); } self.unexpected()?; } if self.is_static_global() { self.bump(); // STATIC ITEM let m = if self.eat_keyword(keywords::Mut) { Mutability::Mutable } else { Mutability::Immutable }; let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?; let prev_span = 
self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.eat_keyword(keywords::Const) { let const_span = self.prev_span; if self.check_keyword(keywords::Fn) || (self.check_keyword(keywords::Unsafe) && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) { // CONST FUNCTION ITEM let unsafety = self.parse_unsafety(); self.bump(); let (ident, item_, extra_attrs) = self.parse_item_fn(unsafety, IsAsync::NotAsync, respan(const_span, Constness::Const), Abi::Rust)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } // CONST ITEM if self.eat_keyword(keywords::Mut) { let prev_span = self.prev_span; self.diagnostic().struct_span_err(prev_span, "const globals cannot be mutable") .help("did you mean to declare a static?") .emit(); } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } // `unsafe async fn` or `async fn` if ( self.check_keyword(keywords::Unsafe) && self.look_ahead(1, |t| t.is_keyword(keywords::Async)) ) || ( self.check_keyword(keywords::Async) && self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) ) { // ASYNC FUNCTION ITEM let unsafety = self.parse_unsafety(); self.expect_keyword(keywords::Async)?; self.expect_keyword(keywords::Fn)?; let fn_span = self.prev_span; let (ident, item_, extra_attrs) = self.parse_item_fn(unsafety, IsAsync::Async { closure_id: ast::DUMMY_NODE_ID, return_impl_trait_id: ast::DUMMY_NODE_ID, }, respan(fn_span, Constness::NotConst), Abi::Rust)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.check_keyword(keywords::Unsafe) && (self.look_ahead(1, |t| 
t.is_keyword(keywords::Trait)) || self.look_ahead(1, |t| t.is_keyword(keywords::Auto))) { // UNSAFE TRAIT ITEM self.bump(); // `unsafe` let is_auto = if self.eat_keyword(keywords::Trait) { IsAuto::No } else { self.expect_keyword(keywords::Auto)?; self.expect_keyword(keywords::Trait)?; IsAuto::Yes }; let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Unsafe)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.check_keyword(keywords::Impl) || self.check_keyword(keywords::Unsafe) && self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) || self.check_keyword(keywords::Default) && self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) || self.check_keyword(keywords::Default) && self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) { // IMPL ITEM let defaultness = self.parse_defaultness(); let unsafety = self.parse_unsafety(); self.expect_keyword(keywords::Impl)?; let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?; let span = lo.to(self.prev_span); return Ok(Some(self.mk_item(span, ident, item, visibility, maybe_append(attrs, extra_attrs)))); } if self.check_keyword(keywords::Fn) { // FUNCTION ITEM self.bump(); let fn_span = self.prev_span; let (ident, item_, extra_attrs) = self.parse_item_fn(Unsafety::Normal, IsAsync::NotAsync, respan(fn_span, Constness::NotConst), Abi::Rust)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.check_keyword(keywords::Unsafe) && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) { // UNSAFE FUNCTION ITEM self.bump(); // `unsafe` // `{` is also expected after `unsafe`, in case of error, include it in the diagnostic self.check(&token::OpenDelim(token::Brace)); let abi = if self.eat_keyword(keywords::Extern) { self.parse_opt_abi()?.unwrap_or(Abi::C) } 
else { Abi::Rust }; self.expect_keyword(keywords::Fn)?; let fn_span = self.prev_span; let (ident, item_, extra_attrs) = self.parse_item_fn(Unsafety::Unsafe, IsAsync::NotAsync, respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if let Some(type_) = self.eat_type() { let (ident, alias, generics) = type_?; // TYPE ITEM let item_ = match alias { AliasKind::Weak(ty) => ItemKind::Ty(ty, generics), AliasKind::Existential(bounds) => ItemKind::Existential(bounds, generics), }; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, attrs); return Ok(Some(item)); } if self.eat_keyword(keywords::Enum) { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum()?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.check_keyword(keywords::Trait) || (self.check_keyword(keywords::Auto) && self.look_ahead(1, |t| t.is_keyword(keywords::Trait))) { let is_auto = if self.eat_keyword(keywords::Trait) { IsAuto::No } else { self.expect_keyword(keywords::Auto)?; self.expect_keyword(keywords::Trait)?; IsAuto::Yes }; // TRAIT ITEM let (ident, item_, extra_attrs) = self.parse_item_trait(is_auto, Unsafety::Normal)?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.eat_keyword(keywords::Struct) { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct()?; let 
prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if self.is_union_item() { // UNION ITEM self.bump(); let (ident, item_, extra_attrs) = self.parse_item_union()?; let prev_span = self.prev_span; let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? { return Ok(Some(macro_def)); } // Verify whether we have encountered a struct or method definition where the user forgot to // add the `struct` or `fn` keyword after writing `pub`: `pub S {}` if visibility.node.is_pub() && self.check_ident() && self.look_ahead(1, |t| *t != token::Not) { // Space between `pub` keyword and the identifier // // pub S {} // ^^^ `sp` points here let sp = self.prev_span.between(self.span); let full_sp = self.prev_span.to(self.span); let ident_sp = self.span; if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) { // possible public struct definition where `struct` was forgotten let ident = self.parse_ident().unwrap(); let msg = format!("add `struct` here to parse `{}` as a public struct", ident); let mut err = self.diagnostic() .struct_span_err(sp, "missing `struct` for struct definition"); err.span_suggestion_short_with_applicability( sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative ); return Err(err); } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { let ident = self.parse_ident().unwrap(); self.consume_block(token::Paren); let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) || self.check(&token::OpenDelim(token::Brace)) { ("fn", "method", false) } else if self.check(&token::Colon) { let kw = "struct"; (kw, kw, false) } else { ("fn` or `struct", "method or struct", true) }; let msg = format!("missing `{}` for {} definition", kw, kw_name); let mut err = 
self.diagnostic().struct_span_err(sp, &msg); if !ambiguous { let suggestion = format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name); err.span_suggestion_short_with_applicability( sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable ); } else { if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) { err.span_suggestion_with_applicability( full_sp, "if you meant to call a macro, try", format!("{}!", snippet), // this is the `ambiguous` conditional branch Applicability::MaybeIncorrect ); } else { err.help("if you meant to call a macro, remove the `pub` \ and add a trailing `!` after the identifier"); } } return Err(err); } } self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility) } /// Parse a foreign item. crate fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> { maybe_whole!(self, NtForeignItem, |ni| Some(ni)); let attrs = self.parse_outer_attributes()?; let lo = self.span; let visibility = self.parse_visibility(false)?; // FOREIGN STATIC ITEM // Treat `const` as `static` for error recovery, but don't add it to expected tokens. if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) { if self.token.is_keyword(keywords::Const) { self.diagnostic() .struct_span_err(self.span, "extern items cannot be `const`") .span_suggestion_with_applicability( self.span, "try using a static value", "static".to_owned(), Applicability::MachineApplicable ).emit(); } self.bump(); // `static` or `const` return Ok(Some(self.parse_item_foreign_static(visibility, lo, attrs)?)); } // FOREIGN FUNCTION ITEM if self.check_keyword(keywords::Fn) { return Ok(Some(self.parse_item_foreign_fn(visibility, lo, attrs)?)); } // FOREIGN TYPE ITEM if self.check_keyword(keywords::Type) { return Ok(Some(self.parse_item_foreign_type(visibility, lo, attrs)?)); } match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? 
{ Some(mac) => { Ok(Some( ForeignItem { ident: keywords::Invalid.ident(), span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID, attrs, vis: visibility, node: ForeignItemKind::Macro(mac), } )) } None => { if !attrs.is_empty() { self.expected_item_err(&attrs); } Ok(None) } } } /// This is the fall-through for parsing items. fn parse_macro_use_or_failure( &mut self, attrs: Vec<Attribute> , macros_allowed: bool, attributes_allowed: bool, lo: Span, visibility: Visibility ) -> PResult<'a, Option<P<Item>>> { if macros_allowed && self.token.is_path_start() { // MACRO INVOCATION ITEM let prev_span = self.prev_span; self.complain_if_pub_macro(&visibility.node, prev_span); let mac_lo = self.span; // item macro. let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. let id = if self.token.is_ident() { self.parse_ident()? } else { keywords::Invalid.ident() // no special identifier }; // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { if !self.eat(&token::Semi) { self.span_err(self.prev_span, "macros that expand to items must either \ be surrounded with braces or followed by \ a semicolon"); } } let hi = self.prev_span; let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts, delim }); let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); } // FAILURE TO PARSE ITEM match visibility.node { VisibilityKind::Inherited => {} _ => { return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`")); } } if !attributes_allowed && !attrs.is_empty() { self.expected_item_err(&attrs); } Ok(None) } /// Parse a macro invocation inside a `trait`, `impl` or `extern` block fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>, at_end: &mut bool) -> PResult<'a, Option<Mac>> { if 
self.token.is_path_start() && !self.is_extern_non_path() { let prev_span = self.prev_span; let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; if pth.segments.len() == 1 { if !self.eat(&token::Not) { return Err(self.missing_assoc_item_kind_err(item_kind, prev_span)); } } else { self.expect(&token::Not)?; } if let Some(vis) = vis { self.complain_if_pub_macro(&vis.node, prev_span); } *at_end = true; // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { self.expect(&token::Semi)? } Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim }))) } else { Ok(None) } } fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)> where F: FnOnce(&mut Self) -> PResult<'a, R> { // Record all tokens we parse when parsing this item. let mut tokens = Vec::new(); let prev_collecting = match self.token_cursor.frame.last_token { LastToken::Collecting(ref mut list) => { Some(mem::replace(list, Vec::new())) } LastToken::Was(ref mut last) => { tokens.extend(last.take()); None } }; self.token_cursor.frame.last_token = LastToken::Collecting(tokens); let prev = self.token_cursor.stack.len(); let ret = f(self); let last_token = if self.token_cursor.stack.len() == prev { &mut self.token_cursor.frame.last_token } else { &mut self.token_cursor.stack[prev].last_token }; // Pull our the toekns that we've collected from the call to `f` above let mut collected_tokens = match *last_token { LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()), LastToken::Was(_) => panic!("our vector went away?"), }; // If we're not at EOF our current token wasn't actually consumed by // `f`, but it'll still be in our list that we pulled out. In that case // put it back. let extra_token = if self.token != token::Eof { collected_tokens.pop() } else { None }; // If we were previously collecting tokens, then this was a recursive // call. 
In that case we need to record all the tokens we collected in // our parent list as well. To do that we push a clone of our stream // onto the previous list. let stream = collected_tokens.into_iter().collect::<TokenStream>(); match prev_collecting { Some(mut list) => { list.push(stream.clone()); list.extend(extra_token); *last_token = LastToken::Collecting(list); } None => { *last_token = LastToken::Was(extra_token); } } Ok((ret?, stream)) } pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> { let attrs = self.parse_outer_attributes()?; self.parse_item_(attrs, true, false) } /// `::{` or `::*` fn is_import_coupler(&mut self) -> bool { self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)) } /// Parse UseTree /// /// USE_TREE = [`::`] `*` | /// [`::`] `{` USE_TREE_LIST `}` | /// PATH `::` `*` | /// PATH `::` `{` USE_TREE_LIST `}` | /// PATH [`as` IDENT] fn parse_use_tree(&mut self) -> PResult<'a, UseTree> { let lo = self.span; let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() }; let kind = if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || self.is_import_coupler() { // `use *;` or `use ::*;` or `use {...};` or `use ::{...};` if self.eat(&token::ModSep) { prefix.segments.push(PathSegment::crate_root(lo.shrink_to_lo())); } if self.eat(&token::BinOp(token::Star)) { UseTreeKind::Glob } else { UseTreeKind::Nested(self.parse_use_tree_list()?) } } else { // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;` prefix = self.parse_path(PathStyle::Mod)?; if self.eat(&token::ModSep) { if self.eat(&token::BinOp(token::Star)) { UseTreeKind::Glob } else { UseTreeKind::Nested(self.parse_use_tree_list()?) 
} } else { UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID) } }; Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) }) } /// Parse UseTreeKind::Nested(list) /// /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`] fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> { self.parse_unspanned_seq(&token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), SeqSep::trailing_allowed(token::Comma), |this| { Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID)) }) } fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> { if self.eat_keyword(keywords::As) { match self.token { token::Ident(ident, false) if ident.name == keywords::Underscore.name() => { self.bump(); // `_` Ok(Some(ident.gensym())) } _ => self.parse_ident().map(Some), } } else { Ok(None) } } /// Parses a source module as a crate. This is the main /// entry point for the parser. pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { let lo = self.span; Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, span: lo.to(self.span), }) } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { let ret = match self.token { token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf), token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf), _ => return None }; self.bump(); Some(ret) } pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> { match self.parse_optional_str() { Some((s, style, suf)) => { let sp = self.prev_span; self.expect_no_suffix(sp, "string literal", suf); Ok((s, style)) } _ => { let msg = "expected string literal"; let mut err = self.fatal(msg); err.span_label(self.span, msg); Err(err) } } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/lexer/mod.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; use source_map::{SourceMap, FilePathMapping}; use errors::{Applicability, FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; use str::char_at; use symbol::{Symbol, keywords}; use core::unicode::property::Pattern_White_Space; use std::borrow::Cow; use std::char; use std::mem::replace; use rustc_data_structures::sync::Lrc; pub mod comments; mod tokentrees; mod unicode_chars; #[derive(Clone, Debug)] pub struct TokenAndSpan { pub tok: token::Token, pub sp: Span, } impl Default for TokenAndSpan { fn default() -> Self { TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP, } } } pub struct StringReader<'a> { pub sess: &'a ParseSess, /// The absolute offset within the source_map of the next character to read pub next_pos: BytePos, /// The absolute offset within the source_map of the current character pub pos: BytePos, /// The current character (which has been read from self.pos) pub ch: Option<char>, pub source_file: Lrc<syntax_pos::SourceFile>, /// Stop reading src at this index. pub end_src_index: usize, // cached: peek_tok: token::Token, peek_span: Span, peek_span_src_raw: Span, fatal_errs: Vec<DiagnosticBuilder<'a>>, // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time. src: Lrc<String>, /// Stack of open delimiters and their spans. Used for error message. 
token: token::Token, span: Span, /// The raw source span which *does not* take `override_span` into account span_src_raw: Span, open_braces: Vec<(token::DelimToken, Span)>, crate override_span: Option<Span>, last_unclosed_found_span: Option<Span>, } impl<'a> StringReader<'a> { fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { self.mk_sp_and_raw(lo, hi).0 } fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) { let raw = Span::new(lo, hi, NO_EXPANSION); let real = self.override_span.unwrap_or(raw); (real, raw) } fn mk_ident(&self, string: &str) -> Ident { let mut ident = Ident::from_str(string); if let Some(span) = self.override_span { ident.span = span; } ident } fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan { match res { Ok(tok) => tok, Err(_) => { self.emit_fatal_errors(); FatalError.raise(); } } } fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } /// Return the next token. EFFECT: advances the string_reader. 
pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { assert!(self.fatal_errs.is_empty()); let ret_val = TokenAndSpan { tok: replace(&mut self.peek_tok, token::Whitespace), sp: self.peek_span, }; self.advance_token()?; self.span_src_raw = self.peek_span_src_raw; Ok(ret_val) } fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> { let mut t = self.try_next_token()?; loop { match t.tok { token::Whitespace | token::Comment | token::Shebang(_) => { t = self.try_next_token()?; } _ => break, } } self.token = t.tok.clone(); self.span = t.sp; Ok(t) } pub fn real_token(&mut self) -> TokenAndSpan { let res = self.try_real_token(); self.unwrap_or_abort(res) } #[inline] fn is_eof(&self) -> bool { self.ch.is_none() } fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: u16) { let mut err = self.struct_span_fatal(pos, pos, "unterminated raw string"); err.span_label(self.mk_sp(pos, pos), "unterminated raw string"); if hash_count > 0 { err.note(&format!("this raw string should be terminated with `\"{}`", "#".repeat(hash_count as usize))); } err.emit(); FatalError.raise(); } fn fatal(&self, m: &str) -> FatalError { self.fatal_span(self.peek_span, m) } pub fn emit_fatal_errors(&mut self) { for err in &mut self.fatal_errs { err.emit(); } self.fatal_errs.clear(); } pub fn peek(&self) -> TokenAndSpan { // FIXME(pcwalton): Bad copy! 
TokenAndSpan { tok: self.peek_tok.clone(), sp: self.peek_span, } } /// For comments.rs, which hackily pokes into next_pos and ch fn new_raw(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>) -> Self { let mut sr = StringReader::new_raw_internal(sess, source_file, override_span); sr.bump(); sr } fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>) -> Self { if source_file.src.is_none() { sess.span_diagnostic.bug(&format!("Cannot lex source_file without source: {}", source_file.name)); } let src = (*source_file.src.as_ref().unwrap()).clone(); StringReader { sess, next_pos: source_file.start_pos, pos: source_file.start_pos, ch: Some('\n'), source_file, end_src_index: src.len(), // dummy values; not read peek_tok: token::Eof, peek_span: syntax_pos::DUMMY_SP, peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), token: token::Eof, span: syntax_pos::DUMMY_SP, span_src_raw: syntax_pos::DUMMY_SP, open_braces: Vec::new(), override_span, last_unclosed_found_span: None, } } pub fn new(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>, override_span: Option<Span>) -> Self { let mut sr = StringReader::new_raw(sess, source_file, override_span); if sr.advance_token().is_err() { sr.emit_fatal_errors(); FatalError.raise(); } sr } pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self { let begin = sess.source_map().lookup_byte_offset(span.lo()); let end = sess.source_map().lookup_byte_offset(span.hi()); // Make the range zero-length if the span is invalid. if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos { span = span.shrink_to_lo(); } let mut sr = StringReader::new_raw_internal(sess, begin.fm, None); // Seek the lexer to the right byte range. 
sr.next_pos = span.lo(); sr.end_src_index = sr.src_index(span.hi()); sr.bump(); if sr.advance_token().is_err() { sr.emit_fatal_errors(); FatalError.raise(); } sr } #[inline] fn ch_is(&self, c: char) -> bool { self.ch == Some(c) } /// Report a fatal lexical error with a given span. fn fatal_span(&self, sp: Span, m: &str) -> FatalError { self.sess.span_diagnostic.span_fatal(sp, m) } /// Report a lexical error with a given span. fn err_span(&self, sp: Span, m: &str) { self.sess.span_diagnostic.span_err(sp, m) } /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { self.fatal_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { self.err_span(self.mk_sp(from_pos, to_pos), m) } /// Pushes a character to a message string for error reporting fn push_escaped_char_for_msg(m: &mut String, c: char) { match c { '\u{20}'..='\u{7e}' => { // Don't escape \, ' or " for user-facing messages m.push(c); } _ => { m.extend(c.escape_default()); } } } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an /// escaped character to the error message fn fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> FatalError { let mut m = m.to_string(); m.push_str(": "); Self::push_escaped_char_for_msg(&mut m, c); self.fatal_span_(from_pos, to_pos, &m[..]) } fn struct_span_fatal(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), m) } fn struct_fatal_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> DiagnosticBuilder<'a> { let mut m = m.to_string(); m.push_str(": "); Self::push_escaped_char_for_msg(&mut m, c); self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning 
[`from_pos`, `to_pos`), appending an /// escaped character to the error message fn err_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) { let mut m = m.to_string(); m.push_str(": "); Self::push_escaped_char_for_msg(&mut m, c); self.err_span_(from_pos, to_pos, &m[..]); } fn struct_err_span_char(&self, from_pos: BytePos, to_pos: BytePos, m: &str, c: char) -> DiagnosticBuilder<'a> { let mut m = m.to_string(); m.push_str(": "); Self::push_escaped_char_for_msg(&mut m, c); self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the /// offending string to the error message fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> FatalError { m.push_str(": "); m.push_str(&self.src[self.src_index(from_pos)..self.src_index(to_pos)]); self.fatal_span_(from_pos, to_pos, &m[..]) } /// Advance peek_tok and peek_span to refer to the next token, and /// possibly update the interner. fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { self.peek_span_src_raw = comment.sp; self.peek_span = comment.sp; self.peek_tok = comment.tok; } None => { if self.is_eof() { self.peek_tok = token::Eof; let (real, raw) = self.mk_sp_and_raw( self.source_file.end_pos, self.source_file.end_pos, ); self.peek_span = real; self.peek_span_src_raw = raw; } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos); self.peek_span = real; self.peek_span_src_raw = raw; }; } } Ok(()) } #[inline] fn src_index(&self, pos: BytePos) -> usize { (pos - self.source_file.start_pos).to_usize() } /// Calls `f` with a string slice of the source text spanning from `start` /// up to but excluding `self.pos`, meaning the slice does not include /// the character `self.ch`. 
fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T where F: FnOnce(&str) -> T { self.with_str_from_to(start, self.pos, f) } /// Create a Name from a given offset to the current offset, each /// adjusted 1 towards each other (assumes that on either side there is a /// single-byte delimiter). fn name_from(&self, start: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, self.pos); self.with_str_from(start, Symbol::intern) } /// As name_from, with an explicit endpoint. fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, end); self.with_str_from_to(start, end, Symbol::intern) } /// Calls `f` with a string slice of the source text spanning from `start` /// up to but excluding `end`. fn with_str_from_to<T, F>(&self, start: BytePos, end: BytePos, f: F) -> T where F: FnOnce(&str) -> T { f(&self.src[self.src_index(start)..self.src_index(end)]) } /// Converts CRLF to LF in the given string, raising an error on bare CR. 
fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> Cow<'b, str> { let mut i = 0; while i < s.len() { let ch = char_at(s, i); let next = i + ch.len_utf8(); if ch == '\r' { if next < s.len() && char_at(s, next) == '\n' { return translate_crlf_(self, start, s, errmsg, i).into(); } let pos = start + BytePos(i as u32); let end_pos = start + BytePos(next as u32); self.err_span_(pos, end_pos, errmsg); } i = next; } return s.into(); fn translate_crlf_(rdr: &StringReader, start: BytePos, s: &str, errmsg: &str, mut i: usize) -> String { let mut buf = String::with_capacity(s.len()); let mut j = 0; while i < s.len() { let ch = char_at(s, i); let next = i + ch.len_utf8(); if ch == '\r' { if j < i { buf.push_str(&s[j..i]); } j = next; if next >= s.len() || char_at(s, next) != '\n' { let pos = start + BytePos(i as u32); let end_pos = start + BytePos(next as u32); rdr.err_span_(pos, end_pos, errmsg); } } i = next; } if j < s.len() { buf.push_str(&s[j..]); } buf } } /// Advance the StringReader by one character. If a newline is /// discovered, add it to the SourceFile's list of line start offsets. 
crate fn bump(&mut self) { let next_src_index = self.src_index(self.next_pos); if next_src_index < self.end_src_index { let next_ch = char_at(&self.src, next_src_index); let next_ch_len = next_ch.len_utf8(); self.ch = Some(next_ch); self.pos = self.next_pos; self.next_pos = self.next_pos + Pos::from_usize(next_ch_len); } else { self.ch = None; self.pos = self.next_pos; } } fn nextch(&self) -> Option<char> { let next_src_index = self.src_index(self.next_pos); if next_src_index < self.end_src_index { Some(char_at(&self.src, next_src_index)) } else { None } } #[inline] fn nextch_is(&self, c: char) -> bool { self.nextch() == Some(c) } fn nextnextch(&self) -> Option<char> { let next_src_index = self.src_index(self.next_pos); if next_src_index < self.end_src_index { let next_next_src_index = next_src_index + char_at(&self.src, next_src_index).len_utf8(); if next_next_src_index < self.end_src_index { return Some(char_at(&self.src, next_next_src_index)); } } None } #[inline] fn nextnextch_is(&self, c: char) -> bool { self.nextnextch() == Some(c) } /// Eats <XID_start><XID_continue>*, if possible. fn scan_optional_raw_name(&mut self) -> Option<ast::Name> { if !ident_start(self.ch) { return None; } let start = self.pos; self.bump(); while ident_continue(self.ch) { self.bump(); } self.with_str_from(start, |string| { if string == "_" { self.sess.span_diagnostic .struct_span_warn(self.mk_sp(start, self.pos), "underscore literal suffix is not allowed") .warn("this was previously accepted by the compiler but is \ being phased out; it will become a hard error in \ a future release!") .note("for more information, see issue #42326 \ <https://github.com/rust-lang/rust/issues/42326>") .emit(); None } else { Some(Symbol::intern(string)) } }) } /// PRECONDITION: self.ch is not whitespace /// Eats any kind of comment. 
fn scan_comment(&mut self) -> Option<TokenAndSpan> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg); } } if self.ch_is('/') { match self.nextch() { Some('/') => { self.bump(); self.bump(); // line comments starting with "///" or "//!" are doc-comments let doc_comment = (self.ch_is('/') && !self.nextch_is('/')) || self.ch_is('!'); let start_bpos = self.pos - BytePos(2); while !self.is_eof() { match self.ch.unwrap() { '\n' => break, '\r' => { if self.nextch_is('\n') { // CRLF break; } else if doc_comment { self.err_span_(self.pos, self.next_pos, "bare CR not allowed in doc-comment"); } } _ => (), } self.bump(); } if doc_comment { self.with_str_from(start_bpos, |string| { // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { token::DocComment(Symbol::intern(string)) } else { token::Comment }; Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, sp: self.mk_sp(start_bpos, self.pos), }) } } Some('*') => { self.bump(); self.bump(); self.scan_block_comment() } _ => None, } } else if self.ch_is('#') { if self.nextch_is('!') { // Parse an inner attribute. if self.nextnextch_is('[') { return None; } // I guess this is the only way to figure out if // we're at the beginning of the file... 
let cmap = SourceMap::new(FilePathMapping::empty()); cmap.files.borrow_mut().file_maps.push(self.source_file.clone()); let loc = cmap.lookup_char_pos_adj(self.pos); debug!("Skipping a shebang"); if loc.line == 1 && loc.col == CharPos(0) { // FIXME: Add shebang "token", return it let start = self.pos; while !self.ch_is('\n') && !self.is_eof() { self.bump(); } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), sp: self.mk_sp(start, self.pos), }); } } None } else { None } } /// If there is whitespace, shebang, or a comment, scan it. Otherwise, /// return None. fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> { match self.ch.unwrap_or('\0') { // # to handle shebang at start of file -- this is the entry point // for skipping over all "junk" '/' | '#' => { let c = self.scan_comment(); debug!("scanning a comment {:?}", c); c }, c if is_pattern_whitespace(Some(c)) => { let start_bpos = self.pos; while is_pattern_whitespace(self.ch) { self.bump(); } let c = Some(TokenAndSpan { tok: token::Whitespace, sp: self.mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c } _ => None, } } /// Might return a sugared-doc-attr fn scan_block_comment(&mut self) -> Option<TokenAndSpan> { // block comments starting with "/**" or "/*!" 
are doc-comments let is_doc_comment = self.ch_is('*') || self.ch_is('!'); let start_bpos = self.pos - BytePos(2); let mut level: isize = 1; let mut has_cr = false; while level > 0 { if self.is_eof() { let msg = if is_doc_comment { "unterminated block doc-comment" } else { "unterminated block comment" }; let last_bpos = self.pos; self.fatal_span_(start_bpos, last_bpos, msg).raise(); } let n = self.ch.unwrap(); match n { '/' if self.nextch_is('*') => { level += 1; self.bump(); } '*' if self.nextch_is('/') => { level -= 1; self.bump(); } '\r' => { has_cr = true; } _ => (), } self.bump(); } self.with_str_from(start_bpos, |string| { // but comments with only "*"s between two "/"s are not let tok = if is_block_doc_comment(string) { let string = if has_cr { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into() }; token::DocComment(Symbol::intern(&string[..])) } else { token::Comment }; Some(TokenAndSpan { tok, sp: self.mk_sp(start_bpos, self.pos), }) }) } /// Scan through any digits (base `scan_radix`) or underscores, /// and return how many digits there were. /// /// `real_radix` represents the true radix of the number we're /// interested in, and errors will be emitted for any digits /// between `real_radix` and `scan_radix`. 
fn scan_digits(&mut self, real_radix: u32, scan_radix: u32) -> usize { assert!(real_radix <= scan_radix); let mut len = 0; loop { let c = self.ch; if c == Some('_') { debug!("skipping a _"); self.bump(); continue; } match c.and_then(|cc| cc.to_digit(scan_radix)) { Some(_) => { debug!("{:?} in scan_digits", c); // check that the hypothetical digit is actually // in range for the true radix if c.unwrap().to_digit(real_radix).is_none() { self.err_span_(self.pos, self.next_pos, &format!("invalid digit for a base {} literal", real_radix)); } len += 1; self.bump(); } _ => return len, } } } /// Lex a LIT_INTEGER or a LIT_FLOAT fn scan_number(&mut self, c: char) -> token::Lit { let mut base = 10; let start_bpos = self.pos; self.bump(); let num_digits = if c == '0' { match self.ch.unwrap_or('\0') { 'b' => { self.bump(); base = 2; self.scan_digits(2, 10) } 'o' => { self.bump(); base = 8; self.scan_digits(8, 10) } 'x' => { self.bump(); base = 16; self.scan_digits(16, 16) } '0'..='9' | '_' | '.' | 'e' | 'E' => { self.scan_digits(10, 10) + 1 } _ => { // just a 0 return token::Integer(self.name_from(start_bpos)); } } } else if c.is_digit(10) { self.scan_digits(10, 10) + 1 } else { 0 }; if num_digits == 0 { self.err_span_(start_bpos, self.pos, "no valid digits found for number"); return token::Integer(Symbol::intern("0")); } // might be a float, but don't be greedy if this is actually an // integer literal followed by field/method access or a range pattern // (`0..2` and `12.foo()`) if self.ch_is('.') && !self.nextch_is('.') && !ident_start(self.nextch()) { // might have stuff after the ., and if it does, it needs to start // with a number self.bump(); if self.ch.unwrap_or('\0').is_digit(10) { self.scan_digits(10, 10); self.scan_float_exponent(); } let pos = self.pos; self.check_float_base(start_bpos, pos, base); token::Float(self.name_from(start_bpos)) } else { // it might be a float if it has an exponent if self.ch_is('e') || self.ch_is('E') { self.scan_float_exponent(); let 
pos = self.pos; self.check_float_base(start_bpos, pos, base); return token::Float(self.name_from(start_bpos)); } // but we certainly have an integer! token::Integer(self.name_from(start_bpos)) } } /// Scan over `n_digits` hex digits, stopping at `delim`, reporting an /// error if too many or too few digits are encountered. fn scan_hex_digits(&mut self, n_digits: usize, delim: char, below_0x7f_only: bool) -> bool { debug!("scanning {} digits until {:?}", n_digits, delim); let start_bpos = self.pos; let mut accum_int = 0; let mut valid = true; for _ in 0..n_digits { if self.is_eof() { let last_bpos = self.pos; self.fatal_span_(start_bpos, last_bpos, "unterminated numeric character escape").raise(); } if self.ch_is(delim) { let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "numeric character escape is too short"); valid = false; break; } let c = self.ch.unwrap_or('\x00'); accum_int *= 16; accum_int += c.to_digit(16).unwrap_or_else(|| { self.err_span_char(self.pos, self.next_pos, "invalid character in numeric character escape", c); valid = false; 0 }); self.bump(); } if below_0x7f_only && accum_int >= 0x80 { self.err_span_(start_bpos, self.pos, "this form of character escape may only be used with characters in \ the range [\\x00-\\x7f]"); valid = false; } match char::from_u32(accum_int) { Some(_) => valid, None => { let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "invalid numeric character escape"); false } } } /// Scan for a single (possibly escaped) byte or char /// in a byte, (non-raw) byte string, char, or (non-raw) string literal. /// `start` is the position of `first_source_char`, which is already consumed. /// /// Returns true if there was a valid char/byte, false otherwise. 
fn scan_char_or_byte(&mut self, start: BytePos, first_source_char: char, ascii_only: bool, delim: char) -> bool { match first_source_char { '\\' => { // '\X' for some X must be a character constant: let escaped = self.ch; let escaped_pos = self.pos; self.bump(); match escaped { None => {} // EOF here is an error that will be checked later. Some(e) => { return match e { 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0' => true, 'x' => self.scan_byte_escape(delim, !ascii_only), 'u' => { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { let span = self.mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, "format of unicode escape sequences is \ `\\u{…}`") .emit(); false }; if ascii_only { self.err_span_(start, self.pos, "unicode escape sequences cannot be used as a \ byte or in a byte string"); } valid } '\n' if delim == '"' => { self.consume_whitespace(); true } '\r' if delim == '"' && self.ch_is('\n') => { self.consume_whitespace(); true } c => { let pos = self.pos; let mut err = self.struct_err_span_char(escaped_pos, pos, if ascii_only { "unknown byte escape" } else { "unknown character \ escape" }, c); if e == '\r' { err.span_help(self.mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { err.span_help(self.mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } err.emit(); false } } } } } '\t' | '\n' | '\r' | '\'' if delim == '\'' => { let pos = self.pos; self.err_span_char(start, pos, if ascii_only { "byte constant must be escaped" } else { "character constant must be escaped" }, first_source_char); return false; } '\r' => { if self.ch_is('\n') { self.bump(); return true; } else { self.err_span_(start, self.pos, "bare CR not allowed in string, use \\r instead"); return false; } } _ => { if 
ascii_only && first_source_char > '\x7F' { let pos = self.pos; self.err_span_(start, pos, "byte constant must be ASCII. Use a \\xHH escape for a \ non-ASCII byte"); return false; } } } true } /// Scan over a `\u{...}` escape /// /// At this point, we have already seen the `\` and the `u`, the `{` is the current character. /// We will read a hex number (with `_` separators), with 1 to 6 actual digits, /// and pass over the `}`. fn scan_unicode_escape(&mut self, delim: char) -> bool { self.bump(); // past the { let start_bpos = self.pos; let mut valid = true; if let Some('_') = self.ch { // disallow leading `_` self.err_span_(self.pos, self.next_pos, "invalid start of unicode escape"); valid = false; } let count = self.scan_digits(16, 16); if count > 6 { self.err_span_(start_bpos, self.pos, "overlong unicode escape (must have at most 6 hex digits)"); valid = false; } loop { match self.ch { Some('}') => { if valid && count == 0 { self.err_span_(start_bpos, self.pos, "empty unicode escape (must have at least 1 hex digit)"); valid = false; } self.bump(); // past the ending `}` break; }, Some(c) => { if c == delim { self.err_span_(self.pos, self.pos, "unterminated unicode escape (needed a `}`)"); valid = false; break; } else if valid { self.err_span_char(start_bpos, self.pos, "invalid character in unicode escape", c); valid = false; } }, None => { self.fatal_span_(start_bpos, self.pos, "unterminated unicode escape (found EOF)").raise(); } } self.bump(); } valid } /// Scan over a float exponent. fn scan_float_exponent(&mut self) { if self.ch_is('e') || self.ch_is('E') { self.bump(); if self.ch_is('-') || self.ch_is('+') { self.bump(); } if self.scan_digits(10, 10) == 0 { let mut err = self.struct_span_fatal( self.pos, self.next_pos, "expected at least one digit in exponent" ); if let Some(ch) = self.ch { // check for e.g. 
Unicode minus '−' (Issue #49746) if unicode_chars::check_for_substitution(self, ch, &mut err) { self.bump(); self.scan_digits(10, 10); } } err.emit(); } } } /// Check that a base is valid for a floating literal, emitting a nice /// error if it isn't. fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) { match base { 16 => { self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not supported") } 8 => { self.err_span_(start_bpos, last_bpos, "octal float literal is not supported") } 2 => { self.err_span_(start_bpos, last_bpos, "binary float literal is not supported") } _ => (), } } fn binop(&mut self, op: token::BinOpToken) -> token::Token { self.bump(); if self.ch_is('=') { self.bump(); token::BinOpEq(op) } else { token::BinOp(op) } } /// Return the next token from the string, advances the input past that /// token, and updates the interner fn next_token_inner(&mut self) -> Result<token::Token, ()> { let c = self.ch; if ident_start(c) { let (is_ident_start, is_raw_ident) = match (c.unwrap(), self.nextch(), self.nextnextch()) { // r# followed by an identifier starter is a raw identifier. // This is an exception to the r# case below. ('r', Some('#'), x) if ident_start(x) => (true, true), // r as in r" or r#" is part of a raw string literal. // b as in b' is part of a byte literal. // They are not identifiers, and are handled further down. ('r', Some('"'), _) | ('r', Some('#'), _) | ('b', Some('"'), _) | ('b', Some('\''), _) | ('b', Some('r'), Some('"')) | ('b', Some('r'), Some('#')) => (false, false), _ => (true, false), }; if is_ident_start { let raw_start = self.pos; if is_raw_ident { // Consume the 'r#' characters. self.bump(); self.bump(); } let start = self.pos; self.bump(); while ident_continue(self.ch) { self.bump(); } return Ok(self.with_str_from(start, |string| { // FIXME: perform NFKC normalization here. 
(Issue #2253) let ident = self.mk_ident(string); if is_raw_ident && (ident.is_path_segment_keyword() || ident.name == keywords::Underscore.name()) { self.fatal_span_(raw_start, self.pos, &format!("`r#{}` is not currently supported.", ident.name) ).raise(); } if is_raw_ident { let span = self.mk_sp(raw_start, self.pos); self.sess.raw_identifier_spans.borrow_mut().push(span); } token::Ident(ident, is_raw_ident) })); } } if is_dec_digit(c) { let num = self.scan_number(c.unwrap()); let suffix = self.scan_optional_raw_name(); debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix); return Ok(token::Literal(num, suffix)); } match c.expect("next_token_inner called at EOF") { // One-byte tokens. ';' => { self.bump(); Ok(token::Semi) } ',' => { self.bump(); Ok(token::Comma) } '.' => { self.bump(); if self.ch_is('.') { self.bump(); if self.ch_is('.') { self.bump(); Ok(token::DotDotDot) } else if self.ch_is('=') { self.bump(); Ok(token::DotDotEq) } else { Ok(token::DotDot) } } else { Ok(token::Dot) } } '(' => { self.bump(); Ok(token::OpenDelim(token::Paren)) } ')' => { self.bump(); Ok(token::CloseDelim(token::Paren)) } '{' => { self.bump(); Ok(token::OpenDelim(token::Brace)) } '}' => { self.bump(); Ok(token::CloseDelim(token::Brace)) } '[' => { self.bump(); Ok(token::OpenDelim(token::Bracket)) } ']' => { self.bump(); Ok(token::CloseDelim(token::Bracket)) } '@' => { self.bump(); Ok(token::At) } '#' => { self.bump(); Ok(token::Pound) } '~' => { self.bump(); Ok(token::Tilde) } '?' => { self.bump(); Ok(token::Question) } ':' => { self.bump(); if self.ch_is(':') { self.bump(); Ok(token::ModSep) } else { Ok(token::Colon) } } '$' => { self.bump(); Ok(token::Dollar) } // Multi-byte tokens. '=' => { self.bump(); if self.ch_is('=') { self.bump(); Ok(token::EqEq) } else if self.ch_is('>') { self.bump(); Ok(token::FatArrow) } else { Ok(token::Eq) } } '!' 
=> { self.bump(); if self.ch_is('=') { self.bump(); Ok(token::Ne) } else { Ok(token::Not) } } '<' => { self.bump(); match self.ch.unwrap_or('\x00') { '=' => { self.bump(); Ok(token::Le) } '<' => { Ok(self.binop(token::Shl)) } '-' => { self.bump(); Ok(token::LArrow) } _ => { Ok(token::Lt) } } } '>' => { self.bump(); match self.ch.unwrap_or('\x00') { '=' => { self.bump(); Ok(token::Ge) } '>' => { Ok(self.binop(token::Shr)) } _ => { Ok(token::Gt) } } } '\'' => { // Either a character constant 'a' OR a lifetime name 'abc let start_with_quote = self.pos; self.bump(); let start = self.pos; // the eof will be picked up by the final `'` check below let c2 = self.ch.unwrap_or('\x00'); self.bump(); // If the character is an ident start not followed by another single // quote, then this is a lifetime name: if ident_start(Some(c2)) && !self.ch_is('\'') { while ident_continue(self.ch) { self.bump(); } // lifetimes shouldn't end with a single quote // if we find one, then this is an invalid character literal if self.ch_is('\'') { self.fatal_span_verbose(start_with_quote, self.next_pos, String::from("character literal may only contain one codepoint")) .raise(); } // Include the leading `'` in the real identifier, for macro // expansion purposes. See #12512 for the gory details of why // this is necessary. 
let ident = self.with_str_from(start, |lifetime_name| { self.mk_ident(&format!("'{}", lifetime_name)) }); return Ok(token::Lifetime(ident)); } let valid = self.scan_char_or_byte(start, c2, /* ascii_only */ false, '\''); if !self.ch_is('\'') { let pos = self.pos; loop { self.bump(); if self.ch_is('\'') { let start = self.src_index(start); let end = self.src_index(self.pos); self.bump(); let span = self.mk_sp(start_with_quote, self.pos); self.sess.span_diagnostic .struct_span_err(span, "character literal may only contain one codepoint") .span_suggestion_with_applicability( span, "if you meant to write a `str` literal, use double quotes", format!("\"{}\"", &self.src[start..end]), Applicability::MachineApplicable ).emit(); return Ok(token::Literal(token::Str_(Symbol::intern("??")), None)) } if self.ch_is('\n') || self.is_eof() || self.ch_is('/') { // Only attempt to infer single line string literals. If we encounter // a slash, bail out in order to avoid nonsensical suggestion when // involving comments. break; } } self.fatal_span_verbose(start_with_quote, pos, String::from("character literal may only contain one codepoint")).raise(); } let id = if valid { self.name_from(start) } else { Symbol::intern("0") }; self.bump(); // advance ch past token let suffix = self.scan_optional_raw_name(); Ok(token::Literal(token::Char(id), suffix)) } 'b' => { self.bump(); let lit = match self.ch { Some('\'') => self.scan_byte(), Some('"') => self.scan_byte_string(), Some('r') => self.scan_raw_byte_string(), _ => unreachable!(), // Should have been a token::Ident above. 
}; let suffix = self.scan_optional_raw_name(); Ok(token::Literal(lit, suffix)) } '"' => { let start_bpos = self.pos; let mut valid = true; self.bump(); while !self.ch_is('"') { if self.is_eof() { let last_bpos = self.pos; self.fatal_span_(start_bpos, last_bpos, "unterminated double quote string").raise(); } let ch_start = self.pos; let ch = self.ch.unwrap(); self.bump(); valid &= self.scan_char_or_byte(ch_start, ch, /* ascii_only */ false, '"'); } // adjust for the ASCII " at the start of the literal let id = if valid { self.name_from(start_bpos + BytePos(1)) } else { Symbol::intern("??") }; self.bump(); let suffix = self.scan_optional_raw_name(); Ok(token::Literal(token::Str_(id), suffix)) } 'r' => { let start_bpos = self.pos; self.bump(); let mut hash_count: u16 = 0; while self.ch_is('#') { if hash_count == 65535 { let bpos = self.next_pos; self.fatal_span_(start_bpos, bpos, "too many `#` symbols: raw strings may be \ delimited by up to 65535 `#` symbols").raise(); } self.bump(); hash_count += 1; } if self.is_eof() { self.fail_unterminated_raw_string(start_bpos, hash_count); } else if !self.ch_is('"') { let last_bpos = self.pos; let curr_char = self.ch.unwrap(); self.fatal_span_char(start_bpos, last_bpos, "found invalid character; only `#` is allowed \ in raw string delimitation", curr_char).raise(); } self.bump(); let content_start_bpos = self.pos; let mut content_end_bpos; let mut valid = true; 'outer: loop { if self.is_eof() { self.fail_unterminated_raw_string(start_bpos, hash_count); } // if self.ch_is('"') { // content_end_bpos = self.pos; // for _ in 0..hash_count { // self.bump(); // if !self.ch_is('#') { // continue 'outer; let c = self.ch.unwrap(); match c { '"' => { content_end_bpos = self.pos; for _ in 0..hash_count { self.bump(); if !self.ch_is('#') { continue 'outer; } } break; } '\r' => { if !self.nextch_is('\n') { let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "bare CR not allowed in raw string, use \\r \ instead"); valid = false; 
} } _ => (), } self.bump(); } self.bump(); let id = if valid { self.name_from_to(content_start_bpos, content_end_bpos) } else { Symbol::intern("??") }; let suffix = self.scan_optional_raw_name(); Ok(token::Literal(token::StrRaw(id, hash_count), suffix)) } '-' => { if self.nextch_is('>') { self.bump(); self.bump(); Ok(token::RArrow) } else { Ok(self.binop(token::Minus)) } } '&' => { if self.nextch_is('&') { self.bump(); self.bump(); Ok(token::AndAnd) } else { Ok(self.binop(token::And)) } } '|' => { match self.nextch() { Some('|') => { self.bump(); self.bump(); Ok(token::OrOr) } _ => { Ok(self.binop(token::Or)) } } } '+' => { Ok(self.binop(token::Plus)) } '*' => { Ok(self.binop(token::Star)) } '/' => { Ok(self.binop(token::Slash)) } '^' => { Ok(self.binop(token::Caret)) } '%' => { Ok(self.binop(token::Percent)) } c => { let last_bpos = self.pos; let bpos = self.next_pos; let mut err = self.struct_fatal_span_char(last_bpos, bpos, "unknown start of token", c); unicode_chars::check_for_substitution(self, c, &mut err); self.fatal_errs.push(err); Err(()) } } } fn consume_whitespace(&mut self) { while is_pattern_whitespace(self.ch) && !self.is_eof() { self.bump(); } } fn read_to_eol(&mut self) -> String { let mut val = String::new(); while !self.ch_is('\n') && !self.is_eof() { val.push(self.ch.unwrap()); self.bump(); } if self.ch_is('\n') { self.bump(); } val } fn read_one_line_comment(&mut self) -> String { let val = self.read_to_eol(); assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') || (val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!')); val } fn consume_non_eol_whitespace(&mut self) { while is_pattern_whitespace(self.ch) && !self.ch_is('\n') && !self.is_eof() { self.bump(); } } fn peeking_at_comment(&self) -> bool { (self.ch_is('/') && self.nextch_is('/')) || (self.ch_is('/') && self.nextch_is('*')) || // consider shebangs comments, but not inner attributes (self.ch_is('#') && self.nextch_is('!') && !self.nextnextch_is('[')) } fn scan_byte(&mut 
self) -> token::Lit { self.bump(); let start = self.pos; // the eof will be picked up by the final `'` check below let c2 = self.ch.unwrap_or('\x00'); self.bump(); let valid = self.scan_char_or_byte(start, c2, // ascii_only = true, '\''); if !self.ch_is('\'') { // Byte offsetting here is okay because the // character before position `start` are an // ascii single quote and ascii 'b'. let pos = self.pos; self.fatal_span_verbose(start - BytePos(2), pos, "unterminated byte constant".to_string()).raise(); } let id = if valid { self.name_from(start) } else { Symbol::intern("?") }; self.bump(); // advance ch past token token::Byte(id) } #[inline] fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool { self.scan_hex_digits(2, delim, below_0x7f_only) } fn scan_byte_string(&mut self) -> token::Lit { self.bump(); let start = self.pos; let mut valid = true; while !self.ch_is('"') { if self.is_eof() { let pos = self.pos; self.fatal_span_(start, pos, "unterminated double quote byte string").raise(); } let ch_start = self.pos; let ch = self.ch.unwrap(); self.bump(); valid &= self.scan_char_or_byte(ch_start, ch, // ascii_only = true, '"'); } let id = if valid { self.name_from(start) } else { Symbol::intern("??") }; self.bump(); token::ByteStr(id) } fn scan_raw_byte_string(&mut self) -> token::Lit { let start_bpos = self.pos; self.bump(); let mut hash_count = 0; while self.ch_is('#') { if hash_count == 65535 { let bpos = self.next_pos; self.fatal_span_(start_bpos, bpos, "too many `#` symbols: raw byte strings may be \ delimited by up to 65535 `#` symbols").raise(); } self.bump(); hash_count += 1; } if self.is_eof() { self.fail_unterminated_raw_string(start_bpos, hash_count); } else if !self.ch_is('"') { let pos = self.pos; let ch = self.ch.unwrap(); self.fatal_span_char(start_bpos, pos, "found invalid character; only `#` is allowed in raw \ string delimitation", ch).raise(); } self.bump(); let content_start_bpos = self.pos; let mut content_end_bpos; 'outer: 
loop { match self.ch { None => { self.fail_unterminated_raw_string(start_bpos, hash_count); } Some('"') => { content_end_bpos = self.pos; for _ in 0..hash_count { self.bump(); if !self.ch_is('#') { continue 'outer; } } break; } Some(c) => { if c > '\x7F' { let pos = self.pos; self.err_span_char(pos, pos, "raw byte string must be ASCII", c); } } } self.bump(); } self.bump(); token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), hash_count) } } // This tests the character for the unicode property 'PATTERN_WHITE_SPACE' which // is guaranteed to be forward compatible. http://unicode.org/reports/tr31/#R3 #[inline] crate fn is_pattern_whitespace(c: Option<char>) -> bool { c.map_or(false, Pattern_White_Space) } #[inline] fn in_range(c: Option<char>, lo: char, hi: char) -> bool { c.map_or(false, |c| lo <= c && c <= hi) } #[inline] fn is_dec_digit(c: Option<char>) -> bool { in_range(c, '0', '9') } fn is_doc_comment(s: &str) -> bool { let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') || s.starts_with("//!"); debug!("is {:?} a doc comment? {}", s, res); res } fn is_block_doc_comment(s: &str) -> bool { // Prevent `/**/` from being parsed as a doc comment let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') || s.starts_with("/*!")) && s.len() >= 5; debug!("is {:?} a doc comment? 
{}", s, res); res } fn ident_start(c: Option<char>) -> bool { let c = match c { Some(c) => c, None => return false, }; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || (c > '\x7f' && c.is_xid_start()) } fn ident_continue(c: Option<char>) -> bool { let c = match c { Some(c) => c, None => return false, }; (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || (c > '\x7f' && c.is_xid_continue()) } #[cfg(test)] mod tests { use super::*; use ast::{Ident, CrateConfig}; use symbol::Symbol; use syntax_pos::{BytePos, Span, NO_EXPANSION}; use source_map::SourceMap; use errors; use feature_gate::UnstableFeatures; use parse::token; use std::collections::HashSet; use std::io; use std::path::PathBuf; use diagnostics::plugin::ErrorMap; use rustc_data_structures::sync::Lock; use with_globals; fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess { let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), Some(cm.clone()), false, false); ParseSess { span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)), unstable_features: UnstableFeatures::from_environment(), config: CrateConfig::new(), included_mod_stack: Lock::new(Vec::new()), code_map: cm, missing_fragment_specifiers: Lock::new(HashSet::new()), raw_identifier_spans: Lock::new(Vec::new()), registered_diagnostics: Lock::new(ErrorMap::new()), non_modrs_mods: Lock::new(vec![]), buffered_lints: Lock::new(vec![]), } } // open a string reader for the given string fn setup<'a>(cm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> { let fm = cm.new_source_file(PathBuf::from("zebra.rs").into(), teststr); StringReader::new(sess, fm, None) } #[test] fn t1() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut string_reader = setup(&cm, &sh, "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); let id = Ident::from_str("fn"); 
assert_eq!(string_reader.next_token().tok, token::Comment); assert_eq!(string_reader.next_token().tok, token::Whitespace); let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan { tok: token::Ident(id, false), sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION), }; assert_eq!(tok1.tok, tok2.tok); assert_eq!(tok1.sp, tok2.sp); assert_eq!(string_reader.next_token().tok, token::Whitespace); // the 'main' id is already read: assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { tok: mk_ident("main"), sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION), }; assert_eq!(tok3.tok, tok4.tok); assert_eq!(tok3.sp, tok4.sp); // the lparen is already read: assert_eq!(string_reader.pos.clone(), BytePos(29)) }) } // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Token>) { for expected_tok in &expected { assert_eq!(&string_reader.next_token().tok, expected_tok); } } // make the identifier by looking up the string in the interner fn mk_ident(id: &str) -> token::Token { token::Token::from_ast_ident(Ident::from_str(id)) } #[test] fn doublecolonparsing() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a b".to_string()), vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); }) } #[test] fn dcparsing_2() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a::b".to_string()), vec![mk_ident("a"), token::ModSep, mk_ident("b")]); }) } #[test] fn dcparsing_3() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a ::b".to_string()), vec![mk_ident("a"), 
token::Whitespace, token::ModSep, mk_ident("b")]); }) } #[test] fn dcparsing_4() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); check_tokenization(setup(&cm, &sh, "a:: b".to_string()), vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); }) } #[test] fn character_a() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("a")), None)); }) } #[test] fn character_space() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern(" ")), None)); }) } #[test] fn character_escaped() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, token::Literal(token::Char(Symbol::intern("\\n")), None)); }) } #[test] fn lifetime_name() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, token::Lifetime(Ident::from_str("'abc"))); }) } #[test] fn raw_string() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) .next_token() .tok, token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); }) } #[test] fn literal_suffixes() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); macro_rules! 
test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, token::Literal(token::$tok_type(Symbol::intern($tok_contents)), Some(Symbol::intern("suffix")))); // with a whitespace separator: assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, token::Literal(token::$tok_type(Symbol::intern($tok_contents)), None)); }} } test!("'a'", Char, "a"); test!("b'a'", Byte, "a"); test!("\"a\"", Str_, "a"); test!("b\"a\"", ByteStr, "a"); test!("1234", Integer, "1234"); test!("0b101", Integer, "0b101"); test!("0xABC", Integer, "0xABC"); test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, token::Literal(token::Integer(Symbol::intern("2")), Some(Symbol::intern("us")))); assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, token::Literal(token::StrRaw(Symbol::intern("raw"), 3), Some(Symbol::intern("suffix")))); assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), Some(Symbol::intern("suffix")))); }) } #[test] fn line_doc_comments() { assert!(is_doc_comment("///")); assert!(is_doc_comment("/// blah")); assert!(!is_doc_comment("////")); } #[test] fn nested_block_comments() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); match lexer.next_token().tok { token::Comment => {} _ => panic!("expected a comment!"), } assert_eq!(lexer.next_token().tok, token::Literal(token::Char(Symbol::intern("a")), None)); }) } #[test] fn crlf_comments() { with_globals(|| { let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(cm.clone()); let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, 
token::Comment); assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(Symbol::intern("/// test"))); }) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/lexer/comments.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::CommentStyle::*; use ast; use source_map::SourceMap; use syntax_pos::{BytePos, CharPos, Pos, FileName}; use parse::lexer::{is_block_doc_comment, is_pattern_whitespace}; use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan}; use print::pprust; use str::char_at; use std::io::Read; use std::usize; #[derive(Clone, Copy, PartialEq, Debug)] pub enum CommentStyle { /// No code on either side of each line of the comment Isolated, /// Code exists to the left of the comment Trailing, /// Code before /* foo */ and after the comment Mixed, /// Just a manual blank line "\n\n", for layout BlankLine, } #[derive(Clone)] pub struct Comment { pub style: CommentStyle, pub lines: Vec<String>, pub pos: BytePos, } fn is_doc_comment(s: &str) -> bool { (s.starts_with("///") && super::is_doc_comment(s)) || s.starts_with("//!") || (s.starts_with("/**") && is_block_doc_comment(s)) || s.starts_with("/*!") } pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { assert!(is_doc_comment(comment)); if comment.starts_with("//!") || comment.starts_with("/*!") { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer } } pub fn strip_doc_comment_decoration(comment: &str) -> String { /// remove whitespace-only lines from the start/end of lines fn vertical_trim(lines: Vec<String>) -> Vec<String> { let mut i = 0; let mut j = lines.len(); // first line of all-stars should be omitted if !lines.is_empty() && lines[0].chars().all(|c| c == '*') { i += 1; } while i < j && lines[i].trim().is_empty() { i += 1; } // like 
the first, a last line of all stars should be omitted if j > i && lines[j - 1] .chars() .skip(1) .all(|c| c == '*') { j -= 1; } while j > i && lines[j - 1].trim().is_empty() { j -= 1; } lines[i..j].to_vec() } /// remove a "[ \t]*\*" block from each line, if possible fn horizontal_trim(lines: Vec<String>) -> Vec<String> { let mut i = usize::MAX; let mut can_trim = true; let mut first = true; for line in &lines { for (j, c) in line.chars().enumerate() { if j > i || !"* \t".contains(c) { can_trim = false; break; } if c == '*' { if first { i = j; first = false; } else if i != j { can_trim = false; } break; } } if i >= line.len() { can_trim = false; } if !can_trim { break; } } if can_trim { lines.iter() .map(|line| (&line[i + 1..line.len()]).to_string()) .collect() } else { lines } } // one-line comments lose their prefix const ONELINERS: &[&str] = &["///!", "///", "//!", "//"]; for prefix in ONELINERS { if comment.starts_with(*prefix) { return (&comment[prefix.len()..]).to_string(); } } if comment.starts_with("/*") { let lines = comment[3..comment.len() - 2] .lines() .map(|s| s.to_string()) .collect::<Vec<String>>(); let lines = vertical_trim(lines); let lines = horizontal_trim(lines); return lines.join("\n"); } panic!("not a doc-comment: {}", comment); } fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) { debug!(">>> blank-line comment"); comments.push(Comment { style: BlankLine, lines: Vec::new(), pos: rdr.pos, }); } fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec<Comment>) { while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() { if rdr.ch_is('\n') { push_blank_line_comment(rdr, &mut *comments); } rdr.bump(); } } fn read_shebang_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment>) { debug!(">>> shebang comment"); let p = rdr.pos; debug!("<<< shebang comment"); comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, lines: 
vec![rdr.read_one_line_comment()], pos: p, }); } fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment>) { debug!(">>> line comments"); let p = rdr.pos; let mut lines: Vec<String> = Vec::new(); while rdr.ch_is('/') && rdr.nextch_is('/') { let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. if is_doc_comment(&line[..]) { break; } lines.push(line); rdr.consume_non_eol_whitespace(); } debug!("<<< line comments"); if !lines.is_empty() { comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, lines, pos: p, }); } } /// Returns None if the first col chars of s contain a non-whitespace char. /// Otherwise returns Some(k) where k is first char offset after that leading /// whitespace. Note k may be outside bounds of s. fn all_whitespace(s: &str, col: CharPos) -> Option<usize> { let len = s.len(); let mut col = col.to_usize(); let mut cursor: usize = 0; while col > 0 && cursor < len { let ch = char_at(s, cursor); if !ch.is_whitespace() { return None; } cursor += ch.len_utf8(); col -= 1; } Some(cursor) } fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col: CharPos) { let len = s.len(); let s1 = match all_whitespace(&s[..], col) { Some(col) => { if col < len { (&s[col..len]).to_string() } else { "".to_string() } } None => s, }; debug!("pushing line: {}", s1); lines.push(s1); } fn read_block_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec<Comment>) { debug!(">>> block comment"); let p = rdr.pos; let mut lines: Vec<String> = Vec::new(); // Count the number of chars since the start of the line by rescanning. 
let mut src_index = rdr.src_index(rdr.source_file.line_begin_pos(rdr.pos)); let end_src_index = rdr.src_index(rdr.pos); assert!(src_index <= end_src_index, "src_index={}, end_src_index={}, line_begin_pos={}", src_index, end_src_index, rdr.source_file.line_begin_pos(rdr.pos).to_u32()); let mut n = 0; while src_index < end_src_index { let c = char_at(&rdr.src, src_index); src_index += c.len_utf8(); n += 1; } let col = CharPos(n); rdr.bump(); rdr.bump(); let mut curr_line = String::from("/*"); // doc-comments are not really comments, they are attributes if (rdr.ch_is('*') && !rdr.nextch_is('*')) || rdr.ch_is('!') { while !(rdr.ch_is('*') && rdr.nextch_is('/')) && !rdr.is_eof() { curr_line.push(rdr.ch.unwrap()); rdr.bump(); } if !rdr.is_eof() { curr_line.push_str("*/"); rdr.bump(); rdr.bump(); } if is_block_doc_comment(&curr_line[..]) { return; } assert!(!curr_line.contains('\n')); lines.push(curr_line); } else { let mut level: isize = 1; while level > 0 { debug!("=== block comment level {}", level); if rdr.is_eof() { rdr.fatal("unterminated block comment").raise(); } if rdr.ch_is('\n') { trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col); curr_line = String::new(); rdr.bump(); } else { curr_line.push(rdr.ch.unwrap()); if rdr.ch_is('/') && rdr.nextch_is('*') { rdr.bump(); rdr.bump(); curr_line.push('*'); level += 1; } else { if rdr.ch_is('*') && rdr.nextch_is('/') { rdr.bump(); rdr.bump(); curr_line.push('/'); level -= 1; } else { rdr.bump(); } } } } if !curr_line.is_empty() { trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col); } } let mut style = if code_to_the_left { Trailing } else { Isolated }; rdr.consume_non_eol_whitespace(); if !rdr.is_eof() && !rdr.ch_is('\n') && lines.len() == 1 { style = Mixed; } debug!("<<< block comment"); comments.push(Comment { style, lines, pos: p, }); } fn consume_comment(rdr: &mut StringReader, comments: &mut Vec<Comment>, code_to_the_left: &mut bool, anything_to_the_left: &mut bool) { debug!(">>> consume 
comment"); if rdr.ch_is('/') && rdr.nextch_is('/') { read_line_comments(rdr, *code_to_the_left, comments); *code_to_the_left = false; *anything_to_the_left = false; } else if rdr.ch_is('/') && rdr.nextch_is('*') { read_block_comment(rdr, *code_to_the_left, comments); *anything_to_the_left = true; } else if rdr.ch_is('#') && rdr.nextch_is('!') { read_shebang_comment(rdr, *code_to_the_left, comments); *code_to_the_left = false; *anything_to_the_left = false; } else { panic!(); } debug!("<<< consume comment"); } #[derive(Clone)] pub struct Literal { pub lit: String, pub pos: BytePos, } // it appears this function is called only from pprust... that's // probably not a good thing. pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) -> (Vec<Comment>, Vec<Literal>) { let mut src = String::new(); srdr.read_to_string(&mut src).unwrap(); let cm = SourceMap::new(sess.source_map().path_mapping().clone()); let source_file = cm.new_source_file(path, src); let mut rdr = lexer::StringReader::new_raw(sess, source_file, None); let mut comments: Vec<Comment> = Vec::new(); let mut literals: Vec<Literal> = Vec::new(); let mut code_to_the_left = false; // Only code let mut anything_to_the_left = false; // Code or comments while !rdr.is_eof() { loop { // Eat all the whitespace and count blank lines. rdr.consume_non_eol_whitespace(); if rdr.ch_is('\n') { if anything_to_the_left { rdr.bump(); // The line is not blank, do not count. 
} consume_whitespace_counting_blank_lines(&mut rdr, &mut comments); code_to_the_left = false; anything_to_the_left = false; } // Eat one comment group if rdr.peeking_at_comment() { consume_comment(&mut rdr, &mut comments, &mut code_to_the_left, &mut anything_to_the_left); } else { break } } let bstart = rdr.pos; rdr.next_token(); // discard, and look ahead; we're working with internal state let TokenAndSpan { tok, sp } = rdr.peek(); if tok.is_lit() { rdr.with_str_from(bstart, |s| { debug!("tok lit: {}", s); literals.push(Literal { lit: s.to_string(), pos: sp.lo(), }); }) } else { debug!("tok: {}", pprust::token_to_string(&tok)); } code_to_the_left = true; anything_to_the_left = true; } (comments, literals) } #[cfg(test)] mod tests { use super::*; #[test] fn test_block_doc_comment_1() { let comment = "/**\n * Test \n ** Test\n * Test\n*/"; let stripped = strip_doc_comment_decoration(comment); assert_eq!(stripped, " Test \n* Test\n Test"); } #[test] fn test_block_doc_comment_2() { let comment = "/**\n * Test\n * Test\n*/"; let stripped = strip_doc_comment_decoration(comment); assert_eq!(stripped, " Test\n Test"); } #[test] fn test_block_doc_comment_3() { let comment = "/**\n let a: *i32;\n *a = 5;\n*/"; let stripped = strip_doc_comment_decoration(comment); assert_eq!(stripped, " let a: *i32;\n *a = 5;"); } #[test] fn test_block_doc_comment_4() { let comment = "/*******************\n test\n *********************/"; let stripped = strip_doc_comment_decoration(comment); assert_eq!(stripped, " test"); } #[test] fn test_line_doc_comment() { let stripped = strip_doc_comment_decoration("/// test"); assert_eq!(stripped, " test"); let stripped = strip_doc_comment_decoration("///! 
test"); assert_eq!(stripped, " test"); let stripped = strip_doc_comment_decoration("// test"); assert_eq!(stripped, " test"); let stripped = strip_doc_comment_decoration("// test"); assert_eq!(stripped, " test"); let stripped = strip_doc_comment_decoration("///test"); assert_eq!(stripped, "test"); let stripped = strip_doc_comment_decoration("///!test"); assert_eq!(stripped, "test"); let stripped = strip_doc_comment_decoration("//test"); assert_eq!(stripped, "test"); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/lexer/tokentrees.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use print::pprust::token_to_string;
use parse::lexer::StringReader;
use parse::{token, PResult};
use tokenstream::{Delimited, TokenStream, TokenTree};

// Token-tree construction: groups the lexer's flat token stream into
// `TokenTree`s, recovering as gracefully as possible from unbalanced or
// mismatched delimiters.
impl<'a> StringReader<'a> {
    // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
    crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
        let mut tts = Vec::new();
        while self.token != token::Eof {
            tts.push(self.parse_token_tree()?);
        }
        Ok(TokenStream::concat(tts))
    }

    // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
    // The close delimiter itself is NOT consumed: it is left for the caller to
    // inspect, since it may or may not match the delimiter that opened the
    // group. Errors inside the group are emitted eagerly and terminate the
    // group early, returning whatever was collected so far.
    fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
        let mut tts = vec![];
        loop {
            if let token::CloseDelim(..) = self.token {
                return TokenStream::concat(tts);
            }
            match self.parse_token_tree() {
                Ok(tree) => tts.push(tree),
                Err(mut e) => {
                    e.emit();
                    return TokenStream::concat(tts);
                }
            }
        }
    }

    // Parse a single token tree: either one ordinary token or a whole
    // delimited group (`(...)`, `[...]`, `{...}`).
    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
        match self.token {
            token::Eof => {
                // Hit end-of-file while delimiters are still open: point at
                // every unclosed opening delimiter recorded in `open_braces`.
                let msg = "this file contains an un-closed delimiter";
                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
                for &(_, sp) in &self.open_braces {
                    err.span_help(sp, "did you mean to close this delimiter?");
                }
                Err(err)
            },
            token::OpenDelim(delim) => {
                // The span for beginning of the delimited section
                let pre_span = self.span;

                // Parse the open delimiter.
                self.open_braces.push((delim, self.span));
                self.real_token();

                // Parse the token trees within the delimiters.
                // We stop at any delimiter so we can try to recover if the user
                // uses an incorrect delimiter.
                let tts = self.parse_token_trees_until_close_delim();

                // Expand to cover the entire delimited token tree
                let span = pre_span.with_hi(self.span.hi());

                match self.token {
                    // Correct delimiter.
                    token::CloseDelim(d) if d == delim => {
                        self.open_braces.pop().unwrap();

                        // Parse the close delimiter.
                        self.real_token();
                    }
                    // Incorrect delimiter.
                    token::CloseDelim(other) => {
                        let token_str = token_to_string(&self.token);
                        if self.last_unclosed_found_span != Some(self.span) {
                            // do not complain about the same unclosed delimiter multiple times
                            self.last_unclosed_found_span = Some(self.span);
                            let msg = format!("incorrect close delimiter: `{}`", token_str);
                            let mut err = self.sess.span_diagnostic.struct_span_err(
                                self.span,
                                &msg,
                            );
                            err.span_label(self.span, "incorrect close delimiter");
                            // This is a conservative error: only report the last unclosed
                            // delimiter. The previous unclosed delimiters could actually be
                            // closed! The parser just hasn't gotten to them yet.
                            if let Some(&(_, sp)) = self.open_braces.last() {
                                err.span_label(sp, "unclosed delimiter");
                            };
                            err.emit();
                        }
                        self.open_braces.pop().unwrap();

                        // If the incorrect delimiter matches an earlier opening
                        // delimiter, then don't consume it (it can be used to
                        // close the earlier one). Otherwise, consume it.
                        // E.g., we try to recover from:
                        // fn foo() {
                        //     bar(baz(
                        // }  // Incorrect delimiter but matches the earlier `{`
                        if !self.open_braces.iter().any(|&(b, _)| b == other) {
                            self.real_token();
                        }
                    }
                    token::Eof => {
                        // Silently recover, the EOF token will be seen again
                        // and an error emitted then. Thus we don't pop from
                        // self.open_braces here.
                    },
                    _ => {}
                }

                Ok(TokenTree::Delimited(span, Delimited {
                    delim,
                    tts: tts.into(),
                }).into())
            },
            token::CloseDelim(_) => {
                // An unexpected closing delimiter (i.e., there is no
                // matching opening delimiter).
                let token_str = token_to_string(&self.token);
                let msg = format!("unexpected close delimiter: `{}`", token_str);
                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
                err.span_label(self.span, "unexpected close delimiter");
                Err(err)
            },
            _ => {
                let tt = TokenTree::Token(self.span, self.token.clone());
                // Note that testing for joint-ness here is done via the raw
                // source span as the joint-ness is a property of the raw source
                // rather than wanting to take `override_span` into account.
                let raw = self.span_src_raw;
                self.real_token();
                // Two tokens are "joint" (e.g. `>` `>` forming `>>`) when they
                // abut in the raw source and the following token is an operator.
                let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
                Ok(if is_joint { tt.joint() } else { tt.into() })
            }
        }
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/parse/lexer/unicode_chars.rs
// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Characters and their corresponding confusables were collected from // http://www.unicode.org/Public/security/10.0.0/confusables.txt use syntax_pos::{Span, NO_EXPANSION}; use errors::{Applicability, DiagnosticBuilder}; use super::StringReader; const UNICODE_ARRAY: &[(char, &str, char)] = &[ ('
', "Line Separator", ' '), ('
', "Paragraph Separator", ' '), (' ', "Ogham Space mark", ' '), (' ', "En Quad", ' '), (' ', "Em Quad", ' '), (' ', "En Space", ' '), (' ', "Em Space", ' '), (' ', "Three-Per-Em Space", ' '), (' ', "Four-Per-Em Space", ' '), (' ', "Six-Per-Em Space", ' '), (' ', "Punctuation Space", ' '), (' ', "Thin Space", ' '), (' ', "Hair Space", ' '), (' ', "Medium Mathematical Space", ' '), (' ', "No-Break Space", ' '), (' ', "Figure Space", ' '), (' ', "Narrow No-Break Space", ' '), (' ', "Ideographic Space", ' '), ('ߺ', "Nko Lajanyalan", '_'), ('﹍', "Dashed Low Line", '_'), ('﹎', "Centreline Low Line", '_'), ('﹏', "Wavy Low Line", '_'), ('_', "Fullwidth Low Line", '_'), ('‐', "Hyphen", '-'), ('‑', "Non-Breaking Hyphen", '-'), ('‒', "Figure Dash", '-'), ('–', "En Dash", '-'), ('—', "Em Dash", '-'), ('﹘', "Small Em Dash", '-'), ('۔', "Arabic Full Stop", '-'), ('⁃', "Hyphen Bullet", '-'), ('˗', "Modifier Letter Minus Sign", '-'), ('−', "Minus Sign", '-'), ('➖', "Heavy Minus Sign", '-'), ('Ⲻ', "Coptic Letter Dialect-P Ni", '-'), ('ー', "Katakana-Hiragana Prolonged Sound Mark", '-'), ('-', "Fullwidth Hyphen-Minus", '-'), ('―', "Horizontal Bar", '-'), ('─', "Box Drawings Light Horizontal", '-'), ('━', "Box Drawings Heavy Horizontal", '-'), ('㇐', "CJK Stroke H", '-'), ('ꟷ', "Latin Epigraphic Letter Dideways", '-'), ('ᅳ', "Hangul Jungseong Eu", '-'), ('ㅡ', "Hangul Letter Eu", '-'), ('一', "CJK Unified Ideograph-4E00", '-'), ('⼀', "Kangxi Radical One", '-'), ('؍', "Arabic Date Separator", ','), ('٫', "Arabic Decimal Separator", ','), ('‚', "Single Low-9 Quotation Mark", ','), ('¸', "Cedilla", ','), ('ꓹ', "Lisu Letter Tone Na Po", ','), (',', "Fullwidth Comma", ','), (';', "Greek Question Mark", ';'), (';', "Fullwidth Semicolon", ';'), ('︔', "Presentation Form For Vertical Semicolon", ';'), ('ः', "Devanagari Sign Visarga", ':'), ('ઃ', "Gujarati Sign Visarga", ':'), (':', "Fullwidth Colon", ':'), ('։', "Armenian Full Stop", ':'), ('܃', "Syriac Supralinear Colon", ':'), ('܄', "Syriac 
Sublinear Colon", ':'), ('᛬', "Runic Multiple Punctuation", ':'), ('︰', "Presentation Form For Vertical Two Dot Leader", ':'), ('᠃', "Mongolian Full Stop", ':'), ('᠉', "Mongolian Manchu Full Stop", ':'), ('⁚', "Two Dot Punctuation", ':'), ('׃', "Hebrew Punctuation Sof Pasuq", ':'), ('˸', "Modifier Letter Raised Colon", ':'), ('꞉', "Modifier Letter Colon", ':'), ('∶', "Ratio", ':'), ('ː', "Modifier Letter Triangular Colon", ':'), ('ꓽ', "Lisu Letter Tone Mya Jeu", ':'), ('︓', "Presentation Form For Vertical Colon", ':'), ('!', "Fullwidth Exclamation Mark", '!'), ('ǃ', "Latin Letter Retroflex Click", '!'), ('ⵑ', "Tifinagh Letter Tuareg Yang", '!'), ('︕', "Presentation Form For Vertical Exclamation Mark", '!'), ('ʔ', "Latin Letter Glottal Stop", '?'), ('Ɂ', "Latin Capital Letter Glottal Stop", '?'), ('ॽ', "Devanagari Letter Glottal Stop", '?'), ('Ꭾ', "Cherokee Letter He", '?'), ('ꛫ', "Bamum Letter Ntuu", '?'), ('?', "Fullwidth Question Mark", '?'), ('︖', "Presentation Form For Vertical Question Mark", '?'), ('𝅭', "Musical Symbol Combining Augmentation Dot", '.'), ('․', "One Dot Leader", '.'), ('܁', "Syriac Supralinear Full Stop", '.'), ('܂', "Syriac Sublinear Full Stop", '.'), ('꘎', "Vai Full Stop", '.'), ('𐩐', "Kharoshthi Punctuation Dot", '.'), ('٠', "Arabic-Indic Digit Zero", '.'), ('۰', "Extended Arabic-Indic Digit Zero", '.'), ('ꓸ', "Lisu Letter Tone Mya Ti", '.'), ('·', "Middle Dot", '.'), ('・', "Katakana Middle Dot", '.'), ('・', "Halfwidth Katakana Middle Dot", '.'), ('᛫', "Runic Single Punctuation", '.'), ('·', "Greek Ano Teleia", '.'), ('⸱', "Word Separator Middle Dot", '.'), ('𐄁', "Aegean Word Separator Dot", '.'), ('•', "Bullet", '.'), ('‧', "Hyphenation Point", '.'), ('∙', "Bullet Operator", '.'), ('⋅', "Dot Operator", '.'), ('ꞏ', "Latin Letter Sinological Dot", '.'), ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'), ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'), ('.', "Fullwidth Full Stop", '.'), ('。', "Ideographic Full Stop", '.'), ('︒', 
"Presentation Form For Vertical Ideographic Full Stop", '.'), ('՝', "Armenian Comma", '\''), (''', "Fullwidth Apostrophe", '\''), ('‘', "Left Single Quotation Mark", '\''), ('’', "Right Single Quotation Mark", '\''), ('‛', "Single High-Reversed-9 Quotation Mark", '\''), ('′', "Prime", '\''), ('‵', "Reversed Prime", '\''), ('՚', "Armenian Apostrophe", '\''), ('׳', "Hebrew Punctuation Geresh", '\''), ('`', "Grave Accent", '\''), ('`', "Greek Varia", '\''), ('`', "Fullwidth Grave Accent", '\''), ('´', "Acute Accent", '\''), ('΄', "Greek Tonos", '\''), ('´', "Greek Oxia", '\''), ('᾽', "Greek Koronis", '\''), ('᾿', "Greek Psili", '\''), ('῾', "Greek Dasia", '\''), ('ʹ', "Modifier Letter Prime", '\''), ('ʹ', "Greek Numeral Sign", '\''), ('ˈ', "Modifier Letter Vertical Line", '\''), ('ˊ', "Modifier Letter Acute Accent", '\''), ('ˋ', "Modifier Letter Grave Accent", '\''), ('˴', "Modifier Letter Middle Grave Accent", '\''), ('ʻ', "Modifier Letter Turned Comma", '\''), ('ʽ', "Modifier Letter Reversed Comma", '\''), ('ʼ', "Modifier Letter Apostrophe", '\''), ('ʾ', "Modifier Letter Right Half Ring", '\''), ('ꞌ', "Latin Small Letter Saltillo", '\''), ('י', "Hebrew Letter Yod", '\''), ('ߴ', "Nko High Tone Apostrophe", '\''), ('ߵ', "Nko Low Tone Apostrophe", '\''), ('ᑊ', "Canadian Syllabics West-Cree P", '\''), ('ᛌ', "Runic Letter Short-Twig-Sol S", '\''), ('𖽑', "Miao Sign Aspiration", '\''), ('𖽒', "Miao Sign Reformed Voicing", '\''), ('᳓', "Vedic Sign Nihshvasa", '"'), ('"', "Fullwidth Quotation Mark", '"'), ('“', "Left Double Quotation Mark", '"'), ('”', "Right Double Quotation Mark", '"'), ('‟', "Double High-Reversed-9 Quotation Mark", '"'), ('″', "Double Prime", '"'), ('‶', "Reversed Double Prime", '"'), ('〃', "Ditto Mark", '"'), ('״', "Hebrew Punctuation Gershayim", '"'), ('˝', "Double Acute Accent", '"'), ('ʺ', "Modifier Letter Double Prime", '"'), ('˶', "Modifier Letter Middle Double Acute Accent", '"'), ('˵', "Modifier Letter Middle Double Grave Accent", '"'), ('ˮ', 
"Modifier Letter Double Apostrophe", '"'), ('ײ', "Hebrew Ligature Yiddish Double Yod", '"'), ('❞', "Heavy Double Comma Quotation Mark Ornament", '"'), ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", '"'), ('(', "Fullwidth Left Parenthesis", '('), ('❨', "Medium Left Parenthesis Ornament", '('), ('﴾', "Ornate Left Parenthesis", '('), (')', "Fullwidth Right Parenthesis", ')'), ('❩', "Medium Right Parenthesis Ornament", ')'), ('﴿', "Ornate Right Parenthesis", ')'), ('[', "Fullwidth Left Square Bracket", '['), ('❲', "Light Left Tortoise Shell Bracket Ornament", '['), ('「', "Left Corner Bracket", '['), ('『', "Left White Corner Bracket", '['), ('【', "Left Black Lenticular Bracket", '['), ('〔', "Left Tortoise Shell Bracket", '['), ('〖', "Left White Lenticular Bracket", '['), ('〘', "Left White Tortoise Shell Bracket", '['), ('〚', "Left White Square Bracket", '['), (']', "Fullwidth Right Square Bracket", ']'), ('❳', "Light Right Tortoise Shell Bracket Ornament", ']'), ('」', "Right Corner Bracket", ']'), ('』', "Right White Corner Bracket", ']'), ('】', "Right Black Lenticular Bracket", ']'), ('〕', "Right Tortoise Shell Bracket", ']'), ('〗', "Right White Lenticular Bracket", ']'), ('〙', "Right White Tortoise Shell Bracket", ']'), ('〛', "Right White Square Bracket", ']'), ('❴', "Medium Left Curly Bracket Ornament", '{'), ('𝄔', "Musical Symbol Brace", '{'), ('{', "Fullwidth Left Curly Bracket", '{'), ('❵', "Medium Right Curly Bracket Ornament", '}'), ('}', "Fullwidth Right Curly Bracket", '}'), ('⁎', "Low Asterisk", '*'), ('٭', "Arabic Five Pointed Star", '*'), ('∗', "Asterisk Operator", '*'), ('𐌟', "Old Italic Letter Ess", '*'), ('*', "Fullwidth Asterisk", '*'), ('᜵', "Philippine Single Punctuation", '/'), ('⁁', "Caret Insertion Point", '/'), ('∕', "Division Slash", '/'), ('⁄', "Fraction Slash", '/'), ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", '/'), ('⟋', "Mathematical Rising Diagonal", '/'), ('⧸', "Big Solidus", '/'), ('𝈺', "Greek Instrumental 
Notation Symbol-47", '/'), ('㇓', "CJK Stroke Sp", '/'), ('〳', "Vertical Kana Repeat Mark Upper Half", '/'), ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", '/'), ('ノ', "Katakana Letter No", '/'), ('丿', "CJK Unified Ideograph-4E3F", '/'), ('⼃', "Kangxi Radical Slash", '/'), ('/', "Fullwidth Solidus", '/'), ('\', "Fullwidth Reverse Solidus", '\\'), ('﹨', "Small Reverse Solidus", '\\'), ('∖', "Set Minus", '\\'), ('⟍', "Mathematical Falling Diagonal", '\\'), ('⧵', "Reverse Solidus Operator", '\\'), ('⧹', "Big Reverse Solidus", '\\'), ('⧹', "Greek Vocal Notation Symbol-16", '\\'), ('⧹', "Greek Instrumental Symbol-48", '\\'), ('㇔', "CJK Stroke D", '\\'), ('丶', "CJK Unified Ideograph-4E36", '\\'), ('⼂', "Kangxi Radical Dot", '\\'), ('、', "Ideographic Comma", '\\'), ('ヽ', "Katakana Iteration Mark", '\\'), ('ꝸ', "Latin Small Letter Um", '&'), ('&', "Fullwidth Ampersand", '&'), ('᛭', "Runic Cross Punctuation", '+'), ('➕', "Heavy Plus Sign", '+'), ('𐊛', "Lycian Letter H", '+'), ('﬩', "Hebrew Letter Alternative Plus Sign", '+'), ('+', "Fullwidth Plus Sign", '+'), ('‹', "Single Left-Pointing Angle Quotation Mark", '<'), ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", '<'), ('˂', "Modifier Letter Left Arrowhead", '<'), ('𝈶', "Greek Instrumental Symbol-40", '<'), ('ᐸ', "Canadian Syllabics Pa", '<'), ('ᚲ', "Runic Letter Kauna", '<'), ('❬', "Medium Left-Pointing Angle Bracket Ornament", '<'), ('⟨', "Mathematical Left Angle Bracket", '<'), ('〈', "Left-Pointing Angle Bracket", '<'), ('〈', "Left Angle Bracket", '<'), ('㇛', "CJK Stroke Pd", '<'), ('く', "Hiragana Letter Ku", '<'), ('𡿨', "CJK Unified Ideograph-21FE8", '<'), ('《', "Left Double Angle Bracket", '<'), ('<', "Fullwidth Less-Than Sign", '<'), ('᐀', "Canadian Syllabics Hyphen", '='), ('⹀', "Double Hyphen", '='), ('゠', "Katakana-Hiragana Double Hyphen", '='), ('꓿', "Lisu Punctuation Full Stop", '='), ('=', "Fullwidth Equals Sign", '='), ('›', "Single Right-Pointing Angle Quotation Mark", '>'), ('❯', "Heavy 
Right-Pointing Angle Quotation Mark Ornament", '>'), ('˃', "Modifier Letter Right Arrowhead", '>'), ('𝈷', "Greek Instrumental Symbol-42", '>'), ('ᐳ', "Canadian Syllabics Po", '>'), ('𖼿', "Miao Letter Archaic Zza", '>'), ('❭', "Medium Right-Pointing Angle Bracket Ornament", '>'), ('⟩', "Mathematical Right Angle Bracket", '>'), ('〉', "Right-Pointing Angle Bracket", '>'), ('〉', "Right Angle Bracket", '>'), ('》', "Right Double Angle Bracket", '>'), ('>', "Fullwidth Greater-Than Sign", '>'), ]; const ASCII_ARRAY: &'static [(char, &'static str)] = &[ (' ', "Space"), ('_', "Underscore"), ('-', "Minus/Hyphen"), (',', "Comma"), (';', "Semicolon"), (':', "Colon"), ('!', "Exclamation Mark"), ('?', "Question Mark"), ('.', "Period"), ('\'', "Single Quote"), ('"', "Quotation Mark"), ('(', "Left Parenthesis"), (')', "Right Parenthesis"), ('[', "Left Square Bracket"), (']', "Right Square Bracket"), ('{', "Left Curly Brace"), ('}', "Right Curly Brace"), ('*', "Asterisk"), ('/', "Slash"), ('\\', "Backslash"), ('&', "Ampersand"), ('+', "Plus Sign"), ('<', "Less-Than Sign"), ('=', "Equals Sign"), ('>', "Greater-Than Sign"), ]; crate fn check_for_substitution<'a>(reader: &StringReader<'a>, ch: char, err: &mut DiagnosticBuilder<'a>) -> bool { UNICODE_ARRAY .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { let span = Span::new(reader.pos, reader.next_pos, NO_EXPANSION); match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = format!("Unicode character '{}' ({}) looks like '{}' ({}), but it is not", ch, u_name, ascii_char, ascii_name); err.span_suggestion_with_applicability( span, &msg, ascii_char.to_string(), Applicability::MaybeIncorrect); true }, None => { let msg = format!("substitution character not found for '{}'", ch); reader.sess.span_diagnostic.span_bug_no_panic(span, &msg); false } } }).unwrap_or(false) }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/print/pp.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This pretty-printer is a direct reimplementation of Philip Karlton's //! Mesa pretty-printer, as described in appendix A of //! //! ````text //! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen. //! Stanford Department of Computer Science, 1979. //! ```` //! //! The algorithm's aim is to break a stream into as few lines as possible //! while respecting the indentation-consistency requirements of the enclosing //! block, and avoiding breaking at silly places on block boundaries, for //! example, between "x" and ")" in "x)". //! //! I am implementing this algorithm because it comes with 20 pages of //! documentation explaining its theory, and because it addresses the set of //! concerns I've seen other pretty-printers fall down on. Weirdly. Even though //! it's 32 years old. What can I say? //! //! Despite some redundancies and quirks in the way it's implemented in that //! paper, I've opted to keep the implementation here as similar as I can, //! changing only what was blatantly wrong, a typo, or sufficiently //! non-idiomatic rust that it really stuck out. //! //! In particular you'll see a certain amount of churn related to INTEGER vs. //! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two //! somewhat readily? In any case, I've used usize for indices-in-buffers and //! ints for character-sizes-and-indentation-offsets. This respects the need //! for ints to "go negative" while carrying a pending-calculation balance, and //! 
helps differentiate all the numbers flying around internally (slightly). //! //! I also inverted the indentation arithmetic used in the print stack, since //! the Mesa implementation (somewhat randomly) stores the offset on the print //! stack in terms of margin-col rather than col itself. I store col. //! //! I also implemented a small change in the String token, in that I store an //! explicit length for the string. For most tokens this is just the length of //! the accompanying string. But it's necessary to permit it to differ, for //! encoding things that are supposed to "go on their own line" -- certain //! classes of comment and blank-line -- where relying on adjacent //! hardbreak-like Break tokens with long blankness indication doesn't actually //! work. To see why, consider when there is a "thing that should be on its own //! line" between two long blocks, say functions. If you put a hardbreak after //! each function (or before each) and the breaking algorithm decides to break //! there anyways (because the functions themselves are long) you wind up with //! extra blank lines. If you don't put hardbreaks you can wind up with the //! "thing which should be on its own line" not getting its own line in the //! rare case of "really small functions" or such. This re-occurs with comments //! and explicit blank lines. So in those cases we use a string with a payload //! we want isolated to a line and an explicit length that's huge, surrounded //! by two zero-length breaks. The algorithm will try its best to fit it on a //! line (which it can't) and so naturally place the content on its own line to //! avoid combining it with other lines and making matters even worse. //! //! # Explanation //! //! In case you do not have the paper, here is an explanation of what's going //! on. //! //! There is a stream of input tokens flowing through this printer. //! //! The printer buffers up to 3N tokens inside itself, where N is linewidth. //! 
Yes, linewidth is chars and tokens are multi-char, but in the worst //! case every token worth buffering is 1 char long, so it's ok. //! //! Tokens are String, Break, and Begin/End to delimit blocks. //! //! Begin tokens can carry an offset, saying "how far to indent when you break //! inside here", as well as a flag indicating "consistent" or "inconsistent" //! breaking. Consistent breaking means that after the first break, no attempt //! will be made to flow subsequent breaks together onto lines. Inconsistent //! is the opposite. Inconsistent breaking example would be, say: //! //! ``` //! foo(hello, there, good, friends) //! ``` //! //! breaking inconsistently to become //! //! ``` //! foo(hello, there //! good, friends); //! ``` //! //! whereas a consistent breaking would yield: //! //! ``` //! foo(hello, //! there //! good, //! friends); //! ``` //! //! That is, in the consistent-break blocks we value vertical alignment //! more than the ability to cram stuff onto a line. But in all cases if it //! can make a block a one-liner, it'll do so. //! //! Carrying on with high-level logic: //! //! The buffered tokens go through a ring-buffer, 'tokens'. The 'left' and //! 'right' indices denote the active portion of the ring buffer as well as //! describing hypothetical points-in-the-infinite-stream at most 3N tokens //! apart (i.e. "not wrapped to ring-buffer boundaries"). The paper will switch //! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer //! and point-in-infinite-stream senses freely. //! //! There is a parallel ring buffer, `size`, that holds the calculated size of //! each token. Why calculated? Because for Begin/End pairs, the "size" //! includes everything between the pair. That is, the "size" of Begin is //! actually the sum of the sizes of everything between Begin and the paired //! End that follows. Since that is arbitrarily far in the future, `size` is //! 
being rewritten regularly while the printer runs; in fact most of the //! machinery is here to work out `size` entries on the fly (and give up when //! they're so obviously over-long that "infinity" is a good enough //! approximation for purposes of line breaking). //! //! The "input side" of the printer is managed as an abstract process called //! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in //! other words, the process of calculating 'size' entries. //! //! The "output side" of the printer is managed by an abstract process called //! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to //! do with each token/size pair it consumes as it goes. It's trying to consume //! the entire buffered window, but can't output anything until the size is >= //! 0 (sizes are set to negative while they're pending calculation). //! //! So SCAN takes input and buffers tokens and pending calculations, while //! PRINT gobbles up completed calculations and tokens from the buffer. The //! theory is that the two can never get more than 3N tokens apart, because //! once there's "obviously" too much data to fit on a line, in a size //! calculation, SCAN will write "infinity" to the size and let PRINT consume //! it. //! //! In this implementation (following the paper, again) the SCAN process is //! the method called `Printer::pretty_print`, and the 'PRINT' process is the method //! called `Printer::print`. use std::collections::VecDeque; use std::fmt; use std::io; /// How to break. Described in more detail in the module docs. 
#[derive(Clone, Copy, PartialEq)] pub enum Breaks { Consistent, Inconsistent, } #[derive(Clone, Copy)] pub struct BreakToken { offset: isize, blank_space: isize } #[derive(Clone, Copy)] pub struct BeginToken { offset: isize, breaks: Breaks } #[derive(Clone)] pub enum Token { String(String, isize), Break(BreakToken), Begin(BeginToken), End, Eof, } impl Token { pub fn is_eof(&self) -> bool { match *self { Token::Eof => true, _ => false, } } pub fn is_hardbreak_tok(&self) -> bool { match *self { Token::Break(BreakToken { offset: 0, blank_space: bs }) if bs == SIZE_INFINITY => true, _ => false } } } impl fmt::Display for Token { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Token::String(ref s, len) => write!(f, "STR({},{})", s, len), Token::Break(_) => f.write_str("BREAK"), Token::Begin(_) => f.write_str("BEGIN"), Token::End => f.write_str("END"), Token::Eof => f.write_str("EOF"), } } } fn buf_str(buf: &[BufEntry], left: usize, right: usize, lim: usize) -> String { let n = buf.len(); let mut i = left; let mut l = lim; let mut s = String::from("["); while i != right && l != 0 { l -= 1; if i != left { s.push_str(", "); } s.push_str(&format!("{}={}", buf[i].size, &buf[i].token)); i += 1; i %= n; } s.push(']'); s } #[derive(Copy, Clone)] pub enum PrintStackBreak { Fits, Broken(Breaks), } #[derive(Copy, Clone)] pub struct PrintStackElem { offset: isize, pbreak: PrintStackBreak } const SIZE_INFINITY: isize = 0xffff; pub fn mk_printer<'a>(out: Box<dyn io::Write+'a>, linewidth: usize) -> Printer<'a> { // Yes 55, it makes the ring buffers big enough to never fall behind. let n: usize = 55 * linewidth; debug!("mk_printer {}", linewidth); Printer { out, buf_max_len: n, margin: linewidth as isize, space: linewidth as isize, left: 0, right: 0, // Initialize a single entry; advance_right() will extend it on demand // up to `buf_max_len` elements. 
buf: vec![BufEntry::default()], left_total: 0, right_total: 0, scan_stack: VecDeque::new(), print_stack: Vec::new(), pending_indentation: 0 } } pub struct Printer<'a> { out: Box<dyn io::Write+'a>, buf_max_len: usize, /// Width of lines we're constrained to margin: isize, /// Number of spaces left on line space: isize, /// Index of left side of input stream left: usize, /// Index of right side of input stream right: usize, /// Ring-buffer of tokens and calculated sizes buf: Vec<BufEntry>, /// Running size of stream "...left" left_total: isize, /// Running size of stream "...right" right_total: isize, /// Pseudo-stack, really a ring too. Holds the /// primary-ring-buffers index of the Begin that started the /// current block, possibly with the most recent Break after that /// Begin (if there is any) on top of it. Stuff is flushed off the /// bottom as it becomes irrelevant due to the primary ring-buffer /// advancing. scan_stack: VecDeque<usize>, /// Stack of blocks-in-progress being flushed by print print_stack: Vec<PrintStackElem> , /// Buffered indentation to avoid writing trailing whitespace pending_indentation: isize, } #[derive(Clone)] struct BufEntry { token: Token, size: isize, } impl Default for BufEntry { fn default() -> Self { BufEntry { token: Token::Eof, size: 0 } } } impl<'a> Printer<'a> { pub fn last_token(&mut self) -> Token { self.buf[self.right].token.clone() } /// be very careful with this! 
pub fn replace_last_token(&mut self, t: Token) { self.buf[self.right].token = t; } pub fn pretty_print(&mut self, token: Token) -> io::Result<()> { debug!("pp Vec<{},{}>", self.left, self.right); match token { Token::Eof => { if !self.scan_stack.is_empty() { self.check_stack(0); self.advance_left()?; } self.indent(0); Ok(()) } Token::Begin(b) => { if self.scan_stack.is_empty() { self.left_total = 1; self.right_total = 1; self.left = 0; self.right = 0; } else { self.advance_right(); } debug!("pp Begin({})/buffer Vec<{},{}>", b.offset, self.left, self.right); self.buf[self.right] = BufEntry { token: token, size: -self.right_total }; let right = self.right; self.scan_push(right); Ok(()) } Token::End => { if self.scan_stack.is_empty() { debug!("pp End/print Vec<{},{}>", self.left, self.right); self.print(token, 0) } else { debug!("pp End/buffer Vec<{},{}>", self.left, self.right); self.advance_right(); self.buf[self.right] = BufEntry { token: token, size: -1 }; let right = self.right; self.scan_push(right); Ok(()) } } Token::Break(b) => { if self.scan_stack.is_empty() { self.left_total = 1; self.right_total = 1; self.left = 0; self.right = 0; } else { self.advance_right(); } debug!("pp Break({})/buffer Vec<{},{}>", b.offset, self.left, self.right); self.check_stack(0); let right = self.right; self.scan_push(right); self.buf[self.right] = BufEntry { token: token, size: -self.right_total }; self.right_total += b.blank_space; Ok(()) } Token::String(s, len) => { if self.scan_stack.is_empty() { debug!("pp String('{}')/print Vec<{},{}>", s, self.left, self.right); self.print(Token::String(s, len), len) } else { debug!("pp String('{}')/buffer Vec<{},{}>", s, self.left, self.right); self.advance_right(); self.buf[self.right] = BufEntry { token: Token::String(s, len), size: len }; self.right_total += len; self.check_stream() } } } } pub fn check_stream(&mut self) -> io::Result<()> { debug!("check_stream Vec<{}, {}> with left_total={}, right_total={}", self.left, self.right, 
self.left_total, self.right_total); if self.right_total - self.left_total > self.space { debug!("scan window is {}, longer than space on line ({})", self.right_total - self.left_total, self.space); if Some(&self.left) == self.scan_stack.back() { debug!("setting {} to infinity and popping", self.left); let scanned = self.scan_pop_bottom(); self.buf[scanned].size = SIZE_INFINITY; } self.advance_left()?; if self.left != self.right { self.check_stream()?; } } Ok(()) } pub fn scan_push(&mut self, x: usize) { debug!("scan_push {}", x); self.scan_stack.push_front(x); } pub fn scan_pop(&mut self) -> usize { self.scan_stack.pop_front().unwrap() } pub fn scan_top(&mut self) -> usize { *self.scan_stack.front().unwrap() } pub fn scan_pop_bottom(&mut self) -> usize { self.scan_stack.pop_back().unwrap() } pub fn advance_right(&mut self) { self.right += 1; self.right %= self.buf_max_len; // Extend the buf if necessary. if self.right == self.buf.len() { self.buf.push(BufEntry::default()); } assert_ne!(self.right, self.left); } pub fn advance_left(&mut self) -> io::Result<()> { debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right, self.left, self.buf[self.left].size); let mut left_size = self.buf[self.left].size; while left_size >= 0 { let left = self.buf[self.left].token.clone(); let len = match left { Token::Break(b) => b.blank_space, Token::String(_, len) => { assert_eq!(len, left_size); len } _ => 0 }; self.print(left, left_size)?; self.left_total += len; if self.left == self.right { break; } self.left += 1; self.left %= self.buf_max_len; left_size = self.buf[self.left].size; } Ok(()) } pub fn check_stack(&mut self, k: isize) { if !self.scan_stack.is_empty() { let x = self.scan_top(); match self.buf[x].token { Token::Begin(_) => { if k > 0 { let popped = self.scan_pop(); self.buf[popped].size = self.buf[x].size + self.right_total; self.check_stack(k - 1); } } Token::End => { // paper says + not =, but that makes no sense. 
let popped = self.scan_pop(); self.buf[popped].size = 1; self.check_stack(k + 1); } _ => { let popped = self.scan_pop(); self.buf[popped].size = self.buf[x].size + self.right_total; if k > 0 { self.check_stack(k); } } } } } pub fn print_newline(&mut self, amount: isize) -> io::Result<()> { debug!("NEWLINE {}", amount); let ret = write!(self.out, "\n"); self.pending_indentation = 0; self.indent(amount); ret } pub fn indent(&mut self, amount: isize) { debug!("INDENT {}", amount); self.pending_indentation += amount; } pub fn get_top(&mut self) -> PrintStackElem { match self.print_stack.last() { Some(el) => *el, None => PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) } } } pub fn print_str(&mut self, s: &str) -> io::Result<()> { while self.pending_indentation > 0 { write!(self.out, " ")?; self.pending_indentation -= 1; } write!(self.out, "{}", s) } pub fn print(&mut self, token: Token, l: isize) -> io::Result<()> { debug!("print {} {} (remaining line space={})", token, l, self.space); debug!("{}", buf_str(&self.buf, self.left, self.right, 6)); match token { Token::Begin(b) => { if l > self.space { let col = self.margin - self.space + b.offset; debug!("print Begin -> push broken block at col {}", col); self.print_stack.push(PrintStackElem { offset: col, pbreak: PrintStackBreak::Broken(b.breaks) }); } else { debug!("print Begin -> push fitting block"); self.print_stack.push(PrintStackElem { offset: 0, pbreak: PrintStackBreak::Fits }); } Ok(()) } Token::End => { debug!("print End -> pop End"); let print_stack = &mut self.print_stack; assert!(!print_stack.is_empty()); print_stack.pop().unwrap(); Ok(()) } Token::Break(b) => { let top = self.get_top(); match top.pbreak { PrintStackBreak::Fits => { debug!("print Break({}) in fitting block", b.blank_space); self.space -= b.blank_space; self.indent(b.blank_space); Ok(()) } PrintStackBreak::Broken(Breaks::Consistent) => { debug!("print Break({}+{}) in consistent block", top.offset, b.offset); 
let ret = self.print_newline(top.offset + b.offset); self.space = self.margin - (top.offset + b.offset); ret } PrintStackBreak::Broken(Breaks::Inconsistent) => { if l > self.space { debug!("print Break({}+{}) w/ newline in inconsistent", top.offset, b.offset); let ret = self.print_newline(top.offset + b.offset); self.space = self.margin - (top.offset + b.offset); ret } else { debug!("print Break({}) w/o newline in inconsistent", b.blank_space); self.indent(b.blank_space); self.space -= b.blank_space; Ok(()) } } } } Token::String(ref s, len) => { debug!("print String({})", s); assert_eq!(l, len); // assert!(l <= space); self.space -= len; self.print_str(s) } Token::Eof => { // Eof should never get here. panic!(); } } } // Convenience functions to talk to the printer. /// "raw box" pub fn rbox(&mut self, indent: usize, b: Breaks) -> io::Result<()> { self.pretty_print(Token::Begin(BeginToken { offset: indent as isize, breaks: b })) } /// Inconsistent breaking box pub fn ibox(&mut self, indent: usize) -> io::Result<()> { self.rbox(indent, Breaks::Inconsistent) } /// Consistent breaking box pub fn cbox(&mut self, indent: usize) -> io::Result<()> { self.rbox(indent, Breaks::Consistent) } pub fn break_offset(&mut self, n: usize, off: isize) -> io::Result<()> { self.pretty_print(Token::Break(BreakToken { offset: off, blank_space: n as isize })) } pub fn end(&mut self) -> io::Result<()> { self.pretty_print(Token::End) } pub fn eof(&mut self) -> io::Result<()> { self.pretty_print(Token::Eof) } pub fn word(&mut self, wrd: &str) -> io::Result<()> { self.pretty_print(Token::String(wrd.to_string(), wrd.len() as isize)) } pub fn huge_word(&mut self, wrd: &str) -> io::Result<()> { self.pretty_print(Token::String(wrd.to_string(), SIZE_INFINITY)) } pub fn zero_word(&mut self, wrd: &str) -> io::Result<()> { self.pretty_print(Token::String(wrd.to_string(), 0)) } fn spaces(&mut self, n: usize) -> io::Result<()> { self.break_offset(n, 0) } pub fn zerobreak(&mut self) -> io::Result<()> { 
self.spaces(0) } pub fn space(&mut self) -> io::Result<()> { self.spaces(1) } pub fn hardbreak(&mut self) -> io::Result<()> { self.spaces(SIZE_INFINITY as usize) } pub fn hardbreak_tok_offset(off: isize) -> Token { Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY}) } pub fn hardbreak_tok() -> Token { Self::hardbreak_tok_offset(0) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/print/pprust.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::AnnNode::*; use rustc_target::spec::abi::{self, Abi}; use ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax}; use ast::{SelfKind, GenericBound, TraitBoundModifier}; use ast::{Attribute, MacDelimiter, GenericArg}; use util::parser::{self, AssocOp, Fixity}; use attr; use source_map::{self, SourceMap, Spanned}; use syntax_pos::{self, BytePos}; use syntax_pos::hygiene::{Mark, SyntaxContext}; use parse::token::{self, BinOpToken, Token}; use parse::lexer::comments; use parse::{self, ParseSess}; use print::pp::{self, Breaks}; use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; use symbol::keywords; use syntax_pos::{DUMMY_SP, FileName}; use tokenstream::{self, TokenStream, TokenTree}; use std::ascii; use std::io::{self, Write, Read}; use std::iter::Peekable; use std::vec; pub enum AnnNode<'a> { NodeIdent(&'a ast::Ident), NodeName(&'a ast::Name), NodeBlock(&'a ast::Block), NodeItem(&'a ast::Item), NodeSubItem(ast::NodeId), NodeExpr(&'a ast::Expr), NodePat(&'a ast::Pat), } pub trait PpAnn { fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } } #[derive(Copy, Clone)] pub struct NoAnn; impl PpAnn for NoAnn {} pub struct State<'a> { pub s: pp::Printer<'a>, cm: Option<&'a SourceMap>, comments: Option<Vec<comments::Comment> >, literals: Peekable<vec::IntoIter<comments::Literal>>, cur_cmnt: usize, boxes: Vec<pp::Breaks>, ann: &'a (dyn PpAnn+'a), } fn 
rust_printer<'a>(writer: Box<dyn Write+'a>, ann: &'a dyn PpAnn) -> State<'a> { State { s: pp::mk_printer(writer, DEFAULT_COLUMNS), cm: None, comments: None, literals: vec![].into_iter().peekable(), cur_cmnt: 0, boxes: Vec::new(), ann, } } pub const INDENT_UNIT: usize = 4; pub const DEFAULT_COLUMNS: usize = 78; /// Requires you to pass an input filename and reader so that /// it can scan the input text for comments and literals to /// copy forward. pub fn print_crate<'a>(cm: &'a SourceMap, sess: &ParseSess, krate: &ast::Crate, filename: FileName, input: &mut dyn Read, out: Box<dyn Write+'a>, ann: &'a dyn PpAnn, is_expanded: bool) -> io::Result<()> { let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded); if is_expanded && std_inject::injected_crate_name().is_some() { // We need to print `#![no_std]` (and its feature gate) so that // compiling pretty-printed source won't inject libstd again. // However we don't want these attributes in the AST because // of the feature gate, so we fake them up here. 
// #![feature(prelude_import)] let pi_nested = attr::mk_nested_word_item(ast::Ident::from_str("prelude_import")); let list = attr::mk_list_item(DUMMY_SP, ast::Ident::from_str("feature"), vec![pi_nested]); let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), list); s.print_attribute(&fake_attr)?; // #![no_std] let no_std_meta = attr::mk_word_item(ast::Ident::from_str("no_std")); let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), no_std_meta); s.print_attribute(&fake_attr)?; } s.print_mod(&krate.module, &krate.attrs)?; s.print_remaining_comments()?; s.s.eof() } impl<'a> State<'a> { pub fn new_from_input(cm: &'a SourceMap, sess: &ParseSess, filename: FileName, input: &mut dyn Read, out: Box<dyn Write+'a>, ann: &'a dyn PpAnn, is_expanded: bool) -> State<'a> { let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input); State::new( cm, out, ann, Some(cmnts), // If the code is post expansion, don't use the table of // literals, since it doesn't correspond with the literals // in the AST anymore. 
if is_expanded { None } else { Some(lits) }) } pub fn new(cm: &'a SourceMap, out: Box<dyn Write+'a>, ann: &'a dyn PpAnn, comments: Option<Vec<comments::Comment>>, literals: Option<Vec<comments::Literal>>) -> State<'a> { State { s: pp::mk_printer(out, DEFAULT_COLUMNS), cm: Some(cm), comments, literals: literals.unwrap_or_default().into_iter().peekable(), cur_cmnt: 0, boxes: Vec::new(), ann, } } } pub fn to_string<F>(f: F) -> String where F: FnOnce(&mut State) -> io::Result<()>, { let mut wr = Vec::new(); { let ann = NoAnn; let mut printer = rust_printer(Box::new(&mut wr), &ann); f(&mut printer).unwrap(); printer.s.eof().unwrap(); } String::from_utf8(wr).unwrap() } fn binop_to_string(op: BinOpToken) -> &'static str { match op { token::Plus => "+", token::Minus => "-", token::Star => "*", token::Slash => "/", token::Percent => "%", token::Caret => "^", token::And => "&", token::Or => "|", token::Shl => "<<", token::Shr => ">>", } } pub fn token_to_string(tok: &Token) -> String { match *tok { token::Eq => "=".to_string(), token::Lt => "<".to_string(), token::Le => "<=".to_string(), token::EqEq => "==".to_string(), token::Ne => "!=".to_string(), token::Ge => ">=".to_string(), token::Gt => ">".to_string(), token::Not => "!".to_string(), token::Tilde => "~".to_string(), token::OrOr => "||".to_string(), token::AndAnd => "&&".to_string(), token::BinOp(op) => binop_to_string(op).to_string(), token::BinOpEq(op) => format!("{}=", binop_to_string(op)), /* Structural symbols */ token::At => "@".to_string(), token::Dot => ".".to_string(), token::DotDot => "..".to_string(), token::DotDotDot => "...".to_string(), token::DotDotEq => "..=".to_string(), token::DotEq => ".=".to_string(), token::Comma => ",".to_string(), token::Semi => ";".to_string(), token::Colon => ":".to_string(), token::ModSep => "::".to_string(), token::RArrow => "->".to_string(), token::LArrow => "<-".to_string(), token::FatArrow => "=>".to_string(), token::OpenDelim(token::Paren) => "(".to_string(), 
token::CloseDelim(token::Paren) => ")".to_string(), token::OpenDelim(token::Bracket) => "[".to_string(), token::CloseDelim(token::Bracket) => "]".to_string(), token::OpenDelim(token::Brace) => "{".to_string(), token::CloseDelim(token::Brace) => "}".to_string(), token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(), token::Pound => "#".to_string(), token::Dollar => "$".to_string(), token::Question => "?".to_string(), token::SingleQuote => "'".to_string(), /* Literals */ token::Literal(lit, suf) => { let mut out = match lit { token::Byte(b) => format!("b'{}'", b), token::Char(c) => format!("'{}'", c), token::Float(c) | token::Integer(c) => c.to_string(), token::Str_(s) => format!("\"{}\"", s), token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}", delim="#".repeat(n as usize), string=s), token::ByteStr(v) => format!("b\"{}\"", v), token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}", delim="#".repeat(n as usize), string=s), }; if let Some(s) = suf { out.push_str(&s.as_str()) } out } /* Name components */ token::Ident(s, false) => s.to_string(), token::Ident(s, true) => format!("r#{}", s), token::Lifetime(s) => s.to_string(), /* Other */ token::DocComment(s) => s.to_string(), token::Eof => "<eof>".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref e) => expr_to_string(e), token::NtMeta(ref e) => meta_item_to_string(e), token::NtTy(ref e) => ty_to_string(e), token::NtPath(ref e) => path_to_string(e), token::NtItem(ref e) => item_to_string(e), token::NtBlock(ref e) => block_to_string(e), token::NtStmt(ref e) => stmt_to_string(e), token::NtPat(ref e) => pat_to_string(e), token::NtIdent(e, false) => ident_to_string(e), token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)), token::NtLifetime(e) => ident_to_string(e), token::NtLiteral(ref e) => 
expr_to_string(e), token::NtTT(ref tree) => tt_to_string(tree.clone()), token::NtArm(ref e) => arm_to_string(e), token::NtImplItem(ref e) => impl_item_to_string(e), token::NtTraitItem(ref e) => trait_item_to_string(e), token::NtGenerics(ref e) => generic_params_to_string(&e.params), token::NtWhereClause(ref e) => where_clause_to_string(e), token::NtArg(ref e) => arg_to_string(e), token::NtVis(ref e) => vis_to_string(e), token::NtForeignItem(ref e) => foreign_item_to_string(e), } } } pub fn ty_to_string(ty: &ast::Ty) -> String { to_string(|s| s.print_type(ty)) } pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String { to_string(|s| s.print_type_bounds("", bounds)) } pub fn pat_to_string(pat: &ast::Pat) -> String { to_string(|s| s.print_pat(pat)) } pub fn arm_to_string(arm: &ast::Arm) -> String { to_string(|s| s.print_arm(arm)) } pub fn expr_to_string(e: &ast::Expr) -> String { to_string(|s| s.print_expr(e)) } pub fn lifetime_to_string(lt: &ast::Lifetime) -> String { to_string(|s| s.print_lifetime(*lt)) } pub fn tt_to_string(tt: tokenstream::TokenTree) -> String { to_string(|s| s.print_tt(tt)) } pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String { to_string(|s| s.print_tts(tts.iter().cloned().collect())) } pub fn tokens_to_string(tokens: TokenStream) -> String { to_string(|s| s.print_tts(tokens)) } pub fn stmt_to_string(stmt: &ast::Stmt) -> String { to_string(|s| s.print_stmt(stmt)) } pub fn attr_to_string(attr: &ast::Attribute) -> String { to_string(|s| s.print_attribute(attr)) } pub fn item_to_string(i: &ast::Item) -> String { to_string(|s| s.print_item(i)) } pub fn impl_item_to_string(i: &ast::ImplItem) -> String { to_string(|s| s.print_impl_item(i)) } pub fn trait_item_to_string(i: &ast::TraitItem) -> String { to_string(|s| s.print_trait_item(i)) } pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String { to_string(|s| s.print_generic_params(generic_params)) } pub fn where_clause_to_string(i: &ast::WhereClause) -> 
String { to_string(|s| s.print_where_clause(i)) } pub fn fn_block_to_string(p: &ast::FnDecl) -> String { to_string(|s| s.print_fn_block_args(p)) } pub fn path_to_string(p: &ast::Path) -> String { to_string(|s| s.print_path(p, false, 0)) } pub fn path_segment_to_string(p: &ast::PathSegment) -> String { to_string(|s| s.print_path_segment(p, false)) } pub fn ident_to_string(id: ast::Ident) -> String { to_string(|s| s.print_ident(id)) } pub fn vis_to_string(v: &ast::Visibility) -> String { to_string(|s| s.print_visibility(v)) } pub fn fun_to_string(decl: &ast::FnDecl, header: ast::FnHeader, name: ast::Ident, generics: &ast::Generics) -> String { to_string(|s| { s.head("")?; s.print_fn(decl, header, Some(name), generics, &source_map::dummy_spanned(ast::VisibilityKind::Inherited))?; s.end()?; // Close the head box s.end() // Close the outer box }) } pub fn block_to_string(blk: &ast::Block) -> String { to_string(|s| { // containing cbox, will be closed by print-block at } s.cbox(INDENT_UNIT)?; // head-ibox, will be closed by print-block after { s.ibox(0)?; s.print_block(blk) }) } pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String { to_string(|s| s.print_meta_list_item(li)) } pub fn meta_item_to_string(mi: &ast::MetaItem) -> String { to_string(|s| s.print_meta_item(mi)) } pub fn attribute_to_string(attr: &ast::Attribute) -> String { to_string(|s| s.print_attribute(attr)) } pub fn lit_to_string(l: &ast::Lit) -> String { to_string(|s| s.print_literal(l)) } pub fn variant_to_string(var: &ast::Variant) -> String { to_string(|s| s.print_variant(var)) } pub fn arg_to_string(arg: &ast::Arg) -> String { to_string(|s| s.print_arg(arg, false)) } pub fn mac_to_string(arg: &ast::Mac) -> String { to_string(|s| s.print_mac(arg)) } pub fn foreign_item_to_string(arg: &ast::ForeignItem) -> String { to_string(|s| s.print_foreign_item(arg)) } pub fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String { format!("{}{}", to_string(|s| s.print_visibility(vis)), s) } 
pub trait PrintState<'a> { fn writer(&mut self) -> &mut pp::Printer<'a>; fn boxes(&mut self) -> &mut Vec<pp::Breaks>; fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>; fn cur_cmnt(&mut self) -> &mut usize; fn cur_lit(&mut self) -> Option<&comments::Literal>; fn bump_lit(&mut self) -> Option<comments::Literal>; fn word_space(&mut self, w: &str) -> io::Result<()> { self.writer().word(w)?; self.writer().space() } fn popen(&mut self) -> io::Result<()> { self.writer().word("(") } fn pclose(&mut self) -> io::Result<()> { self.writer().word(")") } fn is_begin(&mut self) -> bool { match self.writer().last_token() { pp::Token::Begin(_) => true, _ => false, } } fn is_end(&mut self) -> bool { match self.writer().last_token() { pp::Token::End => true, _ => false, } } // is this the beginning of a line? fn is_bol(&mut self) -> bool { self.writer().last_token().is_eof() || self.writer().last_token().is_hardbreak_tok() } fn hardbreak_if_not_bol(&mut self) -> io::Result<()> { if !self.is_bol() { self.writer().hardbreak()? } Ok(()) } // "raw box" fn rbox(&mut self, u: usize, b: pp::Breaks) -> io::Result<()> { self.boxes().push(b); self.writer().rbox(u, b) } fn ibox(&mut self, u: usize) -> io::Result<()> { self.boxes().push(pp::Breaks::Inconsistent); self.writer().ibox(u) } fn end(&mut self) -> io::Result<()> { self.boxes().pop().unwrap(); self.writer().end() } fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> io::Result<()> where F: FnMut(&mut Self, &T) -> io::Result<()>, { self.rbox(0, b)?; let mut first = true; for elt in elts { if first { first = false; } else { self.word_space(",")?; } op(self, elt)?; } self.end() } fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> { while let Some(ltrl) = self.cur_lit().cloned() { if ltrl.pos > pos { break; } // we don't need the value here since we're forced to clone cur_lit // due to lack of NLL. 
self.bump_lit(); if ltrl.pos == pos { return Some(ltrl); } } None } fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> { while let Some(ref cmnt) = self.next_comment() { if cmnt.pos < pos { self.print_comment(cmnt)?; } else { break } } Ok(()) } fn print_comment(&mut self, cmnt: &comments::Comment) -> io::Result<()> { let r = match cmnt.style { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1); self.writer().zerobreak()?; self.writer().word(&cmnt.lines[0])?; self.writer().zerobreak() } comments::Isolated => { self.hardbreak_if_not_bol()?; for line in &cmnt.lines { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { self.writer().word(&line[..])?; } self.writer().hardbreak()?; } Ok(()) } comments::Trailing => { if !self.is_bol() { self.writer().word(" ")?; } if cmnt.lines.len() == 1 { self.writer().word(&cmnt.lines[0])?; self.writer().hardbreak() } else { self.ibox(0)?; for line in &cmnt.lines { if !line.is_empty() { self.writer().word(&line[..])?; } self.writer().hardbreak()?; } self.end() } } comments::BlankLine => { // We need to do at least one, possibly two hardbreaks. 
let is_semi = match self.writer().last_token() { pp::Token::String(s, _) => ";" == s, _ => false }; if is_semi || self.is_begin() || self.is_end() { self.writer().hardbreak()?; } self.writer().hardbreak() } }; match r { Ok(()) => { *self.cur_cmnt() = *self.cur_cmnt() + 1; Ok(()) } Err(e) => Err(e), } } fn next_comment(&mut self) -> Option<comments::Comment> { let cur_cmnt = *self.cur_cmnt(); match *self.comments() { Some(ref cmnts) => { if cur_cmnt < cmnts.len() { Some(cmnts[cur_cmnt].clone()) } else { None } } _ => None } } fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> { self.maybe_print_comment(lit.span.lo())?; if let Some(ltrl) = self.next_lit(lit.span.lo()) { return self.writer().word(&ltrl.lit); } match lit.node { ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style), ast::LitKind::Byte(byte) => { let mut res = String::from("b'"); res.extend(ascii::escape_default(byte).map(|c| c as char)); res.push('\''); self.writer().word(&res[..]) } ast::LitKind::Char(ch) => { let mut res = String::from("'"); res.extend(ch.escape_default()); res.push('\''); self.writer().word(&res[..]) } ast::LitKind::Int(i, t) => { match t { ast::LitIntType::Signed(st) => { self.writer().word(&st.val_to_string(i as i128)) } ast::LitIntType::Unsigned(ut) => { self.writer().word(&ut.val_to_string(i)) } ast::LitIntType::Unsuffixed => { self.writer().word(&i.to_string()) } } } ast::LitKind::Float(ref f, t) => { self.writer().word(&format!("{}{}", &f, t.ty_to_string())) } ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(&f.as_str()), ast::LitKind::Bool(val) => { if val { self.writer().word("true") } else { self.writer().word("false") } } ast::LitKind::ByteStr(ref v) => { let mut escaped: String = String::new(); for &ch in v.iter() { escaped.extend(ascii::escape_default(ch) .map(|c| c as char)); } self.writer().word(&format!("b\"{}\"", escaped)) } } } fn print_string(&mut self, st: &str, style: ast::StrStyle) -> io::Result<()> { let st = match style { 
ast::StrStyle::Cooked => { (format!("\"{}\"", st.escape_debug())) } ast::StrStyle::Raw(n) => { (format!("r{delim}\"{string}\"{delim}", delim="#".repeat(n as usize), string=st)) } }; self.writer().word(&st[..]) } fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true) } fn print_inner_attributes_no_trailing_hardbreak(&mut self, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, false) } fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true) } fn print_inner_attributes_inline(&mut self, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_either_attributes(attrs, ast::AttrStyle::Inner, true, true) } fn print_outer_attributes_inline(&mut self, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_either_attributes(attrs, ast::AttrStyle::Outer, true, true) } fn print_either_attributes(&mut self, attrs: &[ast::Attribute], kind: ast::AttrStyle, is_inline: bool, trailing_hardbreak: bool) -> io::Result<()> { let mut count = 0; for attr in attrs { if attr.style == kind { self.print_attribute_inline(attr, is_inline)?; if is_inline { self.nbsp()?; } count += 1; } } if count > 0 && trailing_hardbreak && !is_inline { self.hardbreak_if_not_bol()?; } Ok(()) } fn print_attribute_path(&mut self, path: &ast::Path) -> io::Result<()> { for (i, segment) in path.segments.iter().enumerate() { if i > 0 { self.writer().word("::")? 
} if segment.ident.name != keywords::CrateRoot.name() && segment.ident.name != keywords::DollarCrate.name() { self.writer().word(&segment.ident.as_str())?; } else if segment.ident.name == keywords::DollarCrate.name() { self.print_dollar_crate(segment.ident.span.ctxt())?; } } Ok(()) } fn print_attribute(&mut self, attr: &ast::Attribute) -> io::Result<()> { self.print_attribute_inline(attr, false) } fn print_attribute_inline(&mut self, attr: &ast::Attribute, is_inline: bool) -> io::Result<()> { if !is_inline { self.hardbreak_if_not_bol()?; } self.maybe_print_comment(attr.span.lo())?; if attr.is_sugared_doc { self.writer().word(&attr.value_str().unwrap().as_str())?; self.writer().hardbreak() } else { match attr.style { ast::AttrStyle::Inner => self.writer().word("#![")?, ast::AttrStyle::Outer => self.writer().word("#[")?, } if let Some(mi) = attr.meta() { self.print_meta_item(&mi)? } else { self.print_attribute_path(&attr.path)?; self.writer().space()?; self.print_tts(attr.tokens.clone())?; } self.writer().word("]") } } fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) -> io::Result<()> { match item.node { ast::NestedMetaItemKind::MetaItem(ref mi) => { self.print_meta_item(mi) }, ast::NestedMetaItemKind::Literal(ref lit) => { self.print_literal(lit) } } } fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> { self.ibox(INDENT_UNIT)?; match item.node { ast::MetaItemKind::Word => self.print_attribute_path(&item.ident)?, ast::MetaItemKind::NameValue(ref value) => { self.print_attribute_path(&item.ident)?; self.writer().space()?; self.word_space("=")?; self.print_literal(value)?; } ast::MetaItemKind::List(ref items) => { self.print_attribute_path(&item.ident)?; self.popen()?; self.commasep(Consistent, &items[..], |s, i| s.print_meta_list_item(i))?; self.pclose()?; } } self.end() } /// This doesn't deserve to be called "pretty" printing, but it should be /// meaning-preserving. 
A quick hack that might help would be to look at the /// spans embedded in the TTs to decide where to put spaces and newlines. /// But it'd be better to parse these according to the grammar of the /// appropriate macro, transcribe back into the grammar we just parsed from, /// and then pretty-print the resulting AST nodes (so, e.g., we print /// expression arguments as expressions). It can be done! I think. fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { match tt { TokenTree::Token(_, ref tk) => { self.writer().word(&token_to_string(tk))?; match *tk { parse::token::DocComment(..) => { self.writer().hardbreak() } _ => Ok(()) } } TokenTree::Delimited(_, ref delimed) => { self.writer().word(&token_to_string(&delimed.open_token()))?; self.writer().space()?; self.print_tts(delimed.stream())?; self.writer().space()?; self.writer().word(&token_to_string(&delimed.close_token())) }, } } fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> { self.ibox(0)?; for (i, tt) in tts.into_trees().enumerate() { if i != 0 { self.writer().space()?; } self.print_tt(tt)?; } self.end() } fn space_if_not_bol(&mut self) -> io::Result<()> { if !self.is_bol() { self.writer().space()?; } Ok(()) } fn nbsp(&mut self) -> io::Result<()> { self.writer().word(" ") } fn print_dollar_crate(&mut self, mut ctxt: SyntaxContext) -> io::Result<()> { if let Some(mark) = ctxt.adjust(Mark::root()) { // Make a best effort to print something that complies if mark.is_builtin() { if let Some(name) = std_inject::injected_crate_name() { self.writer().word("::")?; self.writer().word(name)?; } } } Ok(()) } } impl<'a> PrintState<'a> for State<'a> { fn writer(&mut self) -> &mut pp::Printer<'a> { &mut self.s } fn boxes(&mut self) -> &mut Vec<pp::Breaks> { &mut self.boxes } fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> { &mut self.comments } fn cur_cmnt(&mut self) -> &mut usize { &mut self.cur_cmnt } fn cur_lit(&mut self) -> Option<&comments::Literal> { 
self.literals.peek() } fn bump_lit(&mut self) -> Option<comments::Literal> { self.literals.next() } } impl<'a> State<'a> { pub fn cbox(&mut self, u: usize) -> io::Result<()> { self.boxes.push(pp::Breaks::Consistent); self.s.cbox(u) } pub fn word_nbsp(&mut self, w: &str) -> io::Result<()> { self.s.word(w)?; self.nbsp() } pub fn head(&mut self, w: &str) -> io::Result<()> { // outer-box is consistent self.cbox(INDENT_UNIT)?; // head-box is inconsistent self.ibox(w.len() + 1)?; // keyword that starts the head if !w.is_empty() { self.word_nbsp(w)?; } Ok(()) } pub fn bopen(&mut self) -> io::Result<()> { self.s.word("{")?; self.end() // close the head-box } pub fn bclose_(&mut self, span: syntax_pos::Span, indented: usize) -> io::Result<()> { self.bclose_maybe_open(span, indented, true) } pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span, indented: usize, close_box: bool) -> io::Result<()> { self.maybe_print_comment(span.hi())?; self.break_offset_if_not_bol(1, -(indented as isize))?; self.s.word("}")?; if close_box { self.end()?; // close the outer-box } Ok(()) } pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> { self.bclose_(span, INDENT_UNIT) } pub fn in_cbox(&self) -> bool { match self.boxes.last() { Some(&last_box) => last_box == pp::Breaks::Consistent, None => false } } pub fn break_offset_if_not_bol(&mut self, n: usize, off: isize) -> io::Result<()> { if !self.is_bol() { self.s.break_offset(n, off) } else { if off != 0 && self.s.last_token().is_hardbreak_tok() { // We do something pretty sketchy here: tuck the nonzero // offset-adjustment we were going to deposit along with the // break into the previous hardbreak. self.s.replace_last_token(pp::Printer::hardbreak_tok_offset(off)); } Ok(()) } } // Synthesizes a comment that was not textually present in the original source // file. 
pub fn synth_comment(&mut self, text: String) -> io::Result<()> { self.s.word("/*")?; self.s.space()?; self.s.word(&text[..])?; self.s.space()?; self.s.word("*/") } pub fn commasep_cmnt<T, F, G>(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) -> io::Result<()> where F: FnMut(&mut State, &T) -> io::Result<()>, G: FnMut(&T) -> syntax_pos::Span, { self.rbox(0, b)?; let len = elts.len(); let mut i = 0; for elt in elts { self.maybe_print_comment(get_span(elt).hi())?; op(self, elt)?; i += 1; if i < len { self.s.word(",")?; self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()))?; self.space_if_not_bol()?; } } self.end() } pub fn commasep_exprs(&mut self, b: Breaks, exprs: &[P<ast::Expr>]) -> io::Result<()> { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span) } pub fn print_mod(&mut self, _mod: &ast::Mod, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_inner_attributes(attrs)?; for item in &_mod.items { self.print_item(item)?; } Ok(()) } pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_inner_attributes(attrs)?; for item in &nmod.items { self.print_foreign_item(item)?; } Ok(()) } pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) -> io::Result<()> { if let Some(lt) = *lifetime { self.print_lifetime(lt)?; self.nbsp()?; } Ok(()) } pub fn print_generic_arg(&mut self, generic_arg: &GenericArg) -> io::Result<()> { match generic_arg { GenericArg::Lifetime(lt) => self.print_lifetime(*lt), GenericArg::Type(ty) => self.print_type(ty), } } pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> { self.maybe_print_comment(ty.span.lo())?; self.ibox(0)?; match ty.node { ast::TyKind::Slice(ref ty) => { self.s.word("[")?; self.print_type(ty)?; self.s.word("]")?; } ast::TyKind::Ptr(ref mt) => { self.s.word("*")?; match mt.mutbl { ast::Mutability::Mutable => self.word_nbsp("mut")?, ast::Mutability::Immutable => self.word_nbsp("const")?, } 
self.print_type(&mt.ty)?; } ast::TyKind::Rptr(ref lifetime, ref mt) => { self.s.word("&")?; self.print_opt_lifetime(lifetime)?; self.print_mt(mt)?; } ast::TyKind::Never => { self.s.word("!")?; }, ast::TyKind::Tup(ref elts) => { self.popen()?; self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(ty))?; if elts.len() == 1 { self.s.word(",")?; } self.pclose()?; } ast::TyKind::Paren(ref typ) => { self.popen()?; self.print_type(typ)?; self.pclose()?; } ast::TyKind::BareFn(ref f) => { self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &f.generic_params)?; } ast::TyKind::Path(None, ref path) => { self.print_path(path, false, 0)?; } ast::TyKind::Path(Some(ref qself), ref path) => { self.print_qpath(path, qself, false)? } ast::TyKind::TraitObject(ref bounds, syntax) => { let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" }; self.print_type_bounds(prefix, &bounds[..])?; } ast::TyKind::ImplTrait(_, ref bounds) => { self.print_type_bounds("impl", &bounds[..])?; } ast::TyKind::Array(ref ty, ref length) => { self.s.word("[")?; self.print_type(ty)?; self.s.word("; ")?; self.print_expr(&length.value)?; self.s.word("]")?; } ast::TyKind::Typeof(ref e) => { self.s.word("typeof(")?; self.print_expr(&e.value)?; self.s.word(")")?; } ast::TyKind::Infer => { self.s.word("_")?; } ast::TyKind::Err => { self.s.word("?")?; } ast::TyKind::ImplicitSelf => { self.s.word("Self")?; } ast::TyKind::Mac(ref m) => { self.print_mac(m)?; } } self.end() } pub fn print_foreign_item(&mut self, item: &ast::ForeignItem) -> io::Result<()> { self.hardbreak_if_not_bol()?; self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; match item.node { ast::ForeignItemKind::Fn(ref decl, ref generics) => { self.head("")?; self.print_fn(decl, ast::FnHeader::default(), Some(item.ident), generics, &item.vis)?; self.end()?; // end head-ibox self.s.word(";")?; self.end() // end the outer fn box } ast::ForeignItemKind::Static(ref t, m) => { 
self.head(&visibility_qualified(&item.vis, "static"))?; if m { self.word_space("mut")?; } self.print_ident(item.ident)?; self.word_space(":")?; self.print_type(t)?; self.s.word(";")?; self.end()?; // end the head-ibox self.end() // end the outer cbox } ast::ForeignItemKind::Ty => { self.head(&visibility_qualified(&item.vis, "type"))?; self.print_ident(item.ident)?; self.s.word(";")?; self.end()?; // end the head-ibox self.end() // end the outer cbox } ast::ForeignItemKind::Macro(ref m) => { self.print_mac(m)?; match m.node.delim { MacDelimiter::Brace => Ok(()), _ => self.s.word(";") } } } } fn print_associated_const(&mut self, ident: ast::Ident, ty: &ast::Ty, default: Option<&ast::Expr>, vis: &ast::Visibility) -> io::Result<()> { self.s.word(&visibility_qualified(vis, ""))?; self.word_space("const")?; self.print_ident(ident)?; self.word_space(":")?; self.print_type(ty)?; if let Some(expr) = default { self.s.space()?; self.word_space("=")?; self.print_expr(expr)?; } self.s.word(";") } fn print_associated_type(&mut self, ident: ast::Ident, bounds: Option<&ast::GenericBounds>, ty: Option<&ast::Ty>) -> io::Result<()> { self.word_space("type")?; self.print_ident(ident)?; if let Some(bounds) = bounds { self.print_type_bounds(":", bounds)?; } if let Some(ty) = ty { self.s.space()?; self.word_space("=")?; self.print_type(ty)?; } self.s.word(";") } /// Pretty-print an item pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> { self.hardbreak_if_not_bol()?; self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; self.ann.pre(self, NodeItem(item))?; match item.node { ast::ItemKind::ExternCrate(orig_name) => { self.head(&visibility_qualified(&item.vis, "extern crate"))?; if let Some(orig_name) = orig_name { self.print_name(orig_name)?; self.s.space()?; self.s.word("as")?; self.s.space()?; } self.print_ident(item.ident)?; self.s.word(";")?; self.end()?; // end inner head-block self.end()?; // end outer head-block } 
ast::ItemKind::Use(ref tree) => { self.head(&visibility_qualified(&item.vis, "use"))?; self.print_use_tree(tree)?; self.s.word(";")?; self.end()?; // end inner head-block self.end()?; // end outer head-block } ast::ItemKind::Static(ref ty, m, ref expr) => { self.head(&visibility_qualified(&item.vis, "static"))?; if m == ast::Mutability::Mutable { self.word_space("mut")?; } self.print_ident(item.ident)?; self.word_space(":")?; self.print_type(ty)?; self.s.space()?; self.end()?; // end the head-ibox self.word_space("=")?; self.print_expr(expr)?; self.s.word(";")?; self.end()?; // end the outer cbox } ast::ItemKind::Const(ref ty, ref expr) => { self.head(&visibility_qualified(&item.vis, "const"))?; self.print_ident(item.ident)?; self.word_space(":")?; self.print_type(ty)?; self.s.space()?; self.end()?; // end the head-ibox self.word_space("=")?; self.print_expr(expr)?; self.s.word(";")?; self.end()?; // end the outer cbox } ast::ItemKind::Fn(ref decl, header, ref typarams, ref body) => { self.head("")?; self.print_fn( decl, header, Some(item.ident), typarams, &item.vis )?; self.s.word(" ")?; self.print_block_with_attrs(body, &item.attrs)?; } ast::ItemKind::Mod(ref _mod) => { self.head(&visibility_qualified(&item.vis, "mod"))?; self.print_ident(item.ident)?; self.nbsp()?; self.bopen()?; self.print_mod(_mod, &item.attrs)?; self.bclose(item.span)?; } ast::ItemKind::ForeignMod(ref nmod) => { self.head("extern")?; self.word_nbsp(&nmod.abi.to_string())?; self.bopen()?; self.print_foreign_mod(nmod, &item.attrs)?; self.bclose(item.span)?; } ast::ItemKind::GlobalAsm(ref ga) => { self.head(&visibility_qualified(&item.vis, "global_asm!"))?; self.s.word(&ga.asm.as_str())?; self.end()?; } ast::ItemKind::Ty(ref ty, ref generics) => { self.head(&visibility_qualified(&item.vis, "type"))?; self.print_ident(item.ident)?; self.print_generic_params(&generics.params)?; self.end()?; // end the inner ibox self.print_where_clause(&generics.where_clause)?; self.s.space()?; 
self.word_space("=")?; self.print_type(ty)?; self.s.word(";")?; self.end()?; // end the outer ibox } ast::ItemKind::Existential(ref bounds, ref generics) => { self.head(&visibility_qualified(&item.vis, "existential type"))?; self.print_ident(item.ident)?; self.print_generic_params(&generics.params)?; self.end()?; // end the inner ibox self.print_where_clause(&generics.where_clause)?; self.s.space()?; self.print_type_bounds(":", bounds)?; self.s.word(";")?; self.end()?; // end the outer ibox } ast::ItemKind::Enum(ref enum_definition, ref params) => { self.print_enum_def( enum_definition, params, item.ident, item.span, &item.vis )?; } ast::ItemKind::Struct(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "struct"))?; self.print_struct(struct_def, generics, item.ident, item.span, true)?; } ast::ItemKind::Union(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "union"))?; self.print_struct(struct_def, generics, item.ident, item.span, true)?; } ast::ItemKind::Impl(unsafety, polarity, defaultness, ref generics, ref opt_trait, ref ty, ref impl_items) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_defaultness(defaultness)?; self.print_unsafety(unsafety)?; self.word_nbsp("impl")?; if !generics.params.is_empty() { self.print_generic_params(&generics.params)?; self.s.space()?; } if polarity == ast::ImplPolarity::Negative { self.s.word("!")?; } if let Some(ref t) = *opt_trait { self.print_trait_ref(t)?; self.s.space()?; self.word_space("for")?; } self.print_type(ty)?; self.print_where_clause(&generics.where_clause)?; self.s.space()?; self.bopen()?; self.print_inner_attributes(&item.attrs)?; for impl_item in impl_items { self.print_impl_item(impl_item)?; } self.bclose(item.span)?; } ast::ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref trait_items) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_unsafety(unsafety)?; self.print_is_auto(is_auto)?; 
self.word_nbsp("trait")?; self.print_ident(item.ident)?; self.print_generic_params(&generics.params)?; let mut real_bounds = Vec::with_capacity(bounds.len()); for b in bounds.iter() { if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b { self.s.space()?; self.word_space("for ?")?; self.print_trait_ref(&ptr.trait_ref)?; } else { real_bounds.push(b.clone()); } } self.print_type_bounds(":", &real_bounds[..])?; self.print_where_clause(&generics.where_clause)?; self.s.word(" ")?; self.bopen()?; for trait_item in trait_items { self.print_trait_item(trait_item)?; } self.bclose(item.span)?; } ast::ItemKind::TraitAlias(ref generics, ref bounds) => { self.head("")?; self.print_visibility(&item.vis)?; self.word_nbsp("trait")?; self.print_ident(item.ident)?; self.print_generic_params(&generics.params)?; let mut real_bounds = Vec::with_capacity(bounds.len()); // FIXME(durka) this seems to be some quite outdated syntax for b in bounds.iter() { if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b { self.s.space()?; self.word_space("for ?")?; self.print_trait_ref(&ptr.trait_ref)?; } else { real_bounds.push(b.clone()); } } self.nbsp()?; self.print_type_bounds("=", &real_bounds[..])?; self.print_where_clause(&generics.where_clause)?; self.s.word(";")?; } ast::ItemKind::Mac(ref mac) => { if item.ident.name == keywords::Invalid.name() { self.print_mac(mac)?; match mac.node.delim { MacDelimiter::Brace => {} _ => self.s.word(";")?, } } else { self.print_path(&mac.node.path, false, 0)?; self.s.word("! ")?; self.print_ident(item.ident)?; self.cbox(INDENT_UNIT)?; self.popen()?; self.print_tts(mac.node.stream())?; self.pclose()?; self.s.word(";")?; self.end()?; } } ast::ItemKind::MacroDef(ref tts) => { self.s.word("macro_rules! 
")?; self.print_ident(item.ident)?; self.cbox(INDENT_UNIT)?; self.popen()?; self.print_tts(tts.stream())?; self.pclose()?; self.s.word(";")?; self.end()?; } } self.ann.post(self, NodeItem(item)) } fn print_trait_ref(&mut self, t: &ast::TraitRef) -> io::Result<()> { self.print_path(&t.path, false, 0) } fn print_formal_generic_params( &mut self, generic_params: &[ast::GenericParam] ) -> io::Result<()> { if !generic_params.is_empty() { self.s.word("for")?; self.print_generic_params(generic_params)?; self.nbsp()?; } Ok(()) } fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) -> io::Result<()> { self.print_formal_generic_params(&t.bound_generic_params)?; self.print_trait_ref(&t.trait_ref) } pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef, generics: &ast::Generics, ident: ast::Ident, span: syntax_pos::Span, visibility: &ast::Visibility) -> io::Result<()> { self.head(&visibility_qualified(visibility, "enum"))?; self.print_ident(ident)?; self.print_generic_params(&generics.params)?; self.print_where_clause(&generics.where_clause)?; self.s.space()?; self.print_variants(&enum_definition.variants, span) } pub fn print_variants(&mut self, variants: &[ast::Variant], span: syntax_pos::Span) -> io::Result<()> { self.bopen()?; for v in variants { self.space_if_not_bol()?; self.maybe_print_comment(v.span.lo())?; self.print_outer_attributes(&v.node.attrs)?; self.ibox(INDENT_UNIT)?; self.print_variant(v)?; self.s.word(",")?; self.end()?; self.maybe_print_trailing_comment(v.span, None)?; } self.bclose(span) } pub fn print_visibility(&mut self, vis: &ast::Visibility) -> io::Result<()> { match vis.node { ast::VisibilityKind::Public => self.word_nbsp("pub"), ast::VisibilityKind::Crate(sugar) => match sugar { ast::CrateSugar::PubCrate => self.word_nbsp("pub(crate)"), ast::CrateSugar::JustCrate => self.word_nbsp("crate") } ast::VisibilityKind::Restricted { ref path, .. 
} => {
    let path = to_string(|s| s.print_path(path, false, 0));
    if path == "self" || path == "super" {
        self.word_nbsp(&format!("pub({})", path))
    } else {
        self.word_nbsp(&format!("pub(in {})", path))
    }
}
ast::VisibilityKind::Inherited => Ok(())
} }

/// Print the `default` qualifier of an impl item, followed by a space,
/// when `defaultness` is `Default`; print nothing otherwise.
pub fn print_defaultness(&mut self, defaultness: ast::Defaultness) -> io::Result<()> {
    if let ast::Defaultness::Default = defaultness {
        // Was `try!(...)`; use `?` like every other fallible call in this file.
        self.word_nbsp("default")?;
    }
    Ok(())
}

/// Print a struct/union/enum-variant body: the ident, generics, then either
/// a tuple-style field list in parens (with optional trailing `;` when
/// `print_finalizer` is set) or a braced record-style field block.
pub fn print_struct(&mut self,
                    struct_def: &ast::VariantData,
                    generics: &ast::Generics,
                    ident: ast::Ident,
                    span: syntax_pos::Span,
                    print_finalizer: bool) -> io::Result<()> {
    self.print_ident(ident)?;
    self.print_generic_params(&generics.params)?;
    if !struct_def.is_struct() {
        // Tuple structs print `(ty, ty, ...)`; unit structs print nothing here.
        if struct_def.is_tuple() {
            self.popen()?;
            self.commasep(
                Inconsistent, struct_def.fields(),
                |s, field| {
                    s.maybe_print_comment(field.span.lo())?;
                    s.print_outer_attributes(&field.attrs)?;
                    s.print_visibility(&field.vis)?;
                    s.print_type(&field.ty)
                }
            )?;
            self.pclose()?;
        }
        self.print_where_clause(&generics.where_clause)?;
        if print_finalizer {
            self.s.word(";")?;
        }
        self.end()?;
        self.end() // close the outer-box
    } else {
        // Record-style struct: `where` clause, then `{ field: ty, ... }`.
        self.print_where_clause(&generics.where_clause)?;
        self.nbsp()?;
        self.bopen()?;
        self.hardbreak_if_not_bol()?;
        for field in struct_def.fields() {
            self.hardbreak_if_not_bol()?;
            self.maybe_print_comment(field.span.lo())?;
            self.print_outer_attributes(&field.attrs)?;
            self.print_visibility(&field.vis)?;
            // Named fields always carry an ident here, hence the unwrap.
            self.print_ident(field.ident.unwrap())?;
            self.word_nbsp(":")?;
            self.print_type(&field.ty)?;
            self.s.word(",")?;
        }
        self.bclose(span)
    }
}

/// Print a single enum variant, including an explicit discriminant
/// (`= expr`) when one is present.
pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
    self.head("")?;
    let generics = ast::Generics::default();
    self.print_struct(&v.node.data, &generics, v.node.ident, v.span, false)?;
    match v.node.disr_expr {
        Some(ref d) => {
            self.s.space()?;
            self.word_space("=")?;
            self.print_expr(&d.value)
        }
        _ => Ok(())
    }
}

/// Print a method signature (header, name, generics) without a body.
pub fn print_method_sig(&mut self,
                        ident: ast::Ident,
                        generics: &ast::Generics,
                        m: &ast::MethodSig,
                        vis:
&ast::Visibility) -> io::Result<()> { self.print_fn(&m.decl, m.header, Some(ident), &generics, vis) } pub fn print_trait_item(&mut self, ti: &ast::TraitItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ti.id))?; self.hardbreak_if_not_bol()?; self.maybe_print_comment(ti.span.lo())?; self.print_outer_attributes(&ti.attrs)?; match ti.node { ast::TraitItemKind::Const(ref ty, ref default) => { self.print_associated_const( ti.ident, ty, default.as_ref().map(|expr| &**expr), &source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), )?; } ast::TraitItemKind::Method(ref sig, ref body) => { if body.is_some() { self.head("")?; } self.print_method_sig( ti.ident, &ti.generics, sig, &source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited), )?; if let Some(ref body) = *body { self.nbsp()?; self.print_block_with_attrs(body, &ti.attrs)?; } else { self.s.word(";")?; } } ast::TraitItemKind::Type(ref bounds, ref default) => { self.print_associated_type(ti.ident, Some(bounds), default.as_ref().map(|ty| &**ty))?; } ast::TraitItemKind::Macro(ref mac) => { self.print_mac(mac)?; match mac.node.delim { MacDelimiter::Brace => {} _ => self.s.word(";")?, } } } self.ann.post(self, NodeSubItem(ti.id)) } pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ii.id))?; self.hardbreak_if_not_bol()?; self.maybe_print_comment(ii.span.lo())?; self.print_outer_attributes(&ii.attrs)?; self.print_defaultness(ii.defaultness)?; match ii.node { ast::ImplItemKind::Const(ref ty, ref expr) => { self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?; } ast::ImplItemKind::Method(ref sig, ref body) => { self.head("")?; self.print_method_sig(ii.ident, &ii.generics, sig, &ii.vis)?; self.nbsp()?; self.print_block_with_attrs(body, &ii.attrs)?; } ast::ImplItemKind::Type(ref ty) => { self.print_associated_type(ii.ident, None, Some(ty))?; } ast::ImplItemKind::Existential(ref bounds) => { 
self.word_space("existential")?; self.print_associated_type(ii.ident, Some(bounds), None)?; } ast::ImplItemKind::Macro(ref mac) => { self.print_mac(mac)?; match mac.node.delim { MacDelimiter::Brace => {} _ => self.s.word(";")?, } } } self.ann.post(self, NodeSubItem(ii.id)) } pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> { self.maybe_print_comment(st.span.lo())?; match st.node { ast::StmtKind::Local(ref loc) => { self.print_outer_attributes(&loc.attrs)?; self.space_if_not_bol()?; self.ibox(INDENT_UNIT)?; self.word_nbsp("let")?; self.ibox(INDENT_UNIT)?; self.print_local_decl(loc)?; self.end()?; if let Some(ref init) = loc.init { self.nbsp()?; self.word_space("=")?; self.print_expr(init)?; } self.s.word(";")?; self.end()?; } ast::StmtKind::Item(ref item) => self.print_item(item)?, ast::StmtKind::Expr(ref expr) => { self.space_if_not_bol()?; self.print_expr_outer_attr_style(expr, false)?; if parse::classify::expr_requires_semi_to_be_stmt(expr) { self.s.word(";")?; } } ast::StmtKind::Semi(ref expr) => { self.space_if_not_bol()?; self.print_expr_outer_attr_style(expr, false)?; self.s.word(";")?; } ast::StmtKind::Mac(ref mac) => { let (ref mac, style, ref attrs) = **mac; self.space_if_not_bol()?; self.print_outer_attributes(attrs)?; self.print_mac(mac)?; if style == ast::MacStmtStyle::Semicolon { self.s.word(";")?; } } } self.maybe_print_trailing_comment(st.span, None) } pub fn print_block(&mut self, blk: &ast::Block) -> io::Result<()> { self.print_block_with_attrs(blk, &[]) } pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> io::Result<()> { self.print_block_unclosed_indent(blk, INDENT_UNIT) } pub fn print_block_unclosed_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, false) } pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block, indented: usize) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indented, &[], false) } pub fn 
print_block_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, true) } pub fn print_block_maybe_unclosed(&mut self, blk: &ast::Block, indented: usize, attrs: &[ast::Attribute], close_box: bool) -> io::Result<()> { match blk.rules { BlockCheckMode::Unsafe(..) => self.word_space("unsafe")?, BlockCheckMode::Default => () } self.maybe_print_comment(blk.span.lo())?; self.ann.pre(self, NodeBlock(blk))?; self.bopen()?; self.print_inner_attributes(attrs)?; for (i, st) in blk.stmts.iter().enumerate() { match st.node { ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => { self.maybe_print_comment(st.span.lo())?; self.space_if_not_bol()?; self.print_expr_outer_attr_style(expr, false)?; self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?; } _ => self.print_stmt(st)?, } } self.bclose_maybe_open(blk.span, indented, close_box)?; self.ann.post(self, NodeBlock(blk)) } fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> { match els { Some(_else) => { match _else.node { // "another else-if" ast::ExprKind::If(ref i, ref then, ref e) => { self.cbox(INDENT_UNIT - 1)?; self.ibox(0)?; self.s.word(" else if ")?; self.print_expr_as_cond(i)?; self.s.space()?; self.print_block(then)?; self.print_else(e.as_ref().map(|e| &**e)) } // "another else-if-let" ast::ExprKind::IfLet(ref pats, ref expr, ref then, ref e) => { self.cbox(INDENT_UNIT - 1)?; self.ibox(0)?; self.s.word(" else if let ")?; self.print_pats(pats)?; self.s.space()?; self.word_space("=")?; self.print_expr_as_cond(expr)?; self.s.space()?; self.print_block(then)?; self.print_else(e.as_ref().map(|e| &**e)) } // "final else" ast::ExprKind::Block(ref b, _) => { self.cbox(INDENT_UNIT - 1)?; self.ibox(0)?; self.s.word(" else ")?; self.print_block(b) } // BLEAH, constraints would be great here _ => { panic!("print_if saw if with weird alternative"); } } } _ => Ok(()) } } pub fn print_if(&mut self, test: 
&ast::Expr, blk: &ast::Block, elseopt: Option<&ast::Expr>) -> io::Result<()> { self.head("if")?; self.print_expr_as_cond(test)?; self.s.space()?; self.print_block(blk)?; self.print_else(elseopt) } pub fn print_if_let(&mut self, pats: &[P<ast::Pat>], expr: &ast::Expr, blk: &ast::Block, elseopt: Option<&ast::Expr>) -> io::Result<()> { self.head("if let")?; self.print_pats(pats)?; self.s.space()?; self.word_space("=")?; self.print_expr_as_cond(expr)?; self.s.space()?; self.print_block(blk)?; self.print_else(elseopt) } pub fn print_mac(&mut self, m: &ast::Mac) -> io::Result<()> { self.print_path(&m.node.path, false, 0)?; self.s.word("!")?; match m.node.delim { MacDelimiter::Parenthesis => self.popen()?, MacDelimiter::Bracket => self.s.word("[")?, MacDelimiter::Brace => { self.head("")?; self.bopen()?; } } self.print_tts(m.node.stream())?; match m.node.delim { MacDelimiter::Parenthesis => self.pclose(), MacDelimiter::Bracket => self.s.word("]"), MacDelimiter::Brace => self.bclose(m.span), } } fn print_call_post(&mut self, args: &[P<ast::Expr>]) -> io::Result<()> { self.popen()?; self.commasep_exprs(Inconsistent, args)?; self.pclose() } pub fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8) -> io::Result<()> { let needs_par = expr.precedence().order() < prec; if needs_par { self.popen()?; } self.print_expr(expr)?; if needs_par { self.pclose()?; } Ok(()) } /// Print an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. pub fn print_expr_as_cond(&mut self, expr: &ast::Expr) -> io::Result<()> { let needs_par = match expr.node { // These cases need parens due to the parse error observed in #26461: `if return {}` // parses as the erroneous construct `if (return {})`, not `if (return) {}`. ast::ExprKind::Closure(..) | ast::ExprKind::Ret(..) | ast::ExprKind::Break(..) 
=> true, _ => parser::contains_exterior_struct_lit(expr), }; if needs_par { self.popen()?; } self.print_expr(expr)?; if needs_par { self.pclose()?; } Ok(()) } fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>], attrs: &[Attribute]) -> io::Result<()> { self.ibox(INDENT_UNIT)?; self.s.word("[")?; self.print_inner_attributes_inline(attrs)?; self.commasep_exprs(Inconsistent, &exprs[..])?; self.s.word("]")?; self.end() } fn print_expr_repeat(&mut self, element: &ast::Expr, count: &ast::AnonConst, attrs: &[Attribute]) -> io::Result<()> { self.ibox(INDENT_UNIT)?; self.s.word("[")?; self.print_inner_attributes_inline(attrs)?; self.print_expr(element)?; self.word_space(";")?; self.print_expr(&count.value)?; self.s.word("]")?; self.end() } fn print_expr_struct(&mut self, path: &ast::Path, fields: &[ast::Field], wth: &Option<P<ast::Expr>>, attrs: &[Attribute]) -> io::Result<()> { self.print_path(path, true, 0)?; self.s.word("{")?; self.print_inner_attributes_inline(attrs)?; self.commasep_cmnt( Consistent, &fields[..], |s, field| { s.ibox(INDENT_UNIT)?; if !field.is_shorthand { s.print_ident(field.ident)?; s.word_space(":")?; } s.print_expr(&field.expr)?; s.end() }, |f| f.span)?; match *wth { Some(ref expr) => { self.ibox(INDENT_UNIT)?; if !fields.is_empty() { self.s.word(",")?; self.s.space()?; } self.s.word("..")?; self.print_expr(expr)?; self.end()?; } _ => if !fields.is_empty() { self.s.word(",")? } } self.s.word("}")?; Ok(()) } fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>], attrs: &[Attribute]) -> io::Result<()> { self.popen()?; self.print_inner_attributes_inline(attrs)?; self.commasep_exprs(Inconsistent, &exprs[..])?; if exprs.len() == 1 { self.s.word(",")?; } self.pclose() } fn print_expr_call(&mut self, func: &ast::Expr, args: &[P<ast::Expr>]) -> io::Result<()> { let prec = match func.node { ast::ExprKind::Field(..) 
=> parser::PREC_FORCE_PAREN, _ => parser::PREC_POSTFIX, }; self.print_expr_maybe_paren(func, prec)?; self.print_call_post(args) } fn print_expr_method_call(&mut self, segment: &ast::PathSegment, args: &[P<ast::Expr>]) -> io::Result<()> { let base_args = &args[1..]; self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?; self.s.word(".")?; self.print_ident(segment.ident)?; if let Some(ref args) = segment.args { self.print_generic_args(args, true)?; } self.print_call_post(base_args) } fn print_expr_binary(&mut self, op: ast::BinOp, lhs: &ast::Expr, rhs: &ast::Expr) -> io::Result<()> { let assoc_op = AssocOp::from_ast_binop(op.node); let prec = assoc_op.precedence() as i8; let fixity = assoc_op.fixity(); let (left_prec, right_prec) = match fixity { Fixity::Left => (prec, prec + 1), Fixity::Right => (prec + 1, prec), Fixity::None => (prec + 1, prec + 1), }; let left_prec = match (&lhs.node, op.node) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. (&ast::ExprKind::Cast { .. }, ast::BinOpKind::Lt) | (&ast::ExprKind::Cast { .. 
}, ast::BinOpKind::Shl) => parser::PREC_FORCE_PAREN, _ => left_prec, }; self.print_expr_maybe_paren(lhs, left_prec)?; self.s.space()?; self.word_space(op.node.to_string())?; self.print_expr_maybe_paren(rhs, right_prec) } fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr) -> io::Result<()> { self.s.word(ast::UnOp::to_string(op))?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } fn print_expr_addr_of(&mut self, mutability: ast::Mutability, expr: &ast::Expr) -> io::Result<()> { self.s.word("&")?; self.print_mutability(mutability)?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } pub fn print_expr(&mut self, expr: &ast::Expr) -> io::Result<()> { self.print_expr_outer_attr_style(expr, true) } fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline: bool) -> io::Result<()> { self.maybe_print_comment(expr.span.lo())?; let attrs = &expr.attrs; if is_inline { self.print_outer_attributes_inline(attrs)?; } else { self.print_outer_attributes(attrs)?; } self.ibox(INDENT_UNIT)?; self.ann.pre(self, NodeExpr(expr))?; match expr.node { ast::ExprKind::Box(ref expr) => { self.word_space("box")?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)?; } ast::ExprKind::ObsoleteInPlace(ref place, ref expr) => { let prec = AssocOp::ObsoleteInPlace.precedence() as i8; self.print_expr_maybe_paren(place, prec + 1)?; self.s.space()?; self.word_space("<-")?; self.print_expr_maybe_paren(expr, prec)?; } ast::ExprKind::Array(ref exprs) => { self.print_expr_vec(&exprs[..], attrs)?; } ast::ExprKind::Repeat(ref element, ref count) => { self.print_expr_repeat(element, count, attrs)?; } ast::ExprKind::Struct(ref path, ref fields, ref wth) => { self.print_expr_struct(path, &fields[..], wth, attrs)?; } ast::ExprKind::Tup(ref exprs) => { self.print_expr_tup(&exprs[..], attrs)?; } ast::ExprKind::Call(ref func, ref args) => { self.print_expr_call(func, &args[..])?; } ast::ExprKind::MethodCall(ref segment, ref args) => { self.print_expr_method_call(segment, 
&args[..])?; } ast::ExprKind::Binary(op, ref lhs, ref rhs) => { self.print_expr_binary(op, lhs, rhs)?; } ast::ExprKind::Unary(op, ref expr) => { self.print_expr_unary(op, expr)?; } ast::ExprKind::AddrOf(m, ref expr) => { self.print_expr_addr_of(m, expr)?; } ast::ExprKind::Lit(ref lit) => { self.print_literal(lit)?; } ast::ExprKind::Cast(ref expr, ref ty) => { let prec = AssocOp::As.precedence() as i8; self.print_expr_maybe_paren(expr, prec)?; self.s.space()?; self.word_space("as")?; self.print_type(ty)?; } ast::ExprKind::Type(ref expr, ref ty) => { let prec = AssocOp::Colon.precedence() as i8; self.print_expr_maybe_paren(expr, prec)?; self.word_space(":")?; self.print_type(ty)?; } ast::ExprKind::If(ref test, ref blk, ref elseopt) => { self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?; } ast::ExprKind::IfLet(ref pats, ref expr, ref blk, ref elseopt) => { self.print_if_let(pats, expr, blk, elseopt.as_ref().map(|e| &**e))?; } ast::ExprKind::While(ref test, ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; self.word_space(":")?; } self.head("while")?; self.print_expr_as_cond(test)?; self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::WhileLet(ref pats, ref expr, ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; self.word_space(":")?; } self.head("while let")?; self.print_pats(pats)?; self.s.space()?; self.word_space("=")?; self.print_expr_as_cond(expr)?; self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; self.word_space(":")?; } self.head("for")?; self.print_pat(pat)?; self.s.space()?; self.word_space("in")?; self.print_expr_as_cond(iter)?; self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Loop(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; 
self.word_space(":")?; } self.head("loop")?; self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Match(ref expr, ref arms) => { self.cbox(INDENT_UNIT)?; self.ibox(4)?; self.word_nbsp("match")?; self.print_expr_as_cond(expr)?; self.s.space()?; self.bopen()?; self.print_inner_attributes_no_trailing_hardbreak(attrs)?; for arm in arms { self.print_arm(arm)?; } self.bclose_(expr.span, INDENT_UNIT)?; } ast::ExprKind::Closure( capture_clause, asyncness, movability, ref decl, ref body, _) => { self.print_movability(movability)?; self.print_asyncness(asyncness)?; self.print_capture_clause(capture_clause)?; self.print_fn_block_args(decl)?; self.s.space()?; self.print_expr(body)?; self.end()?; // need to close a box // a box will be closed by print_expr, but we didn't want an overall // wrapper so we closed the corresponding opening. so create an // empty box to satisfy the close. self.ibox(0)?; } ast::ExprKind::Block(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; self.word_space(":")?; } // containing cbox, will be closed by print-block at } self.cbox(INDENT_UNIT)?; // head-box, will be closed by print-block after { self.ibox(0)?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Async(capture_clause, _, ref blk) => { self.word_nbsp("async")?; self.print_capture_clause(capture_clause)?; self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } ast::ExprKind::Assign(ref lhs, ref rhs) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(lhs, prec + 1)?; self.s.space()?; self.word_space("=")?; self.print_expr_maybe_paren(rhs, prec)?; } ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(lhs, prec + 1)?; self.s.space()?; self.s.word(op.node.to_string())?; self.word_space("=")?; self.print_expr_maybe_paren(rhs, prec)?; } ast::ExprKind::Field(ref expr, ident) => { self.print_expr_maybe_paren(expr, 
parser::PREC_POSTFIX)?; self.s.word(".")?; self.print_ident(ident)?; } ast::ExprKind::Index(ref expr, ref index) => { self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?; self.s.word("[")?; self.print_expr(index)?; self.s.word("]")?; } ast::ExprKind::Range(ref start, ref end, limits) => { // Special case for `Range`. `AssocOp` claims that `Range` has higher precedence // than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`. // Here we use a fake precedence value so that any child with lower precedence than // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.) let fake_prec = AssocOp::LOr.precedence() as i8; if let Some(ref e) = *start { self.print_expr_maybe_paren(e, fake_prec)?; } if limits == ast::RangeLimits::HalfOpen { self.s.word("..")?; } else { self.s.word("..=")?; } if let Some(ref e) = *end { self.print_expr_maybe_paren(e, fake_prec)?; } } ast::ExprKind::Path(None, ref path) => { self.print_path(path, true, 0)? } ast::ExprKind::Path(Some(ref qself), ref path) => { self.print_qpath(path, qself, true)? } ast::ExprKind::Break(opt_label, ref opt_expr) => { self.s.word("break")?; self.s.space()?; if let Some(label) = opt_label { self.print_ident(label.ident)?; self.s.space()?; } if let Some(ref expr) = *opt_expr { self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?; self.s.space()?; } } ast::ExprKind::Continue(opt_label) => { self.s.word("continue")?; self.s.space()?; if let Some(label) = opt_label { self.print_ident(label.ident)?; self.s.space()? 
} } ast::ExprKind::Ret(ref result) => { self.s.word("return")?; if let Some(ref expr) = *result { self.s.word(" ")?; self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?; } } ast::ExprKind::InlineAsm(ref a) => { self.s.word("asm!")?; self.popen()?; self.print_string(&a.asm.as_str(), a.asm_str_style)?; self.word_space(":")?; self.commasep(Inconsistent, &a.outputs, |s, out| { let constraint = out.constraint.as_str(); let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)? } _ => s.print_string(&constraint, ast::StrStyle::Cooked)? } s.popen()?; s.print_expr(&out.expr)?; s.pclose()?; Ok(()) })?; self.s.space()?; self.word_space(":")?; self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| { s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; s.popen()?; s.print_expr(o)?; s.pclose()?; Ok(()) })?; self.s.space()?; self.word_space(":")?; self.commasep(Inconsistent, &a.clobbers, |s, co| { s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; Ok(()) })?; let mut options = vec![]; if a.volatile { options.push("volatile"); } if a.alignstack { options.push("alignstack"); } if a.dialect == ast::AsmDialect::Intel { options.push("intel"); } if !options.is_empty() { self.s.space()?; self.word_space(":")?; self.commasep(Inconsistent, &options, |s, &co| { s.print_string(co, ast::StrStyle::Cooked)?; Ok(()) })?; } self.pclose()?; } ast::ExprKind::Mac(ref m) => self.print_mac(m)?, ast::ExprKind::Paren(ref e) => { self.popen()?; self.print_inner_attributes_inline(attrs)?; self.print_expr(e)?; self.pclose()?; }, ast::ExprKind::Yield(ref e) => { self.s.word("yield")?; match *e { Some(ref expr) => { self.s.space()?; self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?; } _ => () } } ast::ExprKind::Try(ref e) => { self.print_expr_maybe_paren(e, parser::PREC_POSTFIX)?; self.s.word("?")? 
} ast::ExprKind::Catch(ref blk) => { self.head("do catch")?; self.s.space()?; self.print_block_with_attrs(blk, attrs)? } } self.ann.post(self, NodeExpr(expr))?; self.end() } pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> { self.print_pat(&loc.pat)?; if let Some(ref ty) = loc.ty { self.word_space(":")?; self.print_type(ty)?; } Ok(()) } pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { if ident.is_raw_guess() { self.s.word(&format!("r#{}", ident))?; } else { self.s.word(&ident.as_str())?; } self.ann.post(self, NodeIdent(&ident)) } pub fn print_usize(&mut self, i: usize) -> io::Result<()> { self.s.word(&i.to_string()) } pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> { self.s.word(&name.as_str())?; self.ann.post(self, NodeName(&name)) } pub fn print_for_decl(&mut self, loc: &ast::Local, coll: &ast::Expr) -> io::Result<()> { self.print_local_decl(loc)?; self.s.space()?; self.word_space("in")?; self.print_expr(coll) } fn print_path(&mut self, path: &ast::Path, colons_before_params: bool, depth: usize) -> io::Result<()> { self.maybe_print_comment(path.span.lo())?; for (i, segment) in path.segments[..path.segments.len() - depth].iter().enumerate() { if i > 0 { self.s.word("::")? 
} self.print_path_segment(segment, colons_before_params)?; } Ok(()) } fn print_path_segment(&mut self, segment: &ast::PathSegment, colons_before_params: bool) -> io::Result<()> { if segment.ident.name != keywords::CrateRoot.name() && segment.ident.name != keywords::DollarCrate.name() { self.print_ident(segment.ident)?; if let Some(ref args) = segment.args { self.print_generic_args(args, colons_before_params)?; } } else if segment.ident.name == keywords::DollarCrate.name() { self.print_dollar_crate(segment.ident.span.ctxt())?; } Ok(()) } fn print_qpath(&mut self, path: &ast::Path, qself: &ast::QSelf, colons_before_params: bool) -> io::Result<()> { self.s.word("<")?; self.print_type(&qself.ty)?; if qself.position > 0 { self.s.space()?; self.word_space("as")?; let depth = path.segments.len() - qself.position; self.print_path(path, false, depth)?; } self.s.word(">")?; self.s.word("::")?; let item_segment = path.segments.last().unwrap(); self.print_ident(item_segment.ident)?; match item_segment.args { Some(ref args) => self.print_generic_args(args, colons_before_params), None => Ok(()), } } fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool) -> io::Result<()> { if colons_before_params { self.s.word("::")? } match *args { ast::GenericArgs::AngleBracketed(ref data) => { self.s.word("<")?; self.commasep(Inconsistent, &data.args, |s, generic_arg| { s.print_generic_arg(generic_arg) })?; let mut comma = data.args.len() != 0; for binding in data.bindings.iter() { if comma { self.word_space(",")? } self.print_ident(binding.ident)?; self.s.space()?; self.word_space("=")?; self.print_type(&binding.ty)?; comma = true; } self.s.word(">")? 
} ast::GenericArgs::Parenthesized(ref data) => { self.s.word("(")?; self.commasep( Inconsistent, &data.inputs, |s, ty| s.print_type(ty))?; self.s.word(")")?; if let Some(ref ty) = data.output { self.space_if_not_bol()?; self.word_space("->")?; self.print_type(ty)?; } } } Ok(()) } pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> { self.maybe_print_comment(pat.span.lo())?; self.ann.pre(self, NodePat(pat))?; /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ match pat.node { PatKind::Wild => self.s.word("_")?, PatKind::Ident(binding_mode, ident, ref sub) => { match binding_mode { ast::BindingMode::ByRef(mutbl) => { self.word_nbsp("ref")?; self.print_mutability(mutbl)?; } ast::BindingMode::ByValue(ast::Mutability::Immutable) => {} ast::BindingMode::ByValue(ast::Mutability::Mutable) => { self.word_nbsp("mut")?; } } self.print_ident(ident)?; if let Some(ref p) = *sub { self.s.word("@")?; self.print_pat(p)?; } } PatKind::TupleStruct(ref path, ref elts, ddpos) => { self.print_path(path, true, 0)?; self.popen()?; if let Some(ddpos) = ddpos { self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?; if ddpos != 0 { self.word_space(",")?; } self.s.word("..")?; if ddpos != elts.len() { self.s.word(",")?; self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?; } } else { self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?; } self.pclose()?; } PatKind::Path(None, ref path) => { self.print_path(path, true, 0)?; } PatKind::Path(Some(ref qself), ref path) => { self.print_qpath(path, qself, false)?; } PatKind::Struct(ref path, ref fields, etc) => { self.print_path(path, true, 0)?; self.nbsp()?; self.word_space("{")?; self.commasep_cmnt( Consistent, &fields[..], |s, f| { s.cbox(INDENT_UNIT)?; if !f.node.is_shorthand { s.print_ident(f.node.ident)?; s.word_nbsp(":")?; } s.print_pat(&f.node.pat)?; s.end() }, |f| f.node.pat.span)?; if etc { if !fields.is_empty() { self.word_space(",")?; } self.s.word("..")?; } 
self.s.space()?; self.s.word("}")?; } PatKind::Tuple(ref elts, ddpos) => { self.popen()?; if let Some(ddpos) = ddpos { self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?; if ddpos != 0 { self.word_space(",")?; } self.s.word("..")?; if ddpos != elts.len() { self.s.word(",")?; self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?; } } else { self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?; if elts.len() == 1 { self.s.word(",")?; } } self.pclose()?; } PatKind::Box(ref inner) => { self.s.word("box ")?; self.print_pat(inner)?; } PatKind::Ref(ref inner, mutbl) => { self.s.word("&")?; if mutbl == ast::Mutability::Mutable { self.s.word("mut ")?; } self.print_pat(inner)?; } PatKind::Lit(ref e) => self.print_expr(&**e)?, PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => { self.print_expr(begin)?; self.s.space()?; match *end_kind { RangeEnd::Included(RangeSyntax::DotDotDot) => self.s.word("...")?, RangeEnd::Included(RangeSyntax::DotDotEq) => self.s.word("..=")?, RangeEnd::Excluded => self.s.word("..")?, } self.print_expr(end)?; } PatKind::Slice(ref before, ref slice, ref after) => { self.s.word("[")?; self.commasep(Inconsistent, &before[..], |s, p| s.print_pat(p))?; if let Some(ref p) = *slice { if !before.is_empty() { self.word_space(",")?; } if let PatKind::Wild = p.node { // Print nothing } else { self.print_pat(p)?; } self.s.word("..")?; if !after.is_empty() { self.word_space(",")?; } } self.commasep(Inconsistent, &after[..], |s, p| s.print_pat(p))?; self.s.word("]")?; } PatKind::Paren(ref inner) => { self.popen()?; self.print_pat(inner)?; self.pclose()?; } PatKind::Mac(ref m) => self.print_mac(m)?, } self.ann.post(self, NodePat(pat)) } fn print_pats(&mut self, pats: &[P<ast::Pat>]) -> io::Result<()> { let mut first = true; for p in pats { if first { first = false; } else { self.s.space()?; self.word_space("|")?; } self.print_pat(p)?; } Ok(()) } fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> 
{ // I have no idea why this check is necessary, but here it // is :( if arm.attrs.is_empty() { self.s.space()?; } self.cbox(INDENT_UNIT)?; self.ibox(0)?; self.maybe_print_comment(arm.pats[0].span.lo())?; self.print_outer_attributes(&arm.attrs)?; self.print_pats(&arm.pats)?; self.s.space()?; if let Some(ref e) = arm.guard { self.word_space("if")?; self.print_expr(e)?; self.s.space()?; } self.word_space("=>")?; match arm.body.node { ast::ExprKind::Block(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident)?; self.word_space(":")?; } // the block will close the pattern's ibox self.print_block_unclosed_indent(blk, INDENT_UNIT)?; // If it is a user-provided unsafe block, print a comma after it if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules { self.s.word(",")?; } } _ => { self.end()?; // close the ibox for the pattern self.print_expr(&arm.body)?; self.s.word(",")?; } } self.end() // close enclosing cbox } fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) -> io::Result<()> { match explicit_self.node { SelfKind::Value(m) => { self.print_mutability(m)?; self.s.word("self") } SelfKind::Region(ref lt, m) => { self.s.word("&")?; self.print_opt_lifetime(lt)?; self.print_mutability(m)?; self.s.word("self") } SelfKind::Explicit(ref typ, m) => { self.print_mutability(m)?; self.s.word("self")?; self.word_space(":")?; self.print_type(typ) } } } pub fn print_fn(&mut self, decl: &ast::FnDecl, header: ast::FnHeader, name: Option<ast::Ident>, generics: &ast::Generics, vis: &ast::Visibility) -> io::Result<()> { self.print_fn_header_info(header, vis)?; if let Some(name) = name { self.nbsp()?; self.print_ident(name)?; } self.print_generic_params(&generics.params)?; self.print_fn_args_and_ret(decl)?; self.print_where_clause(&generics.where_clause) } pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl) -> io::Result<()> { self.popen()?; self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, false))?; if 
decl.variadic { self.s.word(", ...")?; } self.pclose()?; self.print_fn_output(decl) } pub fn print_fn_block_args( &mut self, decl: &ast::FnDecl) -> io::Result<()> { self.s.word("|")?; self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, true))?; self.s.word("|")?; if let ast::FunctionRetTy::Default(..) = decl.output { return Ok(()); } self.space_if_not_bol()?; self.word_space("->")?; match decl.output { ast::FunctionRetTy::Ty(ref ty) => { self.print_type(ty)?; self.maybe_print_comment(ty.span.lo()) } ast::FunctionRetTy::Default(..) => unreachable!(), } } pub fn print_movability(&mut self, movability: ast::Movability) -> io::Result<()> { match movability { ast::Movability::Static => self.word_space("static"), ast::Movability::Movable => Ok(()), } } pub fn print_asyncness(&mut self, asyncness: ast::IsAsync) -> io::Result<()> { if asyncness.is_async() { self.word_nbsp("async")?; } Ok(()) } pub fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy) -> io::Result<()> { match capture_clause { ast::CaptureBy::Value => self.word_space("move"), ast::CaptureBy::Ref => Ok(()), } } pub fn print_type_bounds(&mut self, prefix: &str, bounds: &[ast::GenericBound]) -> io::Result<()> { if !bounds.is_empty() { self.s.word(prefix)?; let mut first = true; for bound in bounds { if !(first && prefix.is_empty()) { self.nbsp()?; } if first { first = false; } else { self.word_space("+")?; } match bound { GenericBound::Trait(tref, modifier) => { if modifier == &TraitBoundModifier::Maybe { self.s.word("?")?; } self.print_poly_trait_ref(tref)?; } GenericBound::Outlives(lt) => self.print_lifetime(*lt)?, } } } Ok(()) } pub fn print_lifetime(&mut self, lifetime: ast::Lifetime) -> io::Result<()> { self.print_name(lifetime.ident.name) } pub fn print_lifetime_bounds(&mut self, lifetime: ast::Lifetime, bounds: &ast::GenericBounds) -> io::Result<()> { self.print_lifetime(lifetime)?; if !bounds.is_empty() { self.s.word(": ")?; for (i, bound) in bounds.iter().enumerate() { if 
i != 0 { self.s.word(" + ")?; } match bound { ast::GenericBound::Outlives(lt) => self.print_lifetime(*lt)?, _ => panic!(), } } } Ok(()) } pub fn print_generic_params( &mut self, generic_params: &[ast::GenericParam] ) -> io::Result<()> { if generic_params.is_empty() { return Ok(()); } self.s.word("<")?; self.commasep(Inconsistent, &generic_params, |s, param| { match param.kind { ast::GenericParamKind::Lifetime => { s.print_outer_attributes_inline(&param.attrs)?; let lt = ast::Lifetime { id: param.id, ident: param.ident }; s.print_lifetime_bounds(lt, &param.bounds) }, ast::GenericParamKind::Type { ref default } => { s.print_outer_attributes_inline(&param.attrs)?; s.print_ident(param.ident)?; s.print_type_bounds(":", &param.bounds)?; match default { Some(ref default) => { s.s.space()?; s.word_space("=")?; s.print_type(default) } _ => Ok(()) } } } })?; self.s.word(">")?; Ok(()) } pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause) -> io::Result<()> { if where_clause.predicates.is_empty() { return Ok(()) } self.s.space()?; self.word_space("where")?; for (i, predicate) in where_clause.predicates.iter().enumerate() { if i != 0 { self.word_space(",")?; } match *predicate { ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { ref bound_generic_params, ref bounded_ty, ref bounds, .. }) => { self.print_formal_generic_params(bound_generic_params)?; self.print_type(bounded_ty)?; self.print_type_bounds(":", bounds)?; } ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime, ref bounds, ..}) => { self.print_lifetime_bounds(*lifetime, bounds)?; } ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{ref lhs_ty, ref rhs_ty, ..}) => { self.print_type(lhs_ty)?; self.s.space()?; self.word_space("=")?; self.print_type(rhs_ty)?; } } } Ok(()) } pub fn print_use_tree(&mut self, tree: &ast::UseTree) -> io::Result<()> { match tree.kind { ast::UseTreeKind::Simple(rename, ..) 
=> { self.print_path(&tree.prefix, false, 0)?; if let Some(rename) = rename { self.s.space()?; self.word_space("as")?; self.print_ident(rename)?; } } ast::UseTreeKind::Glob => { if !tree.prefix.segments.is_empty() { self.print_path(&tree.prefix, false, 0)?; self.s.word("::")?; } self.s.word("*")?; } ast::UseTreeKind::Nested(ref items) => { if tree.prefix.segments.is_empty() { self.s.word("{")?; } else { self.print_path(&tree.prefix, false, 0)?; self.s.word("::{")?; } self.commasep(Inconsistent, &items[..], |this, &(ref tree, _)| { this.print_use_tree(tree) })?; self.s.word("}")?; } } Ok(()) } pub fn print_mutability(&mut self, mutbl: ast::Mutability) -> io::Result<()> { match mutbl { ast::Mutability::Mutable => self.word_nbsp("mut"), ast::Mutability::Immutable => Ok(()), } } pub fn print_mt(&mut self, mt: &ast::MutTy) -> io::Result<()> { self.print_mutability(mt.mutbl)?; self.print_type(&mt.ty) } pub fn print_arg(&mut self, input: &ast::Arg, is_closure: bool) -> io::Result<()> { self.ibox(INDENT_UNIT)?; match input.ty.node { ast::TyKind::Infer if is_closure => self.print_pat(&input.pat)?, _ => { if let Some(eself) = input.to_self() { self.print_explicit_self(&eself)?; } else { let invalid = if let PatKind::Ident(_, ident, _) = input.pat.node { ident.name == keywords::Invalid.name() } else { false }; if !invalid { self.print_pat(&input.pat)?; self.s.word(":")?; self.s.space()?; } self.print_type(&input.ty)?; } } } self.end() } pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> { if let ast::FunctionRetTy::Default(..) = decl.output { return Ok(()); } self.space_if_not_bol()?; self.ibox(INDENT_UNIT)?; self.word_space("->")?; match decl.output { ast::FunctionRetTy::Default(..) => unreachable!(), ast::FunctionRetTy::Ty(ref ty) => self.print_type(ty)? 
} self.end()?; match decl.output { ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo()), _ => Ok(()) } } pub fn print_ty_fn(&mut self, abi: abi::Abi, unsafety: ast::Unsafety, decl: &ast::FnDecl, name: Option<ast::Ident>, generic_params: &[ast::GenericParam]) -> io::Result<()> { self.ibox(INDENT_UNIT)?; if !generic_params.is_empty() { self.s.word("for")?; self.print_generic_params(generic_params)?; } let generics = ast::Generics { params: Vec::new(), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), span: syntax_pos::DUMMY_SP, }, span: syntax_pos::DUMMY_SP, }; self.print_fn(decl, ast::FnHeader { unsafety, abi, ..ast::FnHeader::default() }, name, &generics, &source_map::dummy_spanned(ast::VisibilityKind::Inherited))?; self.end() } pub fn maybe_print_trailing_comment(&mut self, span: syntax_pos::Span, next_pos: Option<BytePos>) -> io::Result<()> { let cm = match self.cm { Some(cm) => cm, _ => return Ok(()) }; if let Some(ref cmnt) = self.next_comment() { if cmnt.style != comments::Trailing { return Ok(()) } let span_line = cm.lookup_char_pos(span.hi()); let comment_line = cm.lookup_char_pos(cmnt.pos); let next = next_pos.unwrap_or(cmnt.pos + BytePos(1)); if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line { self.print_comment(cmnt)?; } } Ok(()) } pub fn print_remaining_comments(&mut self) -> io::Result<()> { // If there aren't any remaining comments, then we need to manually // make sure there is a line break at the end. 
if self.next_comment().is_none() { self.s.hardbreak()?; } while let Some(ref cmnt) = self.next_comment() { self.print_comment(cmnt)?; } Ok(()) } pub fn print_opt_abi_and_extern_if_nondefault(&mut self, opt_abi: Option<Abi>) -> io::Result<()> { match opt_abi { Some(Abi::Rust) => Ok(()), Some(abi) => { self.word_nbsp("extern")?; self.word_nbsp(&abi.to_string()) } None => Ok(()) } } pub fn print_extern_opt_abi(&mut self, opt_abi: Option<Abi>) -> io::Result<()> { match opt_abi { Some(abi) => { self.word_nbsp("extern")?; self.word_nbsp(&abi.to_string()) } None => Ok(()) } } pub fn print_fn_header_info(&mut self, header: ast::FnHeader, vis: &ast::Visibility) -> io::Result<()> { self.s.word(&visibility_qualified(vis, ""))?; match header.constness.node { ast::Constness::NotConst => {} ast::Constness::Const => self.word_nbsp("const")? } self.print_asyncness(header.asyncness)?; self.print_unsafety(header.unsafety)?; if header.abi != Abi::Rust { self.word_nbsp("extern")?; self.word_nbsp(&header.abi.to_string())?; } self.s.word("fn") } pub fn print_unsafety(&mut self, s: ast::Unsafety) -> io::Result<()> { match s { ast::Unsafety::Normal => Ok(()), ast::Unsafety::Unsafe => self.word_nbsp("unsafe"), } } pub fn print_is_auto(&mut self, s: ast::IsAuto) -> io::Result<()> { match s { ast::IsAuto::Yes => self.word_nbsp("auto"), ast::IsAuto::No => Ok(()), } } } #[cfg(test)] mod tests { use super::*; use ast; use source_map; use syntax_pos; use with_globals; #[test] fn test_fun_to_string() { with_globals(|| { let abba_ident = ast::Ident::from_str("abba"); let decl = ast::FnDecl { inputs: Vec::new(), output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), variadic: false }; let generics = ast::Generics::default(); assert_eq!( fun_to_string( &decl, ast::FnHeader { unsafety: ast::Unsafety::Normal, constness: source_map::dummy_spanned(ast::Constness::NotConst), asyncness: ast::IsAsync::NotAsync, abi: Abi::Rust, }, abba_ident, &generics ), "fn abba()" ); }) } #[test] fn 
test_variant_to_string() { with_globals(|| { let ident = ast::Ident::from_str("principal_skinner"); let var = source_map::respan(syntax_pos::DUMMY_SP, ast::Variant_ { ident, attrs: Vec::new(), // making this up as I go.... ? data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), disr_expr: None, }); let varstr = variant_to_string(&var); assert_eq!(varstr, "principal_skinner"); }) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/attr/mod.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Functions dealing with attributes and meta items mod builtin; pub use self::builtin::{ cfg_matches, contains_feature_attr, eval_condition, find_crate_name, find_deprecation, find_repr_attrs, find_stability, find_unwind_attr, Deprecation, InlineAttr, IntType, ReprAttr, RustcConstUnstable, RustcDeprecation, Stability, StabilityLevel, UnwindAttr, }; pub use self::IntType::*; pub use self::ReprAttr::*; pub use self::StabilityLevel::*; use ast; use ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam}; use source_map::{BytePos, Spanned, respan, dummy_spanned}; use syntax_pos::{FileName, Span}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::parser::Parser; use parse::{self, ParseSess, PResult}; use parse::token::{self, Token}; use ptr::P; use symbol::Symbol; use ThinVec; use tokenstream::{TokenStream, TokenTree, Delimited}; use GLOBALS; use std::iter; pub fn mark_used(attr: &Attribute) { debug!("Marking {:?} as used.", attr); GLOBALS.with(|globals| { globals.used_attrs.lock().insert(attr.id); }); } pub fn is_used(attr: &Attribute) -> bool { GLOBALS.with(|globals| { globals.used_attrs.lock().contains(attr.id) }) } pub fn mark_known(attr: &Attribute) { debug!("Marking {:?} as known.", attr); GLOBALS.with(|globals| { globals.known_attrs.lock().insert(attr.id); }); } pub fn is_known(attr: &Attribute) 
-> bool { GLOBALS.with(|globals| { globals.known_attrs.lock().contains(attr.id) }) } pub fn is_known_lint_tool(m_item: Ident) -> bool { ["clippy"].contains(&m_item.as_str().as_ref()) } impl NestedMetaItem { /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem. pub fn meta_item(&self) -> Option<&MetaItem> { match self.node { NestedMetaItemKind::MetaItem(ref item) => Some(item), _ => None } } /// Returns the Lit if self is a NestedMetaItemKind::Literal. pub fn literal(&self) -> Option<&Lit> { match self.node { NestedMetaItemKind::Literal(ref lit) => Some(lit), _ => None } } /// Returns the Span for `self`. pub fn span(&self) -> Span { self.span } /// Returns true if this list item is a MetaItem with a name of `name`. pub fn check_name(&self, name: &str) -> bool { self.meta_item().map_or(false, |meta_item| meta_item.check_name(name)) } /// Returns the name of the meta item, e.g. `foo` in `#[foo]`, /// `#[foo="bar"]` and `#[foo(bar)]`, if self is a MetaItem pub fn name(&self) -> Option<Name> { self.meta_item().and_then(|meta_item| Some(meta_item.name())) } /// Gets the string value if self is a MetaItem and the MetaItem is a /// MetaItemKind::NameValue variant containing a string, otherwise None. pub fn value_str(&self) -> Option<Symbol> { self.meta_item().and_then(|meta_item| meta_item.value_str()) } /// Returns a name and single literal value tuple of the MetaItem. pub fn name_value_literal(&self) -> Option<(Name, &Lit)> { self.meta_item().and_then( |meta_item| meta_item.meta_item_list().and_then( |meta_item_list| { if meta_item_list.len() == 1 { let nested_item = &meta_item_list[0]; if nested_item.is_literal() { Some((meta_item.name(), nested_item.literal().unwrap())) } else { None } } else { None }})) } /// Returns a MetaItem if self is a MetaItem with Kind Word. 
pub fn word(&self) -> Option<&MetaItem> { self.meta_item().and_then(|meta_item| if meta_item.is_word() { Some(meta_item) } else { None }) } /// Gets a list of inner meta items from a list MetaItem type. pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { self.meta_item().and_then(|meta_item| meta_item.meta_item_list()) } /// Returns `true` if the variant is MetaItem. pub fn is_meta_item(&self) -> bool { self.meta_item().is_some() } /// Returns `true` if the variant is Literal. pub fn is_literal(&self) -> bool { self.literal().is_some() } /// Returns `true` if self is a MetaItem and the meta item is a word. pub fn is_word(&self) -> bool { self.word().is_some() } /// Returns `true` if self is a MetaItem and the meta item is a ValueString. pub fn is_value_str(&self) -> bool { self.value_str().is_some() } /// Returns `true` if self is a MetaItem and the meta item is a list. pub fn is_meta_item_list(&self) -> bool { self.meta_item_list().is_some() } } fn name_from_path(path: &Path) -> Name { path.segments.last().expect("empty path in attribute").ident.name } impl Attribute { pub fn check_name(&self, name: &str) -> bool { let matches = self.path == name; if matches { mark_used(self); } matches } /// Returns the **last** segment of the name of this attribute. /// E.g. `foo` for `#[foo]`, `skip` for `#[rustfmt::skip]`. pub fn name(&self) -> Name { name_from_path(&self.path) } pub fn value_str(&self) -> Option<Symbol> { self.meta().and_then(|meta| meta.value_str()) } pub fn meta_item_list(&self) -> Option<Vec<NestedMetaItem>> { match self.meta() { Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list), _ => None } } pub fn is_word(&self) -> bool { self.path.segments.len() == 1 && self.tokens.is_empty() } pub fn span(&self) -> Span { self.span } pub fn is_meta_item_list(&self) -> bool { self.meta_item_list().is_some() } /// Indicates if the attribute is a Value String. 
pub fn is_value_str(&self) -> bool { self.value_str().is_some() } } impl MetaItem { pub fn name(&self) -> Name { name_from_path(&self.ident) } pub fn value_str(&self) -> Option<Symbol> { match self.node { MetaItemKind::NameValue(ref v) => { match v.node { LitKind::Str(ref s, _) => Some(*s), _ => None, } }, _ => None } } pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { match self.node { MetaItemKind::List(ref l) => Some(&l[..]), _ => None } } pub fn is_word(&self) -> bool { match self.node { MetaItemKind::Word => true, _ => false, } } pub fn span(&self) -> Span { self.span } pub fn check_name(&self, name: &str) -> bool { self.name() == name } pub fn is_value_str(&self) -> bool { self.value_str().is_some() } pub fn is_meta_item_list(&self) -> bool { self.meta_item_list().is_some() } pub fn is_scoped(&self) -> Option<Ident> { if self.ident.segments.len() > 1 { Some(self.ident.segments[0].ident) } else { None } } } impl Attribute { /// Extract the MetaItem from inside this Attribute. 
pub fn meta(&self) -> Option<MetaItem> { let mut tokens = self.tokens.trees().peekable(); Some(MetaItem { ident: self.path.clone(), node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) { if tokens.peek().is_some() { return None; } node } else { return None; }, span: self.span, }) } pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { let mut parser = Parser::new(sess, self.tokens.clone(), None, false, false); let result = f(&mut parser)?; if parser.token != token::Eof { parser.unexpected()?; } Ok(result) } pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec<T>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { if self.tokens.is_empty() { return Ok(Vec::new()); } self.parse(sess, |parser| { parser.expect(&token::OpenDelim(token::Paren))?; let mut list = Vec::new(); while !parser.eat(&token::CloseDelim(token::Paren)) { list.push(f(parser)?); if !parser.eat(&token::Comma) { parser.expect(&token::CloseDelim(token::Paren))?; break } } Ok(list) }) } pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> { Ok(MetaItem { ident: self.path.clone(), node: self.parse(sess, |parser| parser.parse_meta_item_kind())?, span: self.span, }) } /// Convert self to a normal #[doc="foo"] comment, if it is a /// comment like `///` or `/** */`. (Returns self unchanged for /// non-sugared doc attributes.) 
pub fn with_desugared_doc<T, F>(&self, f: F) -> T where F: FnOnce(&Attribute) -> T, { if self.is_sugared_doc { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( Ident::from_str("doc"), dummy_spanned(Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())))); let mut attr = if self.style == ast::AttrStyle::Outer { mk_attr_outer(self.span, self.id, meta) } else { mk_attr_inner(self.span, self.id, meta) }; attr.is_sugared_doc = true; f(&attr) } else { f(self) } } } /* Constructors */ pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem { let value = respan(value.span, LitKind::Str(value.node, ast::StrStyle::Cooked)); mk_name_value_item(ident.span.to(value.span), ident, value) } pub fn mk_name_value_item(span: Span, ident: Ident, value: ast::Lit) -> MetaItem { MetaItem { ident: Path::from_ident(ident), span, node: MetaItemKind::NameValue(value) } } pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem { MetaItem { ident: Path::from_ident(ident), span, node: MetaItemKind::List(items) } } pub fn mk_word_item(ident: Ident) -> MetaItem { MetaItem { ident: Path::from_ident(ident), span: ident.span, node: MetaItemKind::Word } } pub fn mk_nested_word_item(ident: Ident) -> NestedMetaItem { respan(ident.span, NestedMetaItemKind::MetaItem(mk_word_item(ident))) } pub fn mk_attr_id() -> AttrId { use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; static NEXT_ATTR_ID: AtomicUsize = AtomicUsize::new(0); let id = NEXT_ATTR_ID.fetch_add(1, Ordering::SeqCst); assert!(id != ::std::usize::MAX); AttrId(id) } /// Returns an inner attribute with the given value. pub fn mk_attr_inner(span: Span, id: AttrId, item: MetaItem) -> Attribute { mk_spanned_attr_inner(span, id, item) } /// Returns an inner attribute with the given value and span. 
pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute { Attribute { id, style: ast::AttrStyle::Inner, path: item.ident, tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } } /// Returns an outer attribute with the given value. pub fn mk_attr_outer(span: Span, id: AttrId, item: MetaItem) -> Attribute { mk_spanned_attr_outer(span, id, item) } /// Returns an outer attribute with the given value and span. pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute { Attribute { id, style: ast::AttrStyle::Outer, path: item.ident, tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } } pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute { let style = doc_comment_style(&text.as_str()); let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id, style, path: Path::from_ident(Ident::from_str("doc").with_span_pos(span)), tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, span, } } pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool { items.iter().any(|item| { item.check_name(name) }) } pub fn contains_name(attrs: &[Attribute], name: &str) -> bool { attrs.iter().any(|item| { item.check_name(name) }) } pub fn find_by_name<'a>(attrs: &'a [Attribute], name: &str) -> Option<&'a Attribute> { attrs.iter().find(|attr| attr.check_name(name)) } pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option<Symbol> { attrs.iter() .find(|at| at.check_name(name)) .and_then(|at| at.value_str()) } impl MetaItem { fn tokens(&self) -> TokenStream { let mut idents = vec![]; let mut last_pos = BytePos(0 as u32); for (i, segment) in self.ident.segments.iter().enumerate() { let is_first = i == 0; if !is_first { let mod_sep_span = Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt()); idents.push(TokenTree::Token(mod_sep_span, Token::ModSep).into()); } 
idents.push(TokenTree::Token(segment.ident.span, Token::from_ast_ident(segment.ident)).into()); last_pos = segment.ident.span.hi(); } idents.push(self.node.tokens(self.span)); TokenStream::concat(idents) } fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem> where I: Iterator<Item = TokenTree>, { // FIXME: Share code with `parse_path`. let ident = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident, _))) => { if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() { let mut segments = vec![PathSegment::from_ident(ident.with_span_pos(span))]; tokens.next(); loop { if let Some(TokenTree::Token(span, Token::Ident(ident, _))) = tokens.next() { segments.push(PathSegment::from_ident(ident.with_span_pos(span))); } else { return None; } if let Some(TokenTree::Token(_, Token::ModSep)) = tokens.peek() { tokens.next(); } else { break; } } let span = span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments } } else { Path::from_ident(ident.with_span_pos(span)) } } Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 { token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), token::Nonterminal::NtPath(ref path) => path.clone(), _ => return None, }, _ => return None, }; let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi()); let node = MetaItemKind::from_tokens(tokens)?; let hi = match node { MetaItemKind::NameValue(ref lit) => lit.span.hi(), MetaItemKind::List(..) 
=> list_closing_paren_pos.unwrap_or(ident.span.hi()), _ => ident.span.hi(), }; let span = ident.span.with_hi(hi); Some(MetaItem { ident, node, span }) } } impl MetaItemKind { pub fn tokens(&self, span: Span) -> TokenStream { match *self { MetaItemKind::Word => TokenStream::empty(), MetaItemKind::NameValue(ref lit) => { TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()]) } MetaItemKind::List(ref list) => { let mut tokens = Vec::new(); for (i, item) in list.iter().enumerate() { if i > 0 { tokens.push(TokenTree::Token(span, Token::Comma).into()); } tokens.push(item.node.tokens()); } TokenTree::Delimited(span, Delimited { delim: token::Paren, tts: TokenStream::concat(tokens).into(), }).into() } } } fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind> where I: Iterator<Item = TokenTree>, { let delimited = match tokens.peek().cloned() { Some(TokenTree::Token(_, token::Eq)) => { tokens.next(); return if let Some(TokenTree::Token(span, token)) = tokens.next() { LitKind::from_token(token) .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span })) } else { None }; } Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => { tokens.next(); delimited.stream() } _ => return Some(MetaItemKind::Word), }; let mut tokens = delimited.into_trees().peekable(); let mut result = Vec::new(); while let Some(..) 
= tokens.peek() { let item = NestedMetaItemKind::from_tokens(&mut tokens)?; result.push(respan(item.span(), item)); match tokens.next() { None | Some(TokenTree::Token(_, Token::Comma)) => {} _ => return None, } } Some(MetaItemKind::List(result)) } } impl NestedMetaItemKind { fn span(&self) -> Span { match *self { NestedMetaItemKind::MetaItem(ref item) => item.span, NestedMetaItemKind::Literal(ref lit) => lit.span, } } fn tokens(&self) -> TokenStream { match *self { NestedMetaItemKind::MetaItem(ref item) => item.tokens(), NestedMetaItemKind::Literal(ref lit) => lit.tokens(), } } fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItemKind> where I: Iterator<Item = TokenTree>, { if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() { if let Some(node) = LitKind::from_token(token) { tokens.next(); return Some(NestedMetaItemKind::Literal(respan(span, node))); } } MetaItem::from_tokens(tokens).map(NestedMetaItemKind::MetaItem) } } impl Lit { crate fn tokens(&self) -> TokenStream { TokenTree::Token(self.span, self.node.token()).into() } } impl LitKind { fn token(&self) -> Token { use std::ascii; match *self { LitKind::Str(string, ast::StrStyle::Cooked) => { let escaped = string.as_str().escape_default(); Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None) } LitKind::Str(string, ast::StrStyle::Raw(n)) => { Token::Literal(token::Lit::StrRaw(string, n), None) } LitKind::ByteStr(ref bytes) => { let string = bytes.iter().cloned().flat_map(ascii::escape_default) .map(Into::<char>::into).collect::<String>(); Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None) } LitKind::Byte(byte) => { let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect(); Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None) } LitKind::Char(ch) => { let string: String = ch.escape_default().map(Into::<char>::into).collect(); Token::Literal(token::Lit::Char(Symbol::intern(&string)), None) } LitKind::Int(n, ty) => { 
let suffix = match ty { ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())), ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())), ast::LitIntType::Unsuffixed => None, }; Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix) } LitKind::Float(symbol, ty) => { Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string()))) } LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None), LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value { "true" } else { "false" })), false), } } fn from_token(token: Token) -> Option<LitKind> { match token { Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)), Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)), Token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { ExprKind::Lit(ref lit) => Some(lit.node.clone()), _ => None, }, _ => None, }, Token::Literal(lit, suf) => { let (suffix_illegal, result) = parse::lit_token(lit, suf, None); if suffix_illegal && suf.is_some() { return None; } result } _ => None, } } } pub trait HasAttrs: Sized { fn attrs(&self) -> &[ast::Attribute]; fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self; } impl<T: HasAttrs> HasAttrs for Spanned<T> { fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() } fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self { respan(self.span, self.node.map_attrs(f)) } } impl HasAttrs for Vec<Attribute> { fn attrs(&self) -> &[Attribute] { self } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self { f(self) } } impl HasAttrs for ThinVec<Attribute> { fn attrs(&self) -> &[Attribute] { self } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self { f(self.into()).into() } } impl<T: HasAttrs + 'static> HasAttrs for P<T> 
{ fn attrs(&self) -> &[Attribute] { (**self).attrs() } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self { self.map(|t| t.map_attrs(f)) } } impl HasAttrs for StmtKind { fn attrs(&self) -> &[Attribute] { match *self { StmtKind::Local(ref local) => local.attrs(), StmtKind::Item(..) => &[], StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.attrs(), StmtKind::Mac(ref mac) => { let (_, _, ref attrs) = **mac; attrs.attrs() } } } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self { match self { StmtKind::Local(local) => StmtKind::Local(local.map_attrs(f)), StmtKind::Item(..) => self, StmtKind::Expr(expr) => StmtKind::Expr(expr.map_attrs(f)), StmtKind::Semi(expr) => StmtKind::Semi(expr.map_attrs(f)), StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, style, attrs)| { (mac, style, attrs.map_attrs(f)) })), } } } impl HasAttrs for Stmt { fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() } fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self { Stmt { id: self.id, node: self.node.map_attrs(f), span: self.span } } } impl HasAttrs for GenericParam { fn attrs(&self) -> &[ast::Attribute] { &self.attrs } fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(mut self, f: F) -> Self { self.attrs = self.attrs.map_attrs(f); self } } macro_rules! derive_has_attrs { ($($ty:path),*) => { $( impl HasAttrs for $ty { fn attrs(&self) -> &[Attribute] { &self.attrs } fn map_attrs<F>(mut self, f: F) -> Self where F: FnOnce(Vec<Attribute>) -> Vec<Attribute>, { self.attrs = self.attrs.map_attrs(f); self } } )* } } derive_has_attrs! 
{ Item, Expr, Local, ast::ForeignItem, ast::StructField, ast::ImplItem, ast::TraitItem, ast::Arm, ast::Field, ast::FieldPat, ast::Variant_ }

/// Parses each string in `attrs` as a meta item and pushes it onto `krate`
/// as an inner (`#![...]`) crate attribute.
///
/// The `FileName::CliCrateAttr` source name suggests these strings come from
/// a command-line flag — TODO confirm against the caller.  A string that
/// parses but leaves trailing tokens is rejected with an "invalid crate
/// attribute" error and skipped; parse *failures* inside
/// `parse_meta_item_unrestricted` go through `panictry!` and abort instead.
pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
    for raw_attr in attrs {
        // Each attribute string gets its own single-use parser.
        let mut parser = parse::new_parser_from_source_str(
            parse_sess,
            FileName::CliCrateAttr,
            raw_attr.clone(),
        );

        let start_span = parser.span;
        let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted());
        let end_span = parser.span;
        // The whole string must be consumed; leftover tokens mean the
        // attribute text was malformed.
        if parser.token != token::Eof {
            parse_sess.span_diagnostic
                .span_err(start_span.to(end_span), "invalid crate attribute");
            continue;
        }

        krate.attrs.push(Attribute {
            id: mk_attr_id(),
            style: AttrStyle::Inner,
            path,
            tokens,
            is_sugared_doc: false,
            span: start_span.to(end_span),
        });
    }

    krate
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/attr/builtin.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Parsing and validation of builtin attributes use ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind}; use errors::{Applicability, Handler}; use feature_gate::{Features, GatedCfg}; use parse::ParseSess; use syntax_pos::{symbol::Symbol, Span}; use super::{list_contains_name, mark_used, MetaItemKind}; enum AttrError { MultipleItem(Name), UnknownMetaItem(Name, &'static [&'static str]), MissingSince, MissingFeature, MultipleStabilityLevels, UnsupportedLiteral } fn handle_errors(diag: &Handler, span: Span, error: AttrError) { match error { AttrError::MultipleItem(item) => span_err!(diag, span, E0538, "multiple '{}' items", item), AttrError::UnknownMetaItem(item, expected) => { let expected = expected .iter() .map(|name| format!("`{}`", name)) .collect::<Vec<_>>(); struct_span_err!(diag, span, E0541, "unknown meta item '{}'", item) .span_label(span, format!("expected one of {}", expected.join(", "))) .emit(); } AttrError::MissingSince => span_err!(diag, span, E0542, "missing 'since'"), AttrError::MissingFeature => span_err!(diag, span, E0546, "missing 'feature'"), AttrError::MultipleStabilityLevels => span_err!(diag, span, E0544, "multiple stability levels"), AttrError::UnsupportedLiteral => span_err!(diag, span, E0565, "unsupported literal"), } } #[derive(Copy, Clone, Hash, PartialEq, RustcEncodable, RustcDecodable)] pub enum InlineAttr { None, Hint, Always, Never, } #[derive(Copy, Clone, PartialEq)] pub enum UnwindAttr { Allowed, Aborts, } /// Determine what `#[unwind]` attribute is present in `attrs`, 
if any. pub fn find_unwind_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> Option<UnwindAttr> { let syntax_error = |attr: &Attribute| { mark_used(attr); diagnostic.map(|d| { span_err!(d, attr.span, E0633, "malformed `#[unwind]` attribute"); }); None }; attrs.iter().fold(None, |ia, attr| { if attr.path != "unwind" { return ia; } let meta = match attr.meta() { Some(meta) => meta.node, None => return ia, }; match meta { MetaItemKind::Word => { syntax_error(attr) } MetaItemKind::List(ref items) => { mark_used(attr); if items.len() != 1 { syntax_error(attr) } else if list_contains_name(&items[..], "allowed") { Some(UnwindAttr::Allowed) } else if list_contains_name(&items[..], "aborts") { Some(UnwindAttr::Aborts) } else { syntax_error(attr) } } _ => ia, } }) } /// Represents the #[stable], #[unstable], #[rustc_{deprecated,const_unstable}] attributes. #[derive(RustcEncodable, RustcDecodable, Clone, Debug, PartialEq, Eq, Hash)] pub struct Stability { pub level: StabilityLevel, pub feature: Symbol, pub rustc_depr: Option<RustcDeprecation>, pub rustc_const_unstable: Option<RustcConstUnstable>, } /// The available stability levels. 
#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub enum StabilityLevel { // Reason for the current stability level and the relevant rust-lang issue Unstable { reason: Option<Symbol>, issue: u32 }, Stable { since: Symbol }, } impl StabilityLevel { pub fn is_unstable(&self) -> bool { if let StabilityLevel::Unstable {..} = *self { true } else { false } } pub fn is_stable(&self) -> bool { if let StabilityLevel::Stable {..} = *self { true } else { false } } } #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub struct RustcDeprecation { pub since: Symbol, pub reason: Symbol, } #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub struct RustcConstUnstable { pub feature: Symbol, } /// Check if `attrs` contains an attribute like `#![feature(feature_name)]`. /// This will not perform any "sanity checks" on the form of the attributes. pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool { attrs.iter().any(|item| { item.check_name("feature") && item.meta_item_list().map(|list| { list.iter().any(|mi| { mi.word().map(|w| w.name() == feature_name) .unwrap_or(false) }) }).unwrap_or(false) }) } /// Find the first stability attribute. `None` if none exists. 
pub fn find_stability(diagnostic: &Handler, attrs: &[Attribute], item_sp: Span) -> Option<Stability> { find_stability_generic(diagnostic, attrs.iter(), item_sp) } fn find_stability_generic<'a, I>(diagnostic: &Handler, attrs_iter: I, item_sp: Span) -> Option<Stability> where I: Iterator<Item = &'a Attribute> { use self::StabilityLevel::*; let mut stab: Option<Stability> = None; let mut rustc_depr: Option<RustcDeprecation> = None; let mut rustc_const_unstable: Option<RustcConstUnstable> = None; 'outer: for attr in attrs_iter { if ![ "rustc_deprecated", "rustc_const_unstable", "unstable", "stable", ].iter().any(|&s| attr.path == s) { continue // not a stability level } mark_used(attr); let meta = attr.meta(); if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta { let meta = meta.as_ref().unwrap(); let get = |meta: &MetaItem, item: &mut Option<Symbol>| { if item.is_some() { handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name())); return false } if let Some(v) = meta.value_str() { *item = Some(v); true } else { span_err!(diagnostic, meta.span, E0539, "incorrect meta item"); false } }; macro_rules! 
get_meta { ($($name:ident),+) => { $( let mut $name = None; )+ for meta in metas { if let Some(mi) = meta.meta_item() { match &*mi.name().as_str() { $( stringify!($name) => if !get(mi, &mut $name) { continue 'outer }, )+ _ => { let expected = &[ $( stringify!($name) ),+ ]; handle_errors( diagnostic, mi.span, AttrError::UnknownMetaItem(mi.name(), expected)); continue 'outer } } } else { handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral); continue 'outer } } } } match &*meta.name().as_str() { "rustc_deprecated" => { if rustc_depr.is_some() { span_err!(diagnostic, item_sp, E0540, "multiple rustc_deprecated attributes"); continue 'outer } get_meta!(since, reason); match (since, reason) { (Some(since), Some(reason)) => { rustc_depr = Some(RustcDeprecation { since, reason, }) } (None, _) => { handle_errors(diagnostic, attr.span(), AttrError::MissingSince); continue } _ => { span_err!(diagnostic, attr.span(), E0543, "missing 'reason'"); continue } } } "rustc_const_unstable" => { if rustc_const_unstable.is_some() { span_err!(diagnostic, item_sp, E0553, "multiple rustc_const_unstable attributes"); continue 'outer } get_meta!(feature); if let Some(feature) = feature { rustc_const_unstable = Some(RustcConstUnstable { feature }); } else { span_err!(diagnostic, attr.span(), E0629, "missing 'feature'"); continue } } "unstable" => { if stab.is_some() { handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels); break } let mut feature = None; let mut reason = None; let mut issue = None; for meta in metas { if let Some(mi) = meta.meta_item() { match &*mi.name().as_str() { "feature" => if !get(mi, &mut feature) { continue 'outer }, "reason" => if !get(mi, &mut reason) { continue 'outer }, "issue" => if !get(mi, &mut issue) { continue 'outer }, _ => { handle_errors( diagnostic, meta.span, AttrError::UnknownMetaItem( mi.name(), &["feature", "reason", "issue"] ), ); continue 'outer } } } else { handle_errors(diagnostic, meta.span, 
AttrError::UnsupportedLiteral); continue 'outer } } match (feature, reason, issue) { (Some(feature), reason, Some(issue)) => { stab = Some(Stability { level: Unstable { reason, issue: { if let Ok(issue) = issue.as_str().parse() { issue } else { span_err!(diagnostic, attr.span(), E0545, "incorrect 'issue'"); continue } } }, feature, rustc_depr: None, rustc_const_unstable: None, }) } (None, _, _) => { handle_errors(diagnostic, attr.span(), AttrError::MissingFeature); continue } _ => { span_err!(diagnostic, attr.span(), E0547, "missing 'issue'"); continue } } } "stable" => { if stab.is_some() { handle_errors(diagnostic, attr.span(), AttrError::MultipleStabilityLevels); break } let mut feature = None; let mut since = None; for meta in metas { if let NestedMetaItemKind::MetaItem(ref mi) = meta.node { match &*mi.name().as_str() { "feature" => if !get(mi, &mut feature) { continue 'outer }, "since" => if !get(mi, &mut since) { continue 'outer }, _ => { handle_errors( diagnostic, meta.span, AttrError::UnknownMetaItem(mi.name(), &["since", "note"]), ); continue 'outer } } } else { handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral); continue 'outer } } match (feature, since) { (Some(feature), Some(since)) => { stab = Some(Stability { level: Stable { since, }, feature, rustc_depr: None, rustc_const_unstable: None, }) } (None, _) => { handle_errors(diagnostic, attr.span(), AttrError::MissingFeature); continue } _ => { handle_errors(diagnostic, attr.span(), AttrError::MissingSince); continue } } } _ => unreachable!() } } else { span_err!(diagnostic, attr.span(), E0548, "incorrect stability attribute type"); continue } } // Merge the deprecation info into the stability info if let Some(rustc_depr) = rustc_depr { if let Some(ref mut stab) = stab { stab.rustc_depr = Some(rustc_depr); } else { span_err!(diagnostic, item_sp, E0549, "rustc_deprecated attribute must be paired with \ either stable or unstable attribute"); } } // Merge the const-unstable info into the 
stability info if let Some(rustc_const_unstable) = rustc_const_unstable { if let Some(ref mut stab) = stab { stab.rustc_const_unstable = Some(rustc_const_unstable); } else { span_err!(diagnostic, item_sp, E0630, "rustc_const_unstable attribute must be paired with \ either stable or unstable attribute"); } } stab } pub fn find_crate_name(attrs: &[Attribute]) -> Option<Symbol> { super::first_attr_value_str_by_name(attrs, "crate_name") } /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool { eval_condition(cfg, sess, &mut |cfg| { if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) { gated_cfg.check_and_emit(sess, feats); } sess.config.contains(&(cfg.name(), cfg.value_str())) }) } /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to /// evaluate individual items. pub fn eval_condition<F>(cfg: &ast::MetaItem, sess: &ParseSess, eval: &mut F) -> bool where F: FnMut(&ast::MetaItem) -> bool { match cfg.node { ast::MetaItemKind::List(ref mis) => { for mi in mis.iter() { if !mi.is_meta_item() { handle_errors(&sess.span_diagnostic, mi.span, AttrError::UnsupportedLiteral); return false; } } // The unwraps below may look dangerous, but we've already asserted // that they won't fail with the loop above. match &*cfg.name().as_str() { "any" => mis.iter().any(|mi| { eval_condition(mi.meta_item().unwrap(), sess, eval) }), "all" => mis.iter().all(|mi| { eval_condition(mi.meta_item().unwrap(), sess, eval) }), "not" => { if mis.len() != 1 { span_err!(sess.span_diagnostic, cfg.span, E0536, "expected 1 cfg-pattern"); return false; } !eval_condition(mis[0].meta_item().unwrap(), sess, eval) }, p => { span_err!(sess.span_diagnostic, cfg.span, E0537, "invalid predicate `{}`", p); false } } }, ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) 
=> { eval(cfg) } } } #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub struct Deprecation { pub since: Option<Symbol>, pub note: Option<Symbol>, } /// Find the deprecation attribute. `None` if none exists. pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute], item_sp: Span) -> Option<Deprecation> { find_deprecation_generic(diagnostic, attrs.iter(), item_sp) } fn find_deprecation_generic<'a, I>(diagnostic: &Handler, attrs_iter: I, item_sp: Span) -> Option<Deprecation> where I: Iterator<Item = &'a Attribute> { let mut depr: Option<Deprecation> = None; 'outer: for attr in attrs_iter { if attr.path != "deprecated" { continue } mark_used(attr); if depr.is_some() { span_err!(diagnostic, item_sp, E0550, "multiple deprecated attributes"); break } depr = if let Some(metas) = attr.meta_item_list() { let get = |meta: &MetaItem, item: &mut Option<Symbol>| { if item.is_some() { handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name())); return false } if let Some(v) = meta.value_str() { *item = Some(v); true } else { span_err!(diagnostic, meta.span, E0551, "incorrect meta item"); false } }; let mut since = None; let mut note = None; for meta in metas { if let NestedMetaItemKind::MetaItem(ref mi) = meta.node { match &*mi.name().as_str() { "since" => if !get(mi, &mut since) { continue 'outer }, "note" => if !get(mi, &mut note) { continue 'outer }, _ => { handle_errors( diagnostic, meta.span, AttrError::UnknownMetaItem(mi.name(), &["since", "note"]), ); continue 'outer } } } else { handle_errors(diagnostic, meta.span, AttrError::UnsupportedLiteral); continue 'outer } } Some(Deprecation {since: since, note: note}) } else { Some(Deprecation{since: None, note: None}) } } depr } #[derive(PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)] pub enum ReprAttr { ReprInt(IntType), ReprC, ReprPacked(u32), ReprSimd, ReprTransparent, ReprAlign(u32), } #[derive(Eq, Hash, PartialEq, Debug, RustcEncodable, 
RustcDecodable, Copy, Clone)] pub enum IntType { SignedInt(ast::IntTy), UnsignedInt(ast::UintTy) } impl IntType { #[inline] pub fn is_signed(self) -> bool { use self::IntType::*; match self { SignedInt(..) => true, UnsignedInt(..) => false } } } /// Parse #[repr(...)] forms. /// /// Valid repr contents: any of the primitive integral type names (see /// `int_type_of_word`, below) to specify enum discriminant type; `C`, to use /// the same discriminant size that the corresponding C enum would or C /// structure layout, `packed` to remove padding, and `transparent` to elegate representation /// concerns to the only non-ZST field. pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr> { use self::ReprAttr::*; let mut acc = Vec::new(); if attr.path == "repr" { if let Some(items) = attr.meta_item_list() { mark_used(attr); for item in items { if !item.is_meta_item() { handle_errors(diagnostic, item.span, AttrError::UnsupportedLiteral); continue } let mut recognised = false; if let Some(mi) = item.word() { let word = &*mi.name().as_str(); let hint = match word { "C" => Some(ReprC), "packed" => Some(ReprPacked(1)), "simd" => Some(ReprSimd), "transparent" => Some(ReprTransparent), _ => match int_type_of_word(word) { Some(ity) => Some(ReprInt(ity)), None => { None } } }; if let Some(h) = hint { recognised = true; acc.push(h); } } else if let Some((name, value)) = item.name_value_literal() { let parse_alignment = |node: &ast::LitKind| -> Result<u32, &'static str> { if let ast::LitKind::Int(literal, ast::LitIntType::Unsuffixed) = node { if literal.is_power_of_two() { // rustc::ty::layout::Align restricts align to <= 2^29 if *literal <= 1 << 29 { Ok(*literal as u32) } else { Err("larger than 2^29") } } else { Err("not a power of two") } } else { Err("not an unsuffixed integer") } }; let mut literal_error = None; if name == "align" { recognised = true; match parse_alignment(&value.node) { Ok(literal) => acc.push(ReprAlign(literal)), Err(message) => 
literal_error = Some(message) }; } else if name == "packed" { recognised = true; match parse_alignment(&value.node) { Ok(literal) => acc.push(ReprPacked(literal)), Err(message) => literal_error = Some(message) }; } if let Some(literal_error) = literal_error { span_err!(diagnostic, item.span, E0589, "invalid `repr(align)` attribute: {}", literal_error); } } else { if let Some(meta_item) = item.meta_item() { if meta_item.name() == "align" { if let MetaItemKind::NameValue(ref value) = meta_item.node { recognised = true; let mut err = struct_span_err!(diagnostic, item.span, E0693, "incorrect `repr(align)` attribute format"); match value.node { ast::LitKind::Int(int, ast::LitIntType::Unsuffixed) => { err.span_suggestion_with_applicability( item.span, "use parentheses instead", format!("align({})", int), Applicability::MachineApplicable ); } ast::LitKind::Str(s, _) => { err.span_suggestion_with_applicability( item.span, "use parentheses instead", format!("align({})", s), Applicability::MachineApplicable ); } _ => {} } err.emit(); } } } } if !recognised { // Not a word we recognize span_err!(diagnostic, item.span, E0552, "unrecognized representation hint"); } } } } acc } fn int_type_of_word(s: &str) -> Option<IntType> { use self::IntType::*; match s { "i8" => Some(SignedInt(ast::IntTy::I8)), "u8" => Some(UnsignedInt(ast::UintTy::U8)), "i16" => Some(SignedInt(ast::IntTy::I16)), "u16" => Some(UnsignedInt(ast::UintTy::U16)), "i32" => Some(SignedInt(ast::IntTy::I32)), "u32" => Some(UnsignedInt(ast::UintTy::U32)), "i64" => Some(SignedInt(ast::IntTy::I64)), "u64" => Some(UnsignedInt(ast::UintTy::U64)), "i128" => Some(SignedInt(ast::IntTy::I128)), "u128" => Some(UnsignedInt(ast::UintTy::U128)), "isize" => Some(SignedInt(ast::IntTy::Isize)), "usize" => Some(UnsignedInt(ast::UintTy::Usize)), _ => None } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/diagnostics/metadata.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This module contains utilities for outputting metadata for diagnostic errors. //! //! Each set of errors is mapped to a metadata file by a name, which is //! currently always a crate name. use std::collections::BTreeMap; use std::env; use std::fs::{remove_file, create_dir_all, File}; use std::io::Write; use std::path::PathBuf; use std::error::Error; use rustc_serialize::json::as_json; use syntax_pos::{Span, FileName}; use ext::base::ExtCtxt; use diagnostics::plugin::{ErrorMap, ErrorInfo}; /// JSON encodable/decodable version of `ErrorInfo`. #[derive(PartialEq, RustcDecodable, RustcEncodable)] pub struct ErrorMetadata { pub description: Option<String>, pub use_site: Option<ErrorLocation> } /// Mapping from error codes to metadata that can be (de)serialized. pub type ErrorMetadataMap = BTreeMap<String, ErrorMetadata>; /// JSON encodable error location type with filename and line number. #[derive(PartialEq, RustcDecodable, RustcEncodable)] pub struct ErrorLocation { pub filename: FileName, pub line: usize } impl ErrorLocation { /// Create an error location from a span. pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation { let loc = ecx.source_map().lookup_char_pos_adj(sp.lo()); ErrorLocation { filename: loc.filename, line: loc.line } } } /// Get the directory where metadata for a given `prefix` should be stored. /// /// See `output_metadata`. 
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
    // Panics if the build system did not export the destination directory;
    // this is only ever run from rustc's own build, where the var is set.
    env::var_os("RUSTC_ERROR_METADATA_DST")
        .map(PathBuf::from)
        .expect("env var `RUSTC_ERROR_METADATA_DST` isn't set")
        .join(prefix)
}

/// Map `name` to a path in the given directory: <directory>/<name>.json
fn get_metadata_path(directory: PathBuf, name: &str) -> PathBuf {
    directory.join(format!("{}.json", name))
}

/// Write metadata for the errors in `err_map` to disk, to a file corresponding to `prefix/name`.
///
/// For our current purposes the prefix is the target architecture and the name is a crate name.
/// If an error occurs steps will be taken to ensure that no file is created.
pub fn output_metadata(ecx: &ExtCtxt, prefix: &str, name: &str, err_map: &ErrorMap)
    -> Result<(), Box<dyn Error>>
{
    // Create the directory to place the file in.
    let metadata_dir = get_metadata_dir(prefix);
    create_dir_all(&metadata_dir)?;

    // Open the metadata file.
    let metadata_path = get_metadata_path(metadata_dir, name);
    let mut metadata_file = File::create(&metadata_path)?;

    // Construct a serializable map: interned names/spans become plain
    // `String`/`ErrorLocation` values so they can round-trip through JSON.
    let json_map = err_map.iter().map(|(k, &ErrorInfo { description, use_site })| {
        let key = k.as_str().to_string();
        let value = ErrorMetadata {
            description: description.map(|n| n.as_str().to_string()),
            use_site: use_site.map(|sp| ErrorLocation::from_span(ecx, sp))
        };
        (key, value)
    }).collect::<ErrorMetadataMap>();

    // Write the data to the file, deleting it if the write fails.
    // NOTE(review): if `remove_file` itself fails, its error replaces the
    // original write error via `?` — presumably acceptable here, but worth
    // confirming.
    let result = write!(&mut metadata_file, "{}", as_json(&json_map));
    if result.is_err() {
        remove_file(&metadata_path)?;
    }
    Ok(result?)
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/diagnostics/macros.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[macro_export] macro_rules! register_diagnostic { ($code:tt, $description:tt) => (__register_diagnostic! { $code, $description }); ($code:tt) => (__register_diagnostic! { $code }) } #[macro_export] macro_rules! span_fatal { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.span_fatal_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! span_err { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.span_err_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! span_warn { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.span_warn_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! struct_err { ($session:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.struct_err_with_code( &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! 
span_err_or_warn { ($is_warning:expr, $session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); if $is_warning { $session.span_warn_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) } else { $session.span_err_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) } }) } #[macro_export] macro_rules! struct_span_fatal { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.struct_span_fatal_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! struct_span_err { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.struct_span_err_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! stringify_error_code { ($code:ident) => ({ __diagnostic_used!($code); $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()) }) } #[macro_export] macro_rules! type_error_struct { ($session:expr, $span:expr, $typ:expr, $code:ident, $($message:tt)*) => ({ if $typ.references_error() { $session.diagnostic().struct_dummy() } else { struct_span_err!($session, $span, $code, $($message)*) } }) } #[macro_export] macro_rules! struct_span_warn { ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); $session.struct_span_warn_with_code( $span, &format!($($message)*), $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()), ) }) } #[macro_export] macro_rules! 
struct_span_err_or_warn {
    ($is_warning:expr, $session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
        // Record the code as used, then build either a warning or an error
        // diagnostic depending on the runtime flag.
        __diagnostic_used!($code);
        if $is_warning {
            $session.struct_span_warn_with_code(
                $span,
                &format!($($message)*),
                $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()),
            )
        } else {
            $session.struct_span_err_with_code(
                $span,
                &format!($($message)*),
                $crate::errors::DiagnosticId::Error(stringify!($code).to_owned()),
            )
        }
    })
}

/// Attach a note message pointing at `$span` to an in-flight diagnostic
/// builder `$err`.
#[macro_export]
macro_rules! span_note {
    ($err:expr, $span:expr, $($message:tt)*) => ({
        ($err).span_note($span, &format!($($message)*));
    })
}

/// Attach a help message pointing at `$span` to an in-flight diagnostic
/// builder `$err`.
#[macro_export]
macro_rules! span_help {
    ($err:expr, $span:expr, $($message:tt)*) => ({
        ($err).span_help($span, &format!($($message)*));
    })
}

/// Attach a span-less help message to an in-flight diagnostic builder `$err`.
#[macro_export]
macro_rules! help {
    ($err:expr, $($message:tt)*) => ({
        ($err).help(&format!($($message)*));
    })
}

/// Register a list of short (description-less) error codes.  The second arm
/// only exists to accept a trailing comma.
#[macro_export]
macro_rules! register_diagnostics {
    ($($code:tt),*) => (
        $(register_diagnostic! { $code })*
    );
    ($($code:tt),*,) => (
        $(register_diagnostic! { $code })*
    )
}

/// Register a list of `code: description` pairs with extended descriptions.
/// The second arm only exists to accept a trailing comma.
#[macro_export]
macro_rules! register_long_diagnostics {
    ($($code:tt: $description:tt),*) => (
        $(register_diagnostic! { $code, $description })*
    );
    ($($code:tt: $description:tt),*,) => (
        $(register_diagnostic! { $code, $description })*
    )
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax/diagnostics/plugin.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::collections::BTreeMap;
use std::env;

use ast;
use ast::{Ident, Name};
use source_map;
use syntax_pos::Span;
use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
use OneVector;
use symbol::{keywords, Symbol};
use tokenstream::{TokenTree};
use diagnostics::metadata::output_metadata;

pub use errors::*;

// Maximum width of any line in an extended error description (inclusive).
const MAX_DESCRIPTION_WIDTH: usize = 80;

/// Error information type.
pub struct ErrorInfo {
    // Interned extended description, if one was registered for the code.
    pub description: Option<Name>,
    // Span of the first `__diagnostic_used!` invocation, once the code is used.
    pub use_site: Option<Span>
}

/// Mapping from error codes to metadata.
pub type ErrorMap = BTreeMap<Name, ErrorInfo>;

/// Expander for `__diagnostic_used!($code)`: records the use site of `$code`
/// in the session's registered-diagnostics map, warning when the code was
/// already used elsewhere and erroring when it was never registered.
/// Expands to the unit expression `()`.
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                   span: Span,
                                   token_tree: &[TokenTree])
                                   -> Box<dyn MacResult+'cx> {
    // The macro takes exactly one token: the error-code identifier.
    let code = match (token_tree.len(), token_tree.get(0)) {
        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
        _ => unreachable!()
    };

    ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
        match diagnostics.get_mut(&code.name) {
            // Previously used errors.
            Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => {
                ecx.struct_span_warn(span, &format!(
                    "diagnostic code {} already used", code
                )).span_note(previous_span, "previous invocation")
                  .emit();
            }
            // Newly used errors.
            Some(ref mut info) => {
                info.use_site = Some(span);
            }
            // Unregistered errors.
            None => {
                ecx.span_err(span, &format!(
                    "used diagnostic code {} not registered", code
                ));
            }
        }
    });
    MacEager::expr(ecx.expr_tuple(span, Vec::new()))
}

/// Expander for `register_diagnostic! { $code }` / `{ $code, $description }`:
/// validates the optional description (must start/end with a newline; lines
/// capped at `MAX_DESCRIPTION_WIDTH` unless they look like footnote URLs),
/// inserts the code into the registered-diagnostics map (erroring on
/// duplicates), and expands to an empty marker module with a gensym'd name.
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
                                       span: Span,
                                       token_tree: &[TokenTree])
                                       -> Box<dyn MacResult+'cx> {
    let (code, description) = match (
        token_tree.len(),
        token_tree.get(0),
        token_tree.get(1),
        token_tree.get(2)
    ) {
        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
            (code, None)
        },
        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
            Some(&TokenTree::Token(_, token::Comma)),
            Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _),
                                                     None)))) => {
            (code, Some(description))
        }
        _ => unreachable!()
    };

    // Check that the description starts and ends with a newline and doesn't
    // overflow the maximum line width.
    description.map(|raw_msg| {
        let msg = raw_msg.as_str();
        if !msg.starts_with("\n") || !msg.ends_with("\n") {
            ecx.span_err(span, &format!(
                "description for error code {} doesn't start and end with a newline",
                code
            ));
        }

        // URLs can be unavoidably longer than the line limit, so we allow them.
        // Allowed format is: `[name]: https://www.rust-lang.org/`
        let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
        if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
            ecx.span_err(span, &format!(
                "description for error code {} contains a line longer than {} characters.\n\
                 if you're inserting a long URL use the footnote style to bypass this check.",
                code, MAX_DESCRIPTION_WIDTH
            ));
        }
    });

    // Add the error to the map.
    ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
        let info = ErrorInfo {
            description,
            use_site: None
        };
        if diagnostics.insert(code.name, info).is_some() {
            ecx.span_err(span, &format!(
                "diagnostic code {} already registered", code
            ));
        }
    });

    // Expand to an empty module with a unique (gensym'd) name so the macro
    // invocation produces a valid item.
    let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!(
        "__register_diagnostic_{}", code
    )));
    MacEager::items(OneVector::from_vec(vec![
        ecx.item_mod(
            span,
            span,
            sym,
            Vec::new(),
            Vec::new()
        )
    ]))
}

/// Expander for `__build_diagnostic_array! { $crate_name, DIAGNOSTICS }`:
/// optionally dumps error metadata as JSON (when `CFG_COMPILER_HOST_TRIPLE`
/// is set), then expands to a public const array of
/// `(&'static str, &'static str)` code/description pairs named `$name`.
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
                                          span: Span,
                                          token_tree: &[TokenTree])
                                          -> Box<dyn MacResult+'cx> {
    assert_eq!(token_tree.len(), 3);
    let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
        (
            // Crate name.
            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
            // DIAGNOSTICS ident.
            &TokenTree::Token(_, token::Ident(ref name, _))
        ) => (*&crate_name, name),
        _ => unreachable!()
    };

    // Output error metadata to `tmp/extended-errors/<target arch>/<crate name>.json`
    if let Ok(target_triple) = env::var("CFG_COMPILER_HOST_TRIPLE") {
        ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
            if let Err(e) = output_metadata(ecx,
                                            &target_triple,
                                            &crate_name.as_str(),
                                            diagnostics) {
                ecx.span_bug(span, &format!(
                    "error writing metadata for triple `{}` and crate `{}`, error: {}, \
                     cause: {:?}",
                    target_triple, crate_name, e.description(), e.cause()
                ));
            }
        });
    } else {
        ecx.span_err(span, &format!(
            "failed to write metadata for crate `{}` because $CFG_COMPILER_HOST_TRIPLE is not set",
            crate_name));
    }

    // Construct the output expression.
    let (count, expr) =
        ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
            // Only codes with an extended description appear in the array.
            let descriptions: Vec<P<ast::Expr>> =
                diagnostics.iter().filter_map(|(&code, info)| {
                    info.description.map(|description| {
                        ecx.expr_tuple(span, vec![
                            ecx.expr_str(span, code),
                            ecx.expr_str(span, description)
                        ])
                    })
                }).collect();
            (descriptions.len(), ecx.expr_vec(span, descriptions))
        });

    // Build the type `[(&'static str, &'static str); count]`.
    let static_ = ecx.lifetime(span, keywords::StaticLifetime.ident());
    let ty_str = ecx.ty_rptr(
        span,
        ecx.ty_ident(span, ecx.ident_of("str")),
        Some(static_),
        ast::Mutability::Immutable,
    );

    let ty = ecx.ty(
        span,
        ast::TyKind::Array(
            ecx.ty(
                span,
                ast::TyKind::Tup(vec![ty_str.clone(), ty_str])
            ),
            ast::AnonConst {
                id: ast::DUMMY_NODE_ID,
                value: ecx.expr_usize(span, count),
            },
        ),
    );

    MacEager::items(OneVector::from_vec(vec![
        P(ast::Item {
            ident: *name,
            attrs: Vec::new(),
            id: ast::DUMMY_NODE_ID,
            node: ast::ItemKind::Const(
                ty,
                expr,
            ),
            vis: source_map::respan(span.shrink_to_lo(), ast::VisibilityKind::Public),
            span,
            tokens: None,
        })
    ]))
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/analyze_source_file.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use unicode_width::UnicodeWidthChar;

use super::*;

/// Find all newlines, multi-byte characters, and non-narrow characters in a
/// SourceFile.
///
/// This function will use an SSE2 enhanced implementation if hardware support
/// is detected at runtime.
///
/// Returns `(line_start_positions, multi_byte_chars, non_narrow_chars)`, all
/// expressed as absolute `BytePos` values offset by `source_file_start_pos`.
pub fn analyze_source_file(
    src: &str,
    source_file_start_pos: BytePos)
    -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>)
{
    // The first line always starts at the beginning of the file.
    let mut lines = vec![source_file_start_pos];
    let mut multi_byte_chars = vec![];
    let mut non_narrow_chars = vec![];

    // Calls the right implementation, depending on hardware support available.
    analyze_source_file_dispatch(src,
                                 source_file_start_pos,
                                 &mut lines,
                                 &mut multi_byte_chars,
                                 &mut non_narrow_chars);

    // The code above optimistically registers a new line *after* each \n
    // it encounters. If that point is already outside the source_file, remove
    // it again.
    if let Some(&last_line_start) = lines.last() {
        let file_map_end = source_file_start_pos + BytePos::from_usize(src.len());
        assert!(file_map_end >= last_line_start);
        if last_line_start == file_map_end {
            lines.pop();
        }
    }

    (lines, multi_byte_chars, non_narrow_chars)
}

cfg_if! {
    if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), not(stage0)))] {
        // On x86/x86_64, probe for SSE2 at runtime and use the vectorized
        // scanner when available; otherwise fall back to the generic one.
        fn analyze_source_file_dispatch(src: &str,
                                        source_file_start_pos: BytePos,
                                        lines: &mut Vec<BytePos>,
                                        multi_byte_chars: &mut Vec<MultiByteChar>,
                                        non_narrow_chars: &mut Vec<NonNarrowChar>) {
            if is_x86_feature_detected!("sse2") {
                unsafe {
                    analyze_source_file_sse2(src,
                                             source_file_start_pos,
                                             lines,
                                             multi_byte_chars,
                                             non_narrow_chars);
                }
            } else {
                analyze_source_file_generic(src,
                                            src.len(),
                                            source_file_start_pos,
                                            lines,
                                            multi_byte_chars,
                                            non_narrow_chars);
            }
        }

        /// Check 16 byte chunks of text at a time. If the chunk contains
        /// something other than printable ASCII characters and newlines, the
        /// function falls back to the generic implementation. Otherwise it uses
        /// SSE2 intrinsics to quickly find all newlines.
        #[target_feature(enable = "sse2")]
        unsafe fn analyze_source_file_sse2(src: &str,
                                           output_offset: BytePos,
                                           lines: &mut Vec<BytePos>,
                                           multi_byte_chars: &mut Vec<MultiByteChar>,
                                           non_narrow_chars: &mut Vec<NonNarrowChar>) {
            #[cfg(target_arch = "x86")]
            use std::arch::x86::*;
            #[cfg(target_arch = "x86_64")]
            use std::arch::x86_64::*;

            const CHUNK_SIZE: usize = 16;

            let src_bytes = src.as_bytes();

            let chunk_count = src.len() / CHUNK_SIZE;

            // This variable keeps track of where we should start decoding a
            // chunk. If a multi-byte character spans across chunk boundaries,
            // we need to skip that part in the next chunk because we already
            // handled it.
            let mut intra_chunk_offset = 0;

            for chunk_index in 0 .. chunk_count {
                let ptr = src_bytes.as_ptr() as *const __m128i;
                // We don't know if the pointer is aligned to 16 bytes, so we
                // use `loadu`, which supports unaligned loading.
                let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));

                // For character in the chunk, see if its byte value is < 0, which
                // indicates that it's part of a UTF-8 char.
                let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
                // Create a bit mask from the comparison results.
                let multibyte_mask = _mm_movemask_epi8(multibyte_test);

                // If the bit mask is all zero, we only have ASCII chars here:
                if multibyte_mask == 0 {
                    assert!(intra_chunk_offset == 0);

                    // Check if there are any control characters in the chunk. All
                    // control characters that we can encounter at this point have a
                    // byte value less than 32 or ...
                    let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32));
                    let control_char_mask0 = _mm_movemask_epi8(control_char_test0);

                    // ... it's the ASCII 'DEL' character with a value of 127.
                    let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127));
                    let control_char_mask1 = _mm_movemask_epi8(control_char_test1);

                    let control_char_mask = control_char_mask0 | control_char_mask1;

                    if control_char_mask != 0 {
                        // Check for newlines in the chunk
                        let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
                        let newlines_mask = _mm_movemask_epi8(newlines_test);

                        if control_char_mask == newlines_mask {
                            // All control characters are newlines, record them.
                            // The high 16 bits are set so `trailing_zeros()`
                            // terminates the loop at CHUNK_SIZE.
                            let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
                            // `+ 1`: a line starts at the byte *after* its '\n'.
                            let output_offset = output_offset +
                                BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);

                            loop {
                                let index = newlines_mask.trailing_zeros();

                                if index >= CHUNK_SIZE as u32 {
                                    // We have arrived at the end of the chunk.
                                    break
                                }

                                lines.push(BytePos(index) + output_offset);

                                // Clear the bit, so we can find the next one.
                                newlines_mask &= (!1) << index;
                            }

                            // We are done for this chunk. All control characters were
                            // newlines and we took care of those.
                            continue
                        } else {
                            // Some of the control characters are not newlines,
                            // fall through to the slow path below.
                        }
                    } else {
                        // No control characters, nothing to record for this chunk
                        continue
                    }
                }

                // The slow path.
                // There are control chars in here, fallback to generic decoding.
                let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
                intra_chunk_offset = analyze_source_file_generic(
                    &src[scan_start .. ],
                    CHUNK_SIZE - intra_chunk_offset,
                    BytePos::from_usize(scan_start) + output_offset,
                    lines,
                    multi_byte_chars,
                    non_narrow_chars
                );
            }

            // There might still be a tail left to analyze
            let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
            if tail_start < src.len() {
                analyze_source_file_generic(&src[tail_start as usize ..],
                                            src.len() - tail_start,
                                            output_offset + BytePos::from_usize(tail_start),
                                            lines,
                                            multi_byte_chars,
                                            non_narrow_chars);
            }
        }
    } else {
        // The target (or compiler version) does not support SSE2 ...
        fn analyze_source_file_dispatch(src: &str,
                                        source_file_start_pos: BytePos,
                                        lines: &mut Vec<BytePos>,
                                        multi_byte_chars: &mut Vec<MultiByteChar>,
                                        non_narrow_chars: &mut Vec<NonNarrowChar>) {
            analyze_source_file_generic(src,
                                        src.len(),
                                        source_file_start_pos,
                                        lines,
                                        multi_byte_chars,
                                        non_narrow_chars);
        }
    }
}

// `scan_len` determines the number of bytes in `src` to scan. Note that the
// function can read past `scan_len` if a multi-byte character start within the
// range but extends past it. The overflow is returned by the function.
fn analyze_source_file_generic(src: &str,
                               scan_len: usize,
                               output_offset: BytePos,
                               lines: &mut Vec<BytePos>,
                               multi_byte_chars: &mut Vec<MultiByteChar>,
                               non_narrow_chars: &mut Vec<NonNarrowChar>)
                               -> usize {
    assert!(src.len() >= scan_len);
    let mut i = 0;
    let src_bytes = src.as_bytes();

    while i < scan_len {
        let byte = unsafe {
            // We verified that i < scan_len <= src.len()
            *src_bytes.get_unchecked(i as usize)
        };

        // How much to advance in order to get to the next UTF-8 char in the
        // string.
        let mut char_len = 1;

        if byte < 32 {
            // This is an ASCII control character, it could be one of the cases
            // that are interesting to us.

            let pos = BytePos::from_usize(i) + output_offset;

            match byte {
                b'\n' => {
                    // Record the start of the *next* line (byte after '\n').
                    lines.push(pos + BytePos(1));
                }
                b'\t' => {
                    non_narrow_chars.push(NonNarrowChar::Tab(pos));
                }
                _ => {
                    non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos));
                }
            }
        } else if byte >= 127 {
            // The slow path:
            // This is either ASCII control character "DEL" or the beginning of
            // a multibyte char. Just decode to `char`.
            let c = (&src[i..]).chars().next().unwrap();
            char_len = c.len_utf8();

            let pos = BytePos::from_usize(i) + output_offset;

            if char_len > 1 {
                assert!(char_len >= 2 && char_len <= 4);
                let mbc = MultiByteChar {
                    pos,
                    bytes: char_len as u8,
                };
                multi_byte_chars.push(mbc);
            }

            // Assume control characters are zero width.
            // FIXME: How can we decide between `width` and `width_cjk`?
            let char_width = UnicodeWidthChar::width(c).unwrap_or(0);

            if char_width != 1 {
                non_narrow_chars.push(NonNarrowChar::new(pos, char_width));
            }
        }

        i += char_len;
    }

    // How many bytes we overshot `scan_len` by (0 unless the last char was a
    // multi-byte char straddling the boundary); the caller skips these.
    i - scan_len
}

// Test harness: expands to one #[test] fn comparing the analyzer's output
// against expected line starts, multi-byte chars, and non-narrow chars.
macro_rules! test {
    (case: $test_name:ident,
     text: $text:expr,
     source_file_start_pos: $source_file_start_pos:expr,
     lines: $lines:expr,
     multi_byte_chars: $multi_byte_chars:expr,
     non_narrow_chars: $non_narrow_chars:expr,) => (

    #[test]
    fn $test_name() {

        let (lines, multi_byte_chars, non_narrow_chars) =
            analyze_source_file($text, BytePos($source_file_start_pos));

        let expected_lines: Vec<BytePos> = $lines
            .into_iter()
            .map(|pos| BytePos(pos))
            .collect();

        assert_eq!(lines, expected_lines);

        let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
            .into_iter()
            .map(|(pos, bytes)| MultiByteChar {
                pos: BytePos(pos),
                bytes,
            })
            .collect();

        assert_eq!(multi_byte_chars, expected_mbcs);

        let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
            .into_iter()
            .map(|(pos, width)| {
                NonNarrowChar::new(BytePos(pos), width)
            })
            .collect();

        assert_eq!(non_narrow_chars, expected_nncs);
    })
}

test!(
    case: empty_text,
    text: "",
    source_file_start_pos: 0,
    lines: vec![],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![],
);

test!(
    case: newlines_short,
    text: "a\nc",
    source_file_start_pos: 0,
    lines: vec![0, 2],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![],
);

test!(
    case: newlines_long,
    text: "012345678\nabcdef012345678\na",
    source_file_start_pos: 0,
    lines: vec![0, 10, 26],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![],
);

test!(
    case: newline_and_multi_byte_char_in_same_chunk,
    text: "01234β789\nbcdef0123456789abcdef",
    source_file_start_pos: 0,
    lines: vec![0, 11],
    multi_byte_chars: vec![(5, 2)],
    non_narrow_chars: vec![],
);

test!(
    case: newline_and_control_char_in_same_chunk,
    text: "01234\u{07}6789\nbcdef0123456789abcdef",
    source_file_start_pos: 0,
    lines: vec![0, 11],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![(5, 0)],
);

test!(
    case: multi_byte_char_short,
    text: "aβc",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![(1, 2)],
    non_narrow_chars: vec![],
);

test!(
    case: multi_byte_char_long,
    text: "0123456789abcΔf012345β",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![(13, 2), (22, 2)],
    non_narrow_chars: vec![],
);

test!(
    case: multi_byte_char_across_chunk_boundary,
    text: "0123456789abcdeΔ123456789abcdef01234",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![(15, 2)],
    non_narrow_chars: vec![],
);

test!(
    case: multi_byte_char_across_chunk_boundary_tail,
    text: "0123456789abcdeΔ....",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![(15, 2)],
    non_narrow_chars: vec![],
);

test!(
    case: non_narrow_short,
    text: "0\t2",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![(1, 4)],
);

test!(
    case: non_narrow_long,
    text: "01\t3456789abcdef01234567\u{07}9",
    source_file_start_pos: 0,
    lines: vec![0],
    multi_byte_chars: vec![],
    non_narrow_chars: vec![(2, 4), (24, 0)],
);

test!(
    case: output_offset_all,
    text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
    source_file_start_pos: 1000,
    lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
    multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
    non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
);
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/symbol.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! An "interner" is a data structure that associates values with usize tags and //! allows bidirectional lookup; i.e. given a value, one can easily find the //! type, and vice versa. use edition::Edition; use hygiene::SyntaxContext; use {Span, DUMMY_SP, GLOBALS}; use rustc_data_structures::fx::FxHashMap; use arena::DroplessArena; use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; use std::str; use std::cmp::{PartialEq, Ordering, PartialOrd, Ord}; use std::hash::{Hash, Hasher}; #[derive(Copy, Clone, Eq)] pub struct Ident { pub name: Symbol, pub span: Span, } impl Ident { #[inline] pub const fn new(name: Symbol, span: Span) -> Ident { Ident { name, span } } #[inline] pub const fn with_empty_ctxt(name: Symbol) -> Ident { Ident::new(name, DUMMY_SP) } /// Maps an interned string to an identifier with an empty syntax context. pub fn from_interned_str(string: InternedString) -> Ident { Ident::with_empty_ctxt(string.as_symbol()) } /// Maps a string to an identifier with an empty syntax context. pub fn from_str(string: &str) -> Ident { Ident::with_empty_ctxt(Symbol::intern(string)) } /// Replace `lo` and `hi` with those from `span`, but keep hygiene context. pub fn with_span_pos(self, span: Span) -> Ident { Ident::new(self.name, span.with_ctxt(self.span.ctxt())) } pub fn without_first_quote(self) -> Ident { Ident::new(Symbol::intern(self.as_str().trim_left_matches('\'')), self.span) } /// "Normalize" ident for use in comparisons using "item hygiene". 
/// Identifiers with same string value become same if they came from the same "modern" macro /// (e.g. `macro` item, but not `macro_rules` item) and stay different if they came from /// different "modern" macros. /// Technically, this operation strips all non-opaque marks from ident's syntactic context. pub fn modern(self) -> Ident { Ident::new(self.name, self.span.modern()) } /// "Normalize" ident for use in comparisons using "local variable hygiene". /// Identifiers with same string value become same if they came from the same non-transparent /// macro (e.g. `macro` or `macro_rules!` items) and stay different if they came from different /// non-transparent macros. /// Technically, this operation strips all transparent marks from ident's syntactic context. pub fn modern_and_legacy(self) -> Ident { Ident::new(self.name, self.span.modern_and_legacy()) } pub fn gensym(self) -> Ident { Ident::new(self.name.gensymed(), self.span) } pub fn as_str(self) -> LocalInternedString { self.name.as_str() } pub fn as_interned_str(self) -> InternedString { self.name.as_interned_str() } } impl PartialEq for Ident { fn eq(&self, rhs: &Self) -> bool { self.name == rhs.name && self.span.ctxt() == rhs.span.ctxt() } } impl Hash for Ident { fn hash<H: Hasher>(&self, state: &mut H) { self.name.hash(state); self.span.ctxt().hash(state); } } impl fmt::Debug for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}{:?}", self.name, self.span.ctxt()) } } impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.name, f) } } impl Encodable for Ident { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { if self.span.ctxt().modern() == SyntaxContext::empty() { s.emit_str(&self.as_str()) } else { // FIXME(jseyfried) intercrate hygiene let mut string = "#".to_owned(); string.push_str(&self.as_str()); s.emit_str(&string) } } } impl Decodable for Ident { fn decode<D: Decoder>(d: &mut D) -> Result<Ident, D::Error> 
{ let string = d.read_str()?; Ok(if !string.starts_with('#') { Ident::from_str(&string) } else { // FIXME(jseyfried) intercrate hygiene Ident::with_empty_ctxt(Symbol::gensym(&string[1..])) }) } } /// A symbol is an interned or gensymed string. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Symbol(u32); // The interner is pointed to by a thread local value which is only set on the main thread // with parallelization is disabled. So we don't allow Symbol to transfer between threads // to avoid panics and other errors, even though it would be memory safe to do so. #[cfg(not(parallel_queries))] impl !Send for Symbol { } #[cfg(not(parallel_queries))] impl !Sync for Symbol { } impl Symbol { /// Maps a string to its interned representation. pub fn intern(string: &str) -> Self { with_interner(|interner| interner.intern(string)) } pub fn interned(self) -> Self { with_interner(|interner| interner.interned(self)) } /// gensym's a new usize, using the current interner. pub fn gensym(string: &str) -> Self { with_interner(|interner| interner.gensym(string)) } pub fn gensymed(self) -> Self { with_interner(|interner| interner.gensymed(self)) } pub fn as_str(self) -> LocalInternedString { with_interner(|interner| unsafe { LocalInternedString { string: ::std::mem::transmute::<&str, &str>(interner.get(self)) } }) } pub fn as_interned_str(self) -> InternedString { with_interner(|interner| InternedString { symbol: interner.interned(self) }) } pub fn as_u32(self) -> u32 { self.0 } } impl fmt::Debug for Symbol { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let is_gensymed = with_interner(|interner| interner.is_gensymed(*self)); if is_gensymed { write!(f, "{}({})", self, self.0) } else { write!(f, "{}", self) } } } impl fmt::Display for Symbol { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.as_str(), f) } } impl Encodable for Symbol { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { 
s.emit_str(&self.as_str()) } } impl Decodable for Symbol { fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> { Ok(Symbol::intern(&d.read_str()?)) } } impl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol { fn eq(&self, other: &T) -> bool { self.as_str() == other.deref() } } // The &'static strs in this type actually point into the arena pub struct Interner { arena: DroplessArena, names: FxHashMap<&'static str, Symbol>, strings: Vec<&'static str>, gensyms: Vec<Symbol>, } impl Interner { pub fn new() -> Self { Interner { arena: DroplessArena::new(), names: Default::default(), strings: Default::default(), gensyms: Default::default(), } } fn prefill(init: &[&str]) -> Self { let mut this = Interner::new(); for &string in init { if string == "" { // We can't allocate empty strings in the arena, so handle this here let name = Symbol(this.strings.len() as u32); this.names.insert("", name); this.strings.push(""); } else { this.intern(string); } } this } pub fn intern(&mut self, string: &str) -> Symbol { if let Some(&name) = self.names.get(string) { return name; } let name = Symbol(self.strings.len() as u32); // from_utf8_unchecked is safe since we just allocated a &str which is known to be utf8 let string: &str = unsafe { str::from_utf8_unchecked(self.arena.alloc_slice(string.as_bytes())) }; // It is safe to extend the arena allocation to 'static because we only access // these while the arena is still alive let string: &'static str = unsafe { &*(string as *const str) }; self.strings.push(string); self.names.insert(string, name); name } pub fn interned(&self, symbol: Symbol) -> Symbol { if (symbol.0 as usize) < self.strings.len() { symbol } else { self.interned(self.gensyms[(!0 - symbol.0) as usize]) } } fn gensym(&mut self, string: &str) -> Symbol { let symbol = self.intern(string); self.gensymed(symbol) } fn gensymed(&mut self, symbol: Symbol) -> Symbol { self.gensyms.push(symbol); Symbol(!0 - self.gensyms.len() as u32 + 1) } fn is_gensymed(&mut self, 
symbol: Symbol) -> bool { symbol.0 as usize >= self.strings.len() } pub fn get(&self, symbol: Symbol) -> &str { match self.strings.get(symbol.0 as usize) { Some(string) => string, None => self.get(self.gensyms[(!0 - symbol.0) as usize]), } } } // In this macro, there is the requirement that the name (the number) must be monotonically // increasing by one in the special identifiers, starting at 0; the same holds for the keywords, // except starting from the next number instead of zero. macro_rules! declare_keywords {( $( ($index: expr, $konst: ident, $string: expr) )* ) => { pub mod keywords { use super::{Symbol, Ident}; #[derive(Clone, Copy, PartialEq, Eq)] pub struct Keyword { ident: Ident, } impl Keyword { #[inline] pub fn ident(self) -> Ident { self.ident } #[inline] pub fn name(self) -> Symbol { self.ident.name } } $( #[allow(non_upper_case_globals)] pub const $konst: Keyword = Keyword { ident: Ident::with_empty_ctxt(super::Symbol($index)) }; )* impl ::std::str::FromStr for Keyword { type Err = (); fn from_str(s: &str) -> Result<Self, ()> { match s { $($string => Ok($konst),)* _ => Err(()), } } } } impl Interner { pub fn fresh() -> Self { Interner::prefill(&[$($string,)*]) } } }} // NB: leaving holes in the ident table is bad! a different ident will get // interned with the id from the hole, but it will be between the min and max // of the reserved words, and thus tagged as "reserved". // After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, // this should be rarely necessary though if the keywords are kept in alphabetic order. declare_keywords! { // Special reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. (0, Invalid, "") (1, CrateRoot, "{{root}}") (2, DollarCrate, "$crate") (3, Underscore, "_") // Keywords used in the language. 
(4, As, "as") (5, Box, "box") (6, Break, "break") (7, Const, "const") (8, Continue, "continue") (9, Crate, "crate") (10, Else, "else") (11, Enum, "enum") (12, Extern, "extern") (13, False, "false") (14, Fn, "fn") (15, For, "for") (16, If, "if") (17, Impl, "impl") (18, In, "in") (19, Let, "let") (20, Loop, "loop") (21, Match, "match") (22, Mod, "mod") (23, Move, "move") (24, Mut, "mut") (25, Pub, "pub") (26, Ref, "ref") (27, Return, "return") (28, SelfValue, "self") (29, SelfType, "Self") (30, Static, "static") (31, Struct, "struct") (32, Super, "super") (33, Trait, "trait") (34, True, "true") (35, Type, "type") (36, Unsafe, "unsafe") (37, Use, "use") (38, Where, "where") (39, While, "while") // Keywords reserved for future use. (40, Abstract, "abstract") (41, Become, "become") (42, Do, "do") (43, Final, "final") (44, Macro, "macro") (45, Override, "override") (46, Priv, "priv") (47, Typeof, "typeof") (48, Unsized, "unsized") (49, Virtual, "virtual") (50, Yield, "yield") // Edition-specific keywords reserved for future use. (51, Async, "async") // >= 2018 Edition Only // Special lifetime names (52, UnderscoreLifetime, "'_") (53, StaticLifetime, "'static") // Weak keywords, have special meaning only in specific contexts. (54, Auto, "auto") (55, Catch, "catch") (56, Default, "default") (57, Dyn, "dyn") (58, Union, "union") (59, Existential, "existential") } impl Symbol { fn is_unused_keyword_2018(self) -> bool { self == keywords::Async.name() } } impl Ident { // Returns true for reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. pub fn is_special(self) -> bool { self.name <= keywords::Underscore.name() } /// Returns `true` if the token is a keyword used in the language. pub fn is_used_keyword(self) -> bool { self.name >= keywords::As.name() && self.name <= keywords::While.name() } /// Returns `true` if the token is a keyword reserved for possible future use. 
pub fn is_unused_keyword(self) -> bool { // Note: `span.edition()` is relatively expensive, don't call it unless necessary. self.name >= keywords::Abstract.name() && self.name <= keywords::Yield.name() || self.name.is_unused_keyword_2018() && self.span.edition() == Edition::Edition2018 } /// Returns `true` if the token is either a special identifier or a keyword. pub fn is_reserved(self) -> bool { self.is_special() || self.is_used_keyword() || self.is_unused_keyword() } /// A keyword or reserved identifier that can be used as a path segment. pub fn is_path_segment_keyword(self) -> bool { self.name == keywords::Super.name() || self.name == keywords::SelfValue.name() || self.name == keywords::SelfType.name() || self.name == keywords::Extern.name() || self.name == keywords::Crate.name() || self.name == keywords::CrateRoot.name() || self.name == keywords::DollarCrate.name() } // We see this identifier in a normal identifier position, like variable name or a type. // How was it written originally? Did it use the raw form? Let's try to guess. pub fn is_raw_guess(self) -> bool { self.name != keywords::Invalid.name() && self.is_reserved() && !self.is_path_segment_keyword() } } // If an interner exists, return it. Otherwise, prepare a fresh one. #[inline] fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T { GLOBALS.with(|globals| f(&mut *globals.symbol_interner.lock())) } /// Represents a string stored in the interner. Because the interner outlives any thread /// which uses this type, we can safely treat `string` which points to interner data, /// as an immortal string, as long as this type never crosses between threads. 
// FIXME: Ensure that the interner outlives any thread which uses LocalInternedString,
// by creating a new thread right after constructing the interner
#[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)]
pub struct LocalInternedString {
    // Points into interner storage; the 'static lifetime is a (thread-local) lie,
    // which is why this type opts out of Send/Sync below.
    string: &'static str,
}

impl LocalInternedString {
    /// Re-intern this string and return the symbol-based handle.
    pub fn as_interned_str(self) -> InternedString {
        InternedString { symbol: Symbol::intern(self.string) }
    }
}

impl<U: ?Sized> ::std::convert::AsRef<U> for LocalInternedString
where
    str: ::std::convert::AsRef<U>
{
    fn as_ref(&self) -> &U {
        self.string.as_ref()
    }
}

// Comparison impls below delegate to plain `&str` comparison so a
// `LocalInternedString` can be compared directly against string-like values.
impl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for LocalInternedString {
    fn eq(&self, other: &T) -> bool {
        self.string == other.deref()
    }
}

impl ::std::cmp::PartialEq<LocalInternedString> for str {
    fn eq(&self, other: &LocalInternedString) -> bool {
        self == other.string
    }
}

impl<'a> ::std::cmp::PartialEq<LocalInternedString> for &'a str {
    fn eq(&self, other: &LocalInternedString) -> bool {
        *self == other.string
    }
}

impl ::std::cmp::PartialEq<LocalInternedString> for String {
    fn eq(&self, other: &LocalInternedString) -> bool {
        self == other.string
    }
}

impl<'a> ::std::cmp::PartialEq<LocalInternedString> for &'a String {
    fn eq(&self, other: &LocalInternedString) -> bool {
        *self == other.string
    }
}

// Opt out of cross-thread transfer: `string` aliases interner data that is
// only valid for the thread that owns the interner.
impl !Send for LocalInternedString {}
impl !Sync for LocalInternedString {}

impl ::std::ops::Deref for LocalInternedString {
    type Target = str;
    fn deref(&self) -> &str {
        self.string
    }
}

impl fmt::Debug for LocalInternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self.string, f)
    }
}

impl fmt::Display for LocalInternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self.string, f)
    }
}

impl Decodable for LocalInternedString {
    fn decode<D: Decoder>(d: &mut D) -> Result<LocalInternedString, D::Error> {
        // Decode as a plain string, then intern it into the current interner.
        Ok(Symbol::intern(&d.read_str()?).as_str())
    }
}

impl Encodable for LocalInternedString {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_str(self.string)
    }
}

/// Represents a string stored in the string interner
#[derive(Clone, Copy, Eq)]
pub struct InternedString {
    symbol: Symbol,
}

impl InternedString {
    /// Run `f` on the interned string's contents without copying them out.
    pub fn with<F: FnOnce(&str) -> R, R>(self, f: F) -> R {
        let str = with_interner(|interner| {
            interner.get(self.symbol) as *const str
        });
        // This is safe because the interner keeps string alive until it is dropped.
        // We can access it because we know the interner is still alive since we use a
        // scoped thread local to access it, and it was alive at the beginning of this scope
        unsafe { f(&*str) }
    }

    pub fn as_symbol(self) -> Symbol {
        self.symbol
    }

    pub fn as_str(self) -> LocalInternedString {
        self.symbol.as_str()
    }
}

impl Hash for InternedString {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash by contents (not by symbol index) so equal strings hash equally.
        self.with(|str| str.hash(state))
    }
}

impl PartialOrd<InternedString> for InternedString {
    fn partial_cmp(&self, other: &InternedString) -> Option<Ordering> {
        // Fast path: identical symbols are identical strings.
        if self.symbol == other.symbol {
            return Some(Ordering::Equal);
        }
        self.with(|self_str| other.with(|other_str| self_str.partial_cmp(other_str)))
    }
}

impl Ord for InternedString {
    fn cmp(&self, other: &InternedString) -> Ordering {
        if self.symbol == other.symbol {
            return Ordering::Equal;
        }
        self.with(|self_str| other.with(|other_str| self_str.cmp(&other_str)))
    }
}

impl<T: ::std::ops::Deref<Target = str>> PartialEq<T> for InternedString {
    fn eq(&self, other: &T) -> bool {
        self.with(|string| string == other.deref())
    }
}

impl PartialEq<InternedString> for InternedString {
    fn eq(&self, other: &InternedString) -> bool {
        // Symbol equality suffices here.
        // NOTE(review): this assumes equal strings always intern to the same
        // symbol — confirm against the interner's gensym behavior.
        self.symbol == other.symbol
    }
}

impl PartialEq<InternedString> for str {
    fn eq(&self, other: &InternedString) -> bool {
        other.with(|string| self == string)
    }
}

impl<'a> PartialEq<InternedString> for &'a str {
    fn eq(&self, other: &InternedString) -> bool {
        other.with(|string| *self == string)
    }
}

impl PartialEq<InternedString> for String {
    fn eq(&self, other: &InternedString) -> bool {
        other.with(|string| self == string)
    }
}

impl<'a> PartialEq<InternedString> for &'a String {
    fn eq(&self, other: &InternedString) -> bool {
        other.with(|string| *self == string)
    }
}

impl ::std::convert::From<InternedString> for String {
    fn from(val: InternedString) -> String {
        val.as_symbol().to_string()
    }
}

impl fmt::Debug for InternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.with(|str| fmt::Debug::fmt(&str, f))
    }
}

impl fmt::Display for InternedString {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.with(|str| fmt::Display::fmt(&str, f))
    }
}

impl Decodable for InternedString {
    fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
        Ok(Symbol::intern(&d.read_str()?).as_interned_str())
    }
}

impl Encodable for InternedString {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        self.with(|string| s.emit_str(string))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use Globals;

    #[test]
    fn interner_tests() {
        let mut i: Interner = Interner::new();
        // first one is zero:
        assert_eq!(i.intern("dog"), Symbol(0));
        // re-use gets the same entry:
        assert_eq!(i.intern("dog"), Symbol(0));
        // different string gets a different #:
        assert_eq!(i.intern("cat"), Symbol(1));
        assert_eq!(i.intern("cat"), Symbol(1));
        // dog is still at zero
        assert_eq!(i.intern("dog"), Symbol(0));
        // gensyms count down from u32::MAX:
        assert_eq!(i.gensym("zebra"), Symbol(4294967295));
        // gensym of same string gets new number :
        assert_eq!(i.gensym("zebra"), Symbol(4294967294));
        // gensym of *existing* string gets new number:
        assert_eq!(i.gensym("dog"), Symbol(4294967293));
    }

    #[test]
    fn without_first_quote_test() {
        GLOBALS.set(&Globals::new(), || {
            let i = Ident::from_str("'break");
            assert_eq!(i.without_first_quote().name, keywords::Break.name());
        });
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/Cargo.toml
[package] authors = ["The Rust Project Developers"] name = "syntax_pos" version = "0.0.0" [lib] name = "syntax_pos" path = "lib.rs" [dependencies] serialize = { path = "../libserialize" } rustc_data_structures = { path = "../librustc_data_structures" } arena = { path = "../libarena" } scoped-tls = { version = "0.1.1", features = ["nightly"] } unicode-width = "0.1.4" cfg-if = "0.1.2"
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/span_encoding.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Spans are encoded using 1-bit tag and 2 different encoding formats (one for each tag value).
// One format is used for keeping span data inline,
// another contains index into an out-of-line span interner.
// The encoding format for inline spans were obtained by optimizing over crates in rustc/libstd.
// See https://internals.rust-lang.org/t/rfc-compiler-refactoring-spans/1357/28

use GLOBALS;
use {BytePos, SpanData};
use hygiene::SyntaxContext;

use rustc_data_structures::fx::FxHashMap;
use std::hash::{Hash, Hasher};

/// A compressed span.
/// Contains either fields of `SpanData` inline if they are small, or index into span interner.
/// The primary goal of `Span` is to be as small as possible and fit into other structures
/// (that's why it uses `packed` as well). Decoding speed is the second priority.
/// See `SpanData` for the info on span fields in decoded representation.
#[repr(packed)]
pub struct Span(u32);

impl Copy for Span {}

impl Clone for Span {
    #[inline]
    fn clone(&self) -> Span {
        *self
    }
}

impl PartialEq for Span {
    #[inline]
    fn eq(&self, other: &Span) -> bool {
        // Copy the fields out before comparing: referencing a field of a
        // `#[repr(packed)]` type directly may produce an unaligned reference.
        let a = self.0;
        let b = other.0;
        a == b
    }
}

impl Eq for Span {}

impl Hash for Span {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Same copy-out-of-packed trick as in `eq` above.
        let a = self.0;
        a.hash(state)
    }
}

/// Dummy span, both position and length are zero, syntax context is zero as well.
/// This span is kept inline and encoded with format 0.
pub const DUMMY_SP: Span = Span(0);

impl Span {
    #[inline]
    pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Self {
        // Normalize so the stored data always satisfies `lo <= hi`.
        encode(&match lo <= hi {
            true => SpanData { lo, hi, ctxt },
            false => SpanData { lo: hi, hi: lo, ctxt },
        })
    }

    /// Decompress back into the full three-field `SpanData`.
    #[inline]
    pub fn data(self) -> SpanData {
        decode(self)
    }
}

// Tags
const TAG_INLINE: u32 = 0;
const TAG_INTERNED: u32 = 1;
const TAG_MASK: u32 = 1;

// Fields indexes
const BASE_INDEX: usize = 0;
const LEN_INDEX: usize = 1;
const CTXT_INDEX: usize = 2;

// Tag = 0, inline format.
// -------------------------------------------------------------
// | base 31:8 | len 7:1 | ctxt (currently 0 bits) | tag 0:0 |
// -------------------------------------------------------------
// Since there are zero bits for ctxt, only SpanData with a 0 SyntaxContext
// can be inline.
const INLINE_SIZES: [u32; 3] = [24, 7, 0];
const INLINE_OFFSETS: [u32; 3] = [8, 1, 1];

// Tag = 1, interned format.
// ------------------------
// | index 31:1 | tag 0:0 |
// ------------------------
const INTERNED_INDEX_SIZE: u32 = 31;
const INTERNED_INDEX_OFFSET: u32 = 1;

#[inline]
fn encode(sd: &SpanData) -> Span {
    let (base, len, ctxt) = (sd.lo.0, sd.hi.0 - sd.lo.0, sd.ctxt.as_u32());

    // Use the inline format when every field fits its bit budget; otherwise
    // intern the full `SpanData` and store only the interner index.
    let val = if (base >> INLINE_SIZES[BASE_INDEX]) == 0 &&
                 (len >> INLINE_SIZES[LEN_INDEX]) == 0 &&
                 (ctxt >> INLINE_SIZES[CTXT_INDEX]) == 0 {
        (base << INLINE_OFFSETS[BASE_INDEX]) |
        (len << INLINE_OFFSETS[LEN_INDEX]) |
        (ctxt << INLINE_OFFSETS[CTXT_INDEX]) |
        TAG_INLINE
    } else {
        let index = with_span_interner(|interner| interner.intern(sd));
        (index << INTERNED_INDEX_OFFSET) | TAG_INTERNED
    };
    Span(val)
}

#[inline]
fn decode(span: Span) -> SpanData {
    let val = span.0;

    // Extract a field at position `pos` having size `size`.
    let extract = |pos: u32, size: u32| {
        let mask = ((!0u32) as u64 >> (32 - size)) as u32; // Can't shift u32 by 32
        (val >> pos) & mask
    };

    let (base, len, ctxt) = if val & TAG_MASK == TAG_INLINE {
        (
            extract(INLINE_OFFSETS[BASE_INDEX], INLINE_SIZES[BASE_INDEX]),
            extract(INLINE_OFFSETS[LEN_INDEX], INLINE_SIZES[LEN_INDEX]),
            extract(INLINE_OFFSETS[CTXT_INDEX], INLINE_SIZES[CTXT_INDEX]),
        )
    } else {
        // Interned format: the payload is an index into the out-of-line table.
        let index = extract(INTERNED_INDEX_OFFSET, INTERNED_INDEX_SIZE);
        return with_span_interner(|interner| *interner.get(index));
    };
    SpanData { lo: BytePos(base), hi: BytePos(base + len), ctxt: SyntaxContext::from_u32(ctxt) }
}

/// Out-of-line storage for spans that don't fit the inline encoding.
#[derive(Default)]
pub struct SpanInterner {
    // Reverse map for deduplication; `span_data` is the index-addressed store.
    spans: FxHashMap<SpanData, u32>,
    span_data: Vec<SpanData>,
}

impl SpanInterner {
    /// Deduplicating insert: returns the existing index when `span_data`
    /// was interned before, otherwise appends it and returns the new index.
    fn intern(&mut self, span_data: &SpanData) -> u32 {
        if let Some(index) = self.spans.get(span_data) {
            return *index;
        }
        let index = self.spans.len() as u32;
        self.span_data.push(*span_data);
        self.spans.insert(*span_data, index);
        index
    }

    #[inline]
    fn get(&self, index: u32) -> &SpanData {
        &self.span_data[index as usize]
    }
}

// If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline]
fn with_span_interner<T, F: FnOnce(&mut SpanInterner) -> T>(f: F) -> T {
    GLOBALS.with(|globals| f(&mut *globals.span_interner.lock()))
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/lib.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The source positions and related helper functions //! //! # Note //! //! This API is completely unstable and subject to change. #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(const_fn)] #![feature(crate_visibility_modifier)] #![feature(custom_attribute)] #![cfg_attr(not(stage0), feature(nll))] #![feature(non_exhaustive)] #![feature(optin_builtin_traits)] #![feature(specialization)] #![feature(stdsimd)] use std::borrow::Cow; use std::cell::Cell; use std::cmp::{self, Ordering}; use std::fmt; use std::hash::{Hasher, Hash}; use std::ops::{Add, Sub}; use std::path::PathBuf; use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::sync::{Lrc, Lock}; extern crate arena; extern crate rustc_data_structures; #[macro_use] extern crate scoped_tls; use serialize::{Encodable, Decodable, Encoder, Decoder}; extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving #[macro_use] extern crate cfg_if; extern crate unicode_width; pub mod edition; pub mod hygiene; pub use hygiene::{Mark, SyntaxContext, ExpnInfo, ExpnFormat, CompilerDesugaringKind}; mod span_encoding; pub use span_encoding::{Span, DUMMY_SP}; pub mod symbol; mod analyze_source_file; pub struct Globals { symbol_interner: Lock<symbol::Interner>, span_interner: Lock<span_encoding::SpanInterner>, hygiene_data: Lock<hygiene::HygieneData>, } 
impl Globals {
    /// Build a fresh set of globals with empty interners and hygiene state.
    pub fn new() -> Globals {
        Globals {
            symbol_interner: Lock::new(symbol::Interner::fresh()),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            hygiene_data: Lock::new(hygiene::HygieneData::new()),
        }
    }
}

scoped_thread_local!(pub static GLOBALS: Globals);

/// Differentiates between real files and common virtual files
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
pub enum FileName {
    Real(PathBuf),
    /// e.g. "std" macros
    Macros(String),
    /// call to `quote!`
    QuoteExpansion,
    /// Command line
    Anon,
    /// Hack in src/libsyntax/parse.rs
    /// FIXME(jseyfried)
    MacroExpansion,
    ProcMacroSourceCode,
    /// Strings provided as --cfg [cfgspec] stored in a crate_cfg
    CfgSpec,
    /// Strings provided as crate attributes in the CLI
    CliCrateAttr,
    /// Custom sources for explicit parser calls from plugins and drivers
    Custom(String),
}

impl std::fmt::Display for FileName {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        use self::FileName::*;
        match *self {
            Real(ref path) => write!(fmt, "{}", path.display()),
            Macros(ref name) => write!(fmt, "<{} macros>", name),
            QuoteExpansion => write!(fmt, "<quote expansion>"),
            MacroExpansion => write!(fmt, "<macro expansion>"),
            Anon => write!(fmt, "<anon>"),
            ProcMacroSourceCode => write!(fmt, "<proc-macro source code>"),
            CfgSpec => write!(fmt, "cfgspec"),
            CliCrateAttr => write!(fmt, "<crate attribute>"),
            Custom(ref s) => write!(fmt, "<{}>", s),
        }
    }
}

impl From<PathBuf> for FileName {
    fn from(p: PathBuf) -> Self {
        // A trailing '>' would make a real path ambiguous with the
        // "<...>" virtual-file names printed by `Display` above.
        assert!(!p.to_string_lossy().ends_with('>'));
        FileName::Real(p)
    }
}

impl FileName {
    /// True only for on-disk files (`Real`), false for every virtual source.
    pub fn is_real(&self) -> bool {
        use self::FileName::*;
        match *self {
            Real(_) => true,
            Macros(_) |
            Anon |
            MacroExpansion |
            ProcMacroSourceCode |
            CfgSpec |
            CliCrateAttr |
            Custom(_) |
            QuoteExpansion => false,
        }
    }

    /// True only for the `Macros` variant.
    pub fn is_macros(&self) -> bool {
        use self::FileName::*;
        match *self {
            Real(_) |
            Anon |
            MacroExpansion |
            ProcMacroSourceCode |
            CfgSpec |
            CliCrateAttr |
            Custom(_) |
            QuoteExpansion => false,
            Macros(_) => true,
        }
    }
}

/// Spans represent a region of code, used for error reporting. Positions in spans
/// are *absolute* positions from the beginning of the source_map, not positions
/// relative to SourceFiles. Methods on the SourceMap can be used to relate spans back
/// to the original source.
/// You must be careful if the span crosses more than one file - you will not be
/// able to use many of the functions on spans in source_map and you cannot assume
/// that the length of the span = hi - lo; there may be space in the BytePos
/// range between files.
///
/// `SpanData` is public because `Span` uses a thread-local interner and can't be
/// sent to other threads, but some pieces of performance infra run in a separate thread.
/// Using `Span` is generally preferred.
#[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct SpanData {
    pub lo: BytePos,
    pub hi: BytePos,
    /// Information about where the macro came from, if this piece of
    /// code was created by a macro expansion.
    pub ctxt: SyntaxContext,
}

impl SpanData {
    // Each `with_*` builds a new (re-encoded) `Span` with one field replaced.
    #[inline]
    pub fn with_lo(&self, lo: BytePos) -> Span {
        Span::new(lo, self.hi, self.ctxt)
    }
    #[inline]
    pub fn with_hi(&self, hi: BytePos) -> Span {
        Span::new(self.lo, hi, self.ctxt)
    }
    #[inline]
    pub fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
        Span::new(self.lo, self.hi, ctxt)
    }
}

// The interner is pointed to by a thread local value which is only set on the main thread
// when parallelization is disabled. So we don't allow Span to transfer between threads
// to avoid panics and other errors, even though it would be memory safe to do so.
#[cfg(not(parallel_queries))]
impl !Send for Span {}
#[cfg(not(parallel_queries))]
impl !Sync for Span {}

impl PartialOrd for Span {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        // Order by decoded data, not by the compressed bit pattern.
        PartialOrd::partial_cmp(&self.data(), &rhs.data())
    }
}
impl Ord for Span {
    fn cmp(&self, rhs: &Self) -> Ordering {
        Ord::cmp(&self.data(), &rhs.data())
    }
}

/// A collection of spans. Spans have two orthogonal attributes:
///
/// - they can be *primary spans*. In this case they are the locus of
///   the error, and would be rendered with `^^^`.
/// - they can have a *label*. In this case, the label is written next
///   to the mark in the snippet when we render.
#[derive(Clone, Debug, Hash, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub struct MultiSpan {
    primary_spans: Vec<Span>,
    span_labels: Vec<(Span, String)>,
}

impl Span {
    // Field accessors and per-field replacements, all via decode/re-encode.
    #[inline]
    pub fn lo(self) -> BytePos {
        self.data().lo
    }
    #[inline]
    pub fn with_lo(self, lo: BytePos) -> Span {
        self.data().with_lo(lo)
    }
    #[inline]
    pub fn hi(self) -> BytePos {
        self.data().hi
    }
    #[inline]
    pub fn with_hi(self, hi: BytePos) -> Span {
        self.data().with_hi(hi)
    }
    #[inline]
    pub fn ctxt(self) -> SyntaxContext {
        self.data().ctxt
    }
    #[inline]
    pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
        self.data().with_ctxt(ctxt)
    }

    /// Returns `true` if this is a dummy span with any hygienic context.
    #[inline]
    pub fn is_dummy(self) -> bool {
        let span = self.data();
        span.lo.0 == 0 && span.hi.0 == 0
    }

    /// Returns a new span representing an empty span at the beginning of this span
    #[inline]
    pub fn shrink_to_lo(self) -> Span {
        let span = self.data();
        span.with_hi(span.lo)
    }

    /// Returns a new span representing an empty span at the end of this span
    #[inline]
    pub fn shrink_to_hi(self) -> Span {
        let span = self.data();
        span.with_lo(span.hi)
    }

    /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
    pub fn substitute_dummy(self, other: Span) -> Span {
        if self.is_dummy() { other } else { self }
    }

    /// Return true if `self` fully encloses `other`.
    pub fn contains(self, other: Span) -> bool {
        let span = self.data();
        let other = other.data();
        span.lo <= other.lo && other.hi <= span.hi
    }

    /// Return true if the spans are equal with regards to the source text.
    ///
    /// Use this instead of `==` when either span could be generated code,
    /// and you only care that they point to the same bytes of source text.
pub fn source_equal(&self, other: &Span) -> bool {
    // Deliberately ignores `ctxt`: only byte positions are compared.
    let span = self.data();
    let other = other.data();
    span.lo == other.lo && span.hi == other.hi
}

/// Returns `Some(span)`, where the start is trimmed by the end of `other`
pub fn trim_start(self, other: Span) -> Option<Span> {
    let span = self.data();
    let other = other.data();
    if span.hi > other.hi {
        Some(span.with_lo(cmp::max(span.lo, other.hi)))
    } else {
        // `other` covers us entirely; nothing remains.
        None
    }
}

/// Return the source span - this is either the supplied span, or the span for
/// the macro callsite that expanded to it.
pub fn source_callsite(self) -> Span {
    // Recurses through the expansion chain until a span with no expansion info.
    self.ctxt().outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self)
}

/// The `Span` for the tokens in the previous macro expansion from which `self` was generated,
/// if any
pub fn parent(self) -> Option<Span> {
    self.ctxt().outer().expn_info().map(|i| i.call_site)
}

/// Edition of the crate from which this span came.
pub fn edition(self) -> edition::Edition {
    self.ctxt().outer().expn_info().map_or_else(|| hygiene::default_edition(),
                                                |einfo| einfo.edition)
}

/// Return the source callee.
///
/// Returns `None` if the supplied span has no expansion trace,
/// else returns the `ExpnInfo` for the macro definition
/// corresponding to the source callsite.
pub fn source_callee(self) -> Option<ExpnInfo> {
    // Walk the call-site chain to the outermost expansion.
    fn source_callee(info: ExpnInfo) -> ExpnInfo {
        match info.call_site.ctxt().outer().expn_info() {
            Some(info) => source_callee(info),
            None => info,
        }
    }
    self.ctxt().outer().expn_info().map(source_callee)
}

/// Check if a span is "internal" to a macro in which #[unstable]
/// items can be used (that is, a macro marked with
/// `#[allow_internal_unstable]`).
pub fn allows_unstable(&self) -> bool {
    match self.ctxt().outer().expn_info() {
        Some(info) => info.allow_internal_unstable,
        None => false,
    }
}

/// Check if this span arises from a compiler desugaring of kind `kind`.
pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool {
    match self.ctxt().outer().expn_info() {
        Some(info) => match info.format {
            ExpnFormat::CompilerDesugaring(k) => k == kind,
            _ => false,
        },
        None => false,
    }
}

/// Return the compiler desugaring that created this span, or None
/// if this span is not from a desugaring.
pub fn compiler_desugaring_kind(&self) -> Option<CompilerDesugaringKind> {
    match self.ctxt().outer().expn_info() {
        Some(info) => match info.format {
            ExpnFormat::CompilerDesugaring(k) => Some(k),
            _ => None
        },
        None => None
    }
}

/// Check if a span is "internal" to a macro in which `unsafe`
/// can be used without triggering the `unsafe_code` lint
//  (that is, a macro marked with `#[allow_internal_unsafe]`).
pub fn allows_unsafe(&self) -> bool {
    match self.ctxt().outer().expn_info() {
        Some(info) => info.allow_internal_unsafe,
        None => false,
    }
}

/// Collect one `MacroBacktrace` frame per (non-repeated) macro expansion
/// between this span and its original source location.
pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> {
    let mut prev_span = DUMMY_SP;
    let mut result = vec![];
    while let Some(info) = self.ctxt().outer().expn_info() {
        // Don't print recursive invocations
        if !info.call_site.source_equal(&prev_span) {
            let (pre, post) = match info.format {
                ExpnFormat::MacroAttribute(..) => ("#[", "]"),
                ExpnFormat::MacroBang(..) => ("", "!"),
                ExpnFormat::CompilerDesugaring(..) => ("desugaring of `", "`"),
            };
            result.push(MacroBacktrace {
                call_site: info.call_site,
                macro_decl_name: format!("{}{}{}", pre, info.format.name(), post),
                def_site_span: info.def_site,
            });
        }
        prev_span = self;
        self = info.call_site;
    }
    result
}

/// Return a `Span` that would enclose both `self` and `end`.
pub fn to(self, end: Span) -> Span {
    let span_data = self.data();
    let end_data = end.data();
    // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
    // Return the macro span on its own to avoid weird diagnostic output. It is preferable to
    // have an incomplete span than a completely nonsensical one.
    if span_data.ctxt != end_data.ctxt {
        if span_data.ctxt == SyntaxContext::empty() {
            return end;
        } else if end_data.ctxt == SyntaxContext::empty() {
            return self;
        }
        // both span fall within a macro
        // FIXME(estebank) check if it is the *same* macro
    }
    Span::new(
        cmp::min(span_data.lo, end_data.lo),
        cmp::max(span_data.hi, end_data.hi),
        // Prefer the macro context when exactly one side has one.
        if span_data.ctxt == SyntaxContext::empty() { end_data.ctxt } else { span_data.ctxt },
    )
}

/// Return a `Span` between the end of `self` to the beginning of `end`.
pub fn between(self, end: Span) -> Span {
    let span = self.data();
    let end = end.data();
    Span::new(
        span.hi,
        end.lo,
        if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
    )
}

/// Return a `Span` between the beginning of `self` to the beginning of `end`.
pub fn until(self, end: Span) -> Span {
    let span = self.data();
    let end = end.data();
    Span::new(
        span.lo,
        end.lo,
        if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
    )
}

/// Sub-span of `self` given byte offsets `start..end` relative to `self.lo`.
pub fn from_inner_byte_pos(self, start: usize, end: usize) -> Span {
    let span = self.data();
    Span::new(span.lo + BytePos::from_usize(start),
              span.lo + BytePos::from_usize(end),
              span.ctxt)
}

// The hygiene helpers below decode the span, update its `SyntaxContext`
// via the corresponding `hygiene` operation, and re-encode in place.
#[inline]
pub fn apply_mark(self, mark: Mark) -> Span {
    let span = self.data();
    span.with_ctxt(span.ctxt.apply_mark(mark))
}

#[inline]
pub fn remove_mark(&mut self) -> Mark {
    let mut span = self.data();
    let mark = span.ctxt.remove_mark();
    *self = Span::new(span.lo, span.hi, span.ctxt);
    mark
}

#[inline]
pub fn adjust(&mut self, expansion: Mark) -> Option<Mark> {
    let mut span = self.data();
    let mark = span.ctxt.adjust(expansion);
    *self = Span::new(span.lo, span.hi, span.ctxt);
    mark
}

#[inline]
pub fn glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
                   -> Option<Option<Mark>> {
    let mut span = self.data();
    let mark = span.ctxt.glob_adjust(expansion, glob_ctxt);
    *self = Span::new(span.lo, span.hi, span.ctxt);
    mark
}

#[inline]
pub fn reverse_glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
                           -> Option<Option<Mark>> {
let mut span = self.data(); let mark = span.ctxt.reverse_glob_adjust(expansion, glob_ctxt); *self = Span::new(span.lo, span.hi, span.ctxt); mark } #[inline] pub fn modern(self) -> Span { let span = self.data(); span.with_ctxt(span.ctxt.modern()) } #[inline] pub fn modern_and_legacy(self) -> Span { let span = self.data(); span.with_ctxt(span.ctxt.modern_and_legacy()) } } #[derive(Clone, Debug)] pub struct SpanLabel { /// The span we are going to include in the final snippet. pub span: Span, /// Is this a primary span? This is the "locus" of the message, /// and is indicated with a `^^^^` underline, versus `----`. pub is_primary: bool, /// What label should we attach to this span (if any)? pub label: Option<String>, } impl Default for Span { fn default() -> Self { DUMMY_SP } } impl serialize::UseSpecializedEncodable for Span { fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { let span = self.data(); s.emit_struct("Span", 2, |s| { s.emit_struct_field("lo", 0, |s| { span.lo.encode(s) })?; s.emit_struct_field("hi", 1, |s| { span.hi.encode(s) }) }) } } impl serialize::UseSpecializedDecodable for Span { fn default_decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> { d.read_struct("Span", 2, |d| { let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?; Ok(Span::new(lo, hi, NO_EXPANSION)) }) } } fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Span") .field("lo", &span.lo()) .field("hi", &span.hi()) .field("ctxt", &span.ctxt()) .finish() } impl fmt::Debug for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { SPAN_DEBUG.with(|span_debug| span_debug.get()(*self, f)) } } impl fmt::Debug for SpanData { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { SPAN_DEBUG.with(|span_debug| span_debug.get()(Span::new(self.lo, self.hi, self.ctxt), f)) } } impl MultiSpan { pub fn new() -> MultiSpan { MultiSpan { primary_spans: vec![], 
span_labels: vec![] } } pub fn from_span(primary_span: Span) -> MultiSpan { MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } } pub fn from_spans(vec: Vec<Span>) -> MultiSpan { MultiSpan { primary_spans: vec, span_labels: vec![] } } pub fn push_span_label(&mut self, span: Span, label: String) { self.span_labels.push((span, label)); } /// Selects the first primary span (if any) pub fn primary_span(&self) -> Option<Span> { self.primary_spans.first().cloned() } /// Returns all primary spans. pub fn primary_spans(&self) -> &[Span] { &self.primary_spans } /// Replaces all occurrences of one Span with another. Used to move Spans in areas that don't /// display well (like std macros). Returns true if replacements occurred. pub fn replace(&mut self, before: Span, after: Span) -> bool { let mut replacements_occurred = false; for primary_span in &mut self.primary_spans { if *primary_span == before { *primary_span = after; replacements_occurred = true; } } for span_label in &mut self.span_labels { if span_label.0 == before { span_label.0 = after; replacements_occurred = true; } } replacements_occurred } /// Returns the strings to highlight. We always ensure that there /// is an entry for each of the primary spans -- for each primary /// span P, if there is at least one label with span P, we return /// those labels (marked as primary). But otherwise we return /// `SpanLabel` instances with empty labels. 
pub fn span_labels(&self) -> Vec<SpanLabel> { let is_primary = |span| self.primary_spans.contains(&span); let mut span_labels = self.span_labels.iter().map(|&(span, ref label)| SpanLabel { span, is_primary: is_primary(span), label: Some(label.clone()) } ).collect::<Vec<_>>(); for &span in &self.primary_spans { if !span_labels.iter().any(|sl| sl.span == span) { span_labels.push(SpanLabel { span, is_primary: true, label: None }); } } span_labels } } impl From<Span> for MultiSpan { fn from(span: Span) -> MultiSpan { MultiSpan::from_span(span) } } impl From<Vec<Span>> for MultiSpan { fn from(spans: Vec<Span>) -> MultiSpan { MultiSpan::from_spans(spans) } } pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); /// Identifies an offset of a multi-byte character in a SourceFile #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] pub struct MultiByteChar { /// The absolute offset of the character in the SourceMap pub pos: BytePos, /// The number of bytes, >=2 pub bytes: u8, } /// Identifies an offset of a non-narrow character in a SourceFile #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] pub enum NonNarrowChar { /// Represents a zero-width character ZeroWidth(BytePos), /// Represents a wide (fullwidth) character Wide(BytePos), /// Represents a tab character, represented visually with a width of 4 characters Tab(BytePos), } impl NonNarrowChar { fn new(pos: BytePos, width: usize) -> Self { match width { 0 => NonNarrowChar::ZeroWidth(pos), 2 => NonNarrowChar::Wide(pos), 4 => NonNarrowChar::Tab(pos), _ => panic!("width {} given for non-narrow character", width), } } /// Returns the absolute offset of the character in the SourceMap pub fn pos(&self) -> BytePos { match *self { NonNarrowChar::ZeroWidth(p) | NonNarrowChar::Wide(p) | NonNarrowChar::Tab(p) => p, } } /// Returns the width of the character, 0 (zero-width) or 2 (wide) pub fn width(&self) -> usize { match *self { NonNarrowChar::ZeroWidth(_) => 0, 
NonNarrowChar::Wide(_) => 2, NonNarrowChar::Tab(_) => 4, } } } impl Add<BytePos> for NonNarrowChar { type Output = Self; fn add(self, rhs: BytePos) -> Self { match self { NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), } } } impl Sub<BytePos> for NonNarrowChar { type Output = Self; fn sub(self, rhs: BytePos) -> Self { match self { NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), } } } /// The state of the lazy external source loading mechanism of a SourceFile. #[derive(PartialEq, Eq, Clone)] pub enum ExternalSource { /// The external source has been loaded already. Present(String), /// No attempt has been made to load the external source. AbsentOk, /// A failed attempt has been made to load the external source. AbsentErr, /// No external source has to be loaded, since the SourceFile represents a local crate. Unneeded, } impl ExternalSource { pub fn is_absent(&self) -> bool { match *self { ExternalSource::Present(_) => false, _ => true, } } pub fn get_source(&self) -> Option<&str> { match *self { ExternalSource::Present(ref src) => Some(src), _ => None, } } } /// A single source in the SourceMap. #[derive(Clone)] pub struct SourceFile { /// The name of the file that the source came from, source that doesn't /// originate from files has names between angle brackets by convention, /// e.g. `<anon>` pub name: FileName, /// True if the `name` field above has been modified by --remap-path-prefix pub name_was_remapped: bool, /// The unmapped path of the file that the source came from. /// Set to `None` if the SourceFile was imported from an external crate. pub unmapped_path: Option<FileName>, /// Indicates which crate this SourceFile was imported from. 
pub crate_of_origin: u32, /// The complete source code pub src: Option<Lrc<String>>, /// The source code's hash pub src_hash: u128, /// The external source code (used for external crates, which will have a `None` /// value as `self.src`. pub external_src: Lock<ExternalSource>, /// The start position of this source in the SourceMap pub start_pos: BytePos, /// The end position of this source in the SourceMap pub end_pos: BytePos, /// Locations of lines beginnings in the source code pub lines: Vec<BytePos>, /// Locations of multi-byte characters in the source code pub multibyte_chars: Vec<MultiByteChar>, /// Width of characters that are not narrow in the source code pub non_narrow_chars: Vec<NonNarrowChar>, /// A hash of the filename, used for speeding up the incr. comp. hashing. pub name_hash: u128, } impl Encodable for SourceFile { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("SourceFile", 8, |s| { s.emit_struct_field("name", 0, |s| self.name.encode(s))?; s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?; s.emit_struct_field("src_hash", 2, |s| self.src_hash.encode(s))?; s.emit_struct_field("start_pos", 4, |s| self.start_pos.encode(s))?; s.emit_struct_field("end_pos", 5, |s| self.end_pos.encode(s))?; s.emit_struct_field("lines", 6, |s| { let lines = &self.lines[..]; // store the length s.emit_u32(lines.len() as u32)?; if !lines.is_empty() { // In order to preserve some space, we exploit the fact that // the lines list is sorted and individual lines are // probably not that long. Because of that we can store lines // as a difference list, using as little space as possible // for the differences. let max_line_length = if lines.len() == 1 { 0 } else { lines.windows(2) .map(|w| w[1] - w[0]) .map(|bp| bp.to_usize()) .max() .unwrap() }; let bytes_per_diff: u8 = match max_line_length { 0 ..= 0xFF => 1, 0x100 ..= 0xFFFF => 2, _ => 4 }; // Encode the number of bytes used per diff. 
bytes_per_diff.encode(s)?; // Encode the first element. lines[0].encode(s)?; let diff_iter = (&lines[..]).windows(2) .map(|w| (w[1] - w[0])); match bytes_per_diff { 1 => for diff in diff_iter { (diff.0 as u8).encode(s)? }, 2 => for diff in diff_iter { (diff.0 as u16).encode(s)? }, 4 => for diff in diff_iter { diff.0.encode(s)? }, _ => unreachable!() } } Ok(()) })?; s.emit_struct_field("multibyte_chars", 7, |s| { self.multibyte_chars.encode(s) })?; s.emit_struct_field("non_narrow_chars", 8, |s| { self.non_narrow_chars.encode(s) })?; s.emit_struct_field("name_hash", 9, |s| { self.name_hash.encode(s) }) }) } } impl Decodable for SourceFile { fn decode<D: Decoder>(d: &mut D) -> Result<SourceFile, D::Error> { d.read_struct("SourceFile", 8, |d| { let name: FileName = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; let name_was_remapped: bool = d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?; let src_hash: u128 = d.read_struct_field("src_hash", 2, |d| Decodable::decode(d))?; let start_pos: BytePos = d.read_struct_field("start_pos", 4, |d| Decodable::decode(d))?; let end_pos: BytePos = d.read_struct_field("end_pos", 5, |d| Decodable::decode(d))?; let lines: Vec<BytePos> = d.read_struct_field("lines", 6, |d| { let num_lines: u32 = Decodable::decode(d)?; let mut lines = Vec::with_capacity(num_lines as usize); if num_lines > 0 { // Read the number of bytes used per diff. let bytes_per_diff: u8 = Decodable::decode(d)?; // Read the first element. let mut line_start: BytePos = Decodable::decode(d)?; lines.push(line_start); for _ in 1..num_lines { let diff = match bytes_per_diff { 1 => d.read_u8()? as u32, 2 => d.read_u16()? 
as u32, 4 => d.read_u32()?, _ => unreachable!() }; line_start = line_start + BytePos(diff); lines.push(line_start); } } Ok(lines) })?; let multibyte_chars: Vec<MultiByteChar> = d.read_struct_field("multibyte_chars", 7, |d| Decodable::decode(d))?; let non_narrow_chars: Vec<NonNarrowChar> = d.read_struct_field("non_narrow_chars", 8, |d| Decodable::decode(d))?; let name_hash: u128 = d.read_struct_field("name_hash", 9, |d| Decodable::decode(d))?; Ok(SourceFile { name, name_was_remapped, unmapped_path: None, // `crate_of_origin` has to be set by the importer. // This value matches up with rustc::hir::def_id::INVALID_CRATE. // That constant is not available here unfortunately :( crate_of_origin: ::std::u32::MAX - 1, start_pos, end_pos, src: None, src_hash, external_src: Lock::new(ExternalSource::AbsentOk), lines, multibyte_chars, non_narrow_chars, name_hash, }) }) } } impl fmt::Debug for SourceFile { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { write!(fmt, "SourceFile({})", self.name) } } impl SourceFile { pub fn new(name: FileName, name_was_remapped: bool, unmapped_path: FileName, mut src: String, start_pos: BytePos) -> SourceFile { remove_bom(&mut src); let src_hash = { let mut hasher: StableHasher<u128> = StableHasher::new(); hasher.write(src.as_bytes()); hasher.finish() }; let name_hash = { let mut hasher: StableHasher<u128> = StableHasher::new(); name.hash(&mut hasher); hasher.finish() }; let end_pos = start_pos.to_usize() + src.len(); let (lines, multibyte_chars, non_narrow_chars) = analyze_source_file::analyze_source_file(&src[..], start_pos); SourceFile { name, name_was_remapped, unmapped_path: Some(unmapped_path), crate_of_origin: 0, src: Some(Lrc::new(src)), src_hash, external_src: Lock::new(ExternalSource::Unneeded), start_pos, end_pos: Pos::from_usize(end_pos), lines, multibyte_chars, non_narrow_chars, name_hash, } } /// Return the BytePos of the beginning of the current line. 
pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { let line_index = self.lookup_line(pos).unwrap(); self.lines[line_index] } /// Add externally loaded source. /// If the hash of the input doesn't match or no input is supplied via None, /// it is interpreted as an error and the corresponding enum variant is set. /// The return value signifies whether some kind of source is present. pub fn add_external_src<F>(&self, get_src: F) -> bool where F: FnOnce() -> Option<String> { if *self.external_src.borrow() == ExternalSource::AbsentOk { let src = get_src(); let mut external_src = self.external_src.borrow_mut(); // Check that no-one else have provided the source while we were getting it if *external_src == ExternalSource::AbsentOk { if let Some(src) = src { let mut hasher: StableHasher<u128> = StableHasher::new(); hasher.write(src.as_bytes()); if hasher.finish() == self.src_hash { *external_src = ExternalSource::Present(src); return true; } } else { *external_src = ExternalSource::AbsentErr; } false } else { self.src.is_some() || external_src.get_source().is_some() } } else { self.src.is_some() || self.external_src.borrow().get_source().is_some() } } /// Get a line from the list of pre-computed line-beginnings. /// The line number here is 0-based. pub fn get_line(&self, line_number: usize) -> Option<Cow<str>> { fn get_until_newline(src: &str, begin: usize) -> &str { // We can't use `lines.get(line_number+1)` because we might // be parsing when we call this function and thus the current // line is the last one we have line info for. 
let slice = &src[begin..]; match slice.find('\n') { Some(e) => &slice[..e], None => slice } } let begin = { let line = if let Some(line) = self.lines.get(line_number) { line } else { return None; }; let begin: BytePos = *line - self.start_pos; begin.to_usize() }; if let Some(ref src) = self.src { Some(Cow::from(get_until_newline(src, begin))) } else if let Some(src) = self.external_src.borrow().get_source() { Some(Cow::Owned(String::from(get_until_newline(src, begin)))) } else { None } } pub fn is_real_file(&self) -> bool { self.name.is_real() } pub fn is_imported(&self) -> bool { self.src.is_none() } pub fn byte_length(&self) -> u32 { self.end_pos.0 - self.start_pos.0 } pub fn count_lines(&self) -> usize { self.lines.len() } /// Find the line containing the given position. The return value is the /// index into the `lines` array of this SourceFile, not the 1-based line /// number. If the source_file is empty or the position is located before the /// first line, None is returned. pub fn lookup_line(&self, pos: BytePos) -> Option<usize> { if self.lines.len() == 0 { return None; } let line_index = lookup_line(&self.lines[..], pos); assert!(line_index < self.lines.len() as isize); if line_index >= 0 { Some(line_index as usize) } else { None } } pub fn line_bounds(&self, line_index: usize) -> (BytePos, BytePos) { if self.start_pos == self.end_pos { return (self.start_pos, self.end_pos); } assert!(line_index < self.lines.len()); if line_index == (self.lines.len() - 1) { (self.lines[line_index], self.end_pos) } else { (self.lines[line_index], self.lines[line_index + 1]) } } #[inline] pub fn contains(&self, byte_pos: BytePos) -> bool { byte_pos >= self.start_pos && byte_pos <= self.end_pos } } /// Remove utf-8 BOM if any. 
fn remove_bom(src: &mut String) { if src.starts_with("\u{feff}") { src.drain(..3); } } // _____________________________________________________________________________ // Pos, BytePos, CharPos // pub trait Pos { fn from_usize(n: usize) -> Self; fn to_usize(&self) -> usize; fn from_u32(n: u32) -> Self; fn to_u32(&self) -> u32; } /// A byte offset. Keep this small (currently 32-bits), as AST contains /// a lot of them. #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct BytePos(pub u32); /// A character offset. Because of multibyte utf8 characters, a byte offset /// is not equivalent to a character offset. The SourceMap will convert BytePos /// values to CharPos values as necessary. #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct CharPos(pub usize); // FIXME: Lots of boilerplate in these impls, but so far my attempts to fix // have been unsuccessful impl Pos for BytePos { #[inline(always)] fn from_usize(n: usize) -> BytePos { BytePos(n as u32) } #[inline(always)] fn to_usize(&self) -> usize { self.0 as usize } #[inline(always)] fn from_u32(n: u32) -> BytePos { BytePos(n) } #[inline(always)] fn to_u32(&self) -> u32 { self.0 } } impl Add for BytePos { type Output = BytePos; #[inline(always)] fn add(self, rhs: BytePos) -> BytePos { BytePos((self.to_usize() + rhs.to_usize()) as u32) } } impl Sub for BytePos { type Output = BytePos; #[inline(always)] fn sub(self, rhs: BytePos) -> BytePos { BytePos((self.to_usize() - rhs.to_usize()) as u32) } } impl Encodable for BytePos { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u32(self.0) } } impl Decodable for BytePos { fn decode<D: Decoder>(d: &mut D) -> Result<BytePos, D::Error> { Ok(BytePos(d.read_u32()?)) } } impl Pos for CharPos { #[inline(always)] fn from_usize(n: usize) -> CharPos { CharPos(n) } #[inline(always)] fn to_usize(&self) -> usize { self.0 } #[inline(always)] fn from_u32(n: u32) -> CharPos { CharPos(n as usize) } #[inline(always)] 
fn to_u32(&self) -> u32 { self.0 as u32} } impl Add for CharPos { type Output = CharPos; #[inline(always)] fn add(self, rhs: CharPos) -> CharPos { CharPos(self.to_usize() + rhs.to_usize()) } } impl Sub for CharPos { type Output = CharPos; #[inline(always)] fn sub(self, rhs: CharPos) -> CharPos { CharPos(self.to_usize() - rhs.to_usize()) } } // _____________________________________________________________________________ // Loc, LocWithOpt, SourceFileAndLine, SourceFileAndBytePos // /// A source code location used for error reporting #[derive(Debug, Clone)] pub struct Loc { /// Information about the original source pub file: Lrc<SourceFile>, /// The (1-based) line number pub line: usize, /// The (0-based) column offset pub col: CharPos, /// The (0-based) column offset when displayed pub col_display: usize, } /// A source code location used as the result of lookup_char_pos_adj // Actually, *none* of the clients use the filename *or* file field; // perhaps they should just be removed. #[derive(Debug)] pub struct LocWithOpt { pub filename: FileName, pub line: usize, pub col: CharPos, pub file: Option<Lrc<SourceFile>>, } // used to be structural records. Better names, anyone? #[derive(Debug)] pub struct SourceFileAndLine { pub fm: Lrc<SourceFile>, pub line: usize } #[derive(Debug)] pub struct SourceFileAndBytePos { pub fm: Lrc<SourceFile>, pub pos: BytePos } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct LineInfo { /// Index of line, starting from 0. pub line_index: usize, /// Column in line where span begins, starting from 0. pub start_col: CharPos, /// Column in line where span ends, starting from 0, exclusive. 
pub end_col: CharPos, } pub struct FileLines { pub file: Lrc<SourceFile>, pub lines: Vec<LineInfo> } thread_local!(pub static SPAN_DEBUG: Cell<fn(Span, &mut fmt::Formatter) -> fmt::Result> = Cell::new(default_span_debug)); #[derive(Debug)] pub struct MacroBacktrace { /// span where macro was applied to generate this code pub call_site: Span, /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]") pub macro_decl_name: String, /// span where macro was defined (if known) pub def_site_span: Option<Span>, } // _____________________________________________________________________________ // SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions // pub type FileLinesResult = Result<FileLines, SpanLinesError>; #[derive(Clone, PartialEq, Eq, Debug)] pub enum SpanLinesError { IllFormedSpan(Span), DistinctSources(DistinctSources), } #[derive(Clone, PartialEq, Eq, Debug)] pub enum SpanSnippetError { IllFormedSpan(Span), DistinctSources(DistinctSources), MalformedForCodemap(MalformedCodemapPositions), SourceNotAvailable { filename: FileName } } #[derive(Clone, PartialEq, Eq, Debug)] pub struct DistinctSources { pub begin: (FileName, BytePos), pub end: (FileName, BytePos) } #[derive(Clone, PartialEq, Eq, Debug)] pub struct MalformedCodemapPositions { pub name: FileName, pub source_len: usize, pub begin_pos: BytePos, pub end_pos: BytePos } // Given a slice of line start positions and a position, returns the index of // the line the position is on. Returns -1 if the position is located before // the first line. 
fn lookup_line(lines: &[BytePos], pos: BytePos) -> isize { match lines.binary_search(&pos) { Ok(line) => line as isize, Err(line) => line as isize - 1 } } #[cfg(test)] mod tests { use super::{lookup_line, BytePos}; #[test] fn test_lookup_line() { let lines = &[BytePos(3), BytePos(17), BytePos(28)]; assert_eq!(lookup_line(lines, BytePos(0)), -1); assert_eq!(lookup_line(lines, BytePos(3)), 0); assert_eq!(lookup_line(lines, BytePos(4)), 0); assert_eq!(lookup_line(lines, BytePos(16)), 0); assert_eq!(lookup_line(lines, BytePos(17)), 1); assert_eq!(lookup_line(lines, BytePos(18)), 1); assert_eq!(lookup_line(lines, BytePos(28)), 2); assert_eq!(lookup_line(lines, BytePos(29)), 2); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/hygiene.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Machinery for hygienic macros, inspired by the `MTWT[1]` paper. //! //! `[1]` Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. 2012. //! *Macros that work together: Compile-time bindings, partial expansion, //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! DOI=10.1017/S0956796812000093 <https://doi.org/10.1017/S0956796812000093> use GLOBALS; use Span; use edition::Edition; use symbol::Symbol; use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::collections::HashMap; use rustc_data_structures::fx::FxHashSet; use std::fmt; /// A SyntaxContext represents a chain of macro expansions (represented by marks). #[derive(Clone, Copy, PartialEq, Eq, Default, PartialOrd, Ord, Hash)] pub struct SyntaxContext(u32); #[derive(Copy, Clone, Debug)] struct SyntaxContextData { outer_mark: Mark, transparency: Transparency, prev_ctxt: SyntaxContext, // This context, but with all transparent and semi-transparent marks filtered away. opaque: SyntaxContext, // This context, but with all transparent marks filtered away. opaque_and_semitransparent: SyntaxContext, } /// A mark is a unique id associated with a macro expansion. 
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct Mark(u32); #[derive(Clone, Debug)] struct MarkData { parent: Mark, default_transparency: Transparency, is_builtin: bool, expn_info: Option<ExpnInfo>, } /// A property of a macro expansion that determines how identifiers /// produced by that expansion are resolved. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] pub enum Transparency { /// Identifier produced by a transparent expansion is always resolved at call-site. /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. Transparent, /// Identifier produced by a semi-transparent expansion may be resolved /// either at call-site or at definition-site. /// If it's a local variable, label or `$crate` then it's resolved at def-site. /// Otherwise it's resolved at call-site. /// `macro_rules` macros behave like this, built-in macros currently behave like this too, /// but that's an implementation detail. SemiTransparent, /// Identifier produced by an opaque expansion is always resolved at definition-site. /// Def-site spans in procedural macros, identifiers from `macro` by default use this. Opaque, } impl Mark { pub fn fresh(parent: Mark) -> Self { HygieneData::with(|data| { data.marks.push(MarkData { parent, // By default expansions behave like `macro_rules`. default_transparency: Transparency::SemiTransparent, is_builtin: false, expn_info: None, }); Mark(data.marks.len() as u32 - 1) }) } /// The mark of the theoretical expansion that generates freshly parsed, unexpanded AST. 
#[inline] pub fn root() -> Self { Mark(0) } #[inline] pub fn as_u32(self) -> u32 { self.0 } #[inline] pub fn from_u32(raw: u32) -> Mark { Mark(raw) } #[inline] pub fn expn_info(self) -> Option<ExpnInfo> { HygieneData::with(|data| data.marks[self.0 as usize].expn_info.clone()) } #[inline] pub fn set_expn_info(self, info: ExpnInfo) { HygieneData::with(|data| { let old_info = &mut data.marks[self.0 as usize].expn_info; if let Some(old_info) = old_info { panic!("expansion info is reset for the mark {}\nold: {:#?}\nnew: {:#?}", self.0, old_info, info); } *old_info = Some(info); }) } #[inline] pub fn set_default_transparency(self, transparency: Transparency) { assert_ne!(self, Mark::root()); HygieneData::with(|data| data.marks[self.0 as usize].default_transparency = transparency) } #[inline] pub fn is_builtin(self) -> bool { assert_ne!(self, Mark::root()); HygieneData::with(|data| data.marks[self.0 as usize].is_builtin) } #[inline] pub fn set_is_builtin(self, is_builtin: bool) { assert_ne!(self, Mark::root()); HygieneData::with(|data| data.marks[self.0 as usize].is_builtin = is_builtin) } pub fn is_descendant_of(mut self, ancestor: Mark) -> bool { HygieneData::with(|data| { while self != ancestor { if self == Mark::root() { return false; } self = data.marks[self.0 as usize].parent; } true }) } /// Computes a mark such that both input marks are descendants of (or equal to) the returned /// mark. 
That is, the following holds: /// /// ```rust /// let la = least_ancestor(a, b); /// assert!(a.is_descendant_of(la)) /// assert!(b.is_descendant_of(la)) /// ``` pub fn least_ancestor(mut a: Mark, mut b: Mark) -> Mark { HygieneData::with(|data| { // Compute the path from a to the root let mut a_path = FxHashSet::<Mark>(); while a != Mark::root() { a_path.insert(a); a = data.marks[a.0 as usize].parent; } // While the path from b to the root hasn't intersected, move up the tree while !a_path.contains(&b) { b = data.marks[b.0 as usize].parent; } b }) } // Used for enabling some compatibility fallback in resolve. #[inline] pub fn looks_like_proc_macro_derive(self) -> bool { HygieneData::with(|data| { let mark_data = &data.marks[self.0 as usize]; if mark_data.default_transparency == Transparency::Opaque { if let Some(expn_info) = &mark_data.expn_info { if let ExpnFormat::MacroAttribute(name) = expn_info.format { if name.as_str().starts_with("derive(") { return true; } } } } false }) } } #[derive(Debug)] crate struct HygieneData { marks: Vec<MarkData>, syntax_contexts: Vec<SyntaxContextData>, markings: HashMap<(SyntaxContext, Mark, Transparency), SyntaxContext>, default_edition: Edition, } impl HygieneData { crate fn new() -> Self { HygieneData { marks: vec![MarkData { parent: Mark::root(), // If the root is opaque, then loops searching for an opaque mark // will automatically stop after reaching it. 
// NOTE(review): this span begins in the tail of `HygieneData::new` — the
// function's opening lies before this chunk. The fields below complete the
// root `MarkData` / `SyntaxContextData` entries (index 0 is the root).
            default_transparency: Transparency::Opaque,
            is_builtin: true,
            expn_info: None,
        }],
        syntax_contexts: vec![SyntaxContextData {
            outer_mark: Mark::root(),
            transparency: Transparency::Opaque,
            prev_ctxt: SyntaxContext(0),
            opaque: SyntaxContext(0),
            opaque_and_semitransparent: SyntaxContext(0),
        }],
        markings: HashMap::new(),
        default_edition: Edition::Edition2015,
    }
}

    // Runs `f` with mutable access to the session-global hygiene tables
    // (stored in thread-local `GLOBALS`).
    fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
        GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut()))
    }
}

pub fn default_edition() -> Edition {
    HygieneData::with(|data| data.default_edition)
}

pub fn set_default_edition(edition: Edition) {
    HygieneData::with(|data| data.default_edition = edition);
}

pub fn clear_markings() {
    HygieneData::with(|data| data.markings = HashMap::new());
}

impl SyntaxContext {
    /// The empty context (index 0 in the global table); outside any expansion.
    pub const fn empty() -> Self {
        SyntaxContext(0)
    }

    crate fn as_u32(self) -> u32 {
        self.0
    }

    crate fn from_u32(raw: u32) -> SyntaxContext {
        SyntaxContext(raw)
    }

    // Allocate a new SyntaxContext with the given ExpnInfo. This is used when
    // deserializing Spans from the incr. comp. cache.
    // FIXME(mw): This method does not restore MarkData::parent or
    // SyntaxContextData::prev_ctxt or SyntaxContextData::opaque. These things
    // don't seem to be used after HIR lowering, so everything should be fine
    // as long as incremental compilation does not kick in before that.
    pub fn allocate_directly(expansion_info: ExpnInfo) -> Self {
        HygieneData::with(|data| {
            data.marks.push(MarkData {
                parent: Mark::root(),
                default_transparency: Transparency::SemiTransparent,
                is_builtin: false,
                expn_info: Some(expansion_info),
            });
            // The mark/context indices are the freshly pushed last slots.
            let mark = Mark(data.marks.len() as u32 - 1);
            data.syntax_contexts.push(SyntaxContextData {
                outer_mark: mark,
                transparency: Transparency::SemiTransparent,
                prev_ctxt: SyntaxContext::empty(),
                opaque: SyntaxContext::empty(),
                opaque_and_semitransparent: SyntaxContext::empty(),
            });
            SyntaxContext(data.syntax_contexts.len() as u32 - 1)
        })
    }

    /// Extend a syntax context with a given mark and default transparency for that mark.
    pub fn apply_mark(self, mark: Mark) -> SyntaxContext {
        assert_ne!(mark, Mark::root());
        self.apply_mark_with_transparency(
            mark,
            HygieneData::with(|data| data.marks[mark.0 as usize].default_transparency)
        )
    }

    /// Extend a syntax context with a given mark and transparency
    pub fn apply_mark_with_transparency(self, mark: Mark, transparency: Transparency)
                                        -> SyntaxContext {
        assert_ne!(mark, Mark::root());
        if transparency == Transparency::Opaque {
            return self.apply_mark_internal(mark, transparency);
        }

        let call_site_ctxt =
            mark.expn_info().map_or(SyntaxContext::empty(), |info| info.call_site.ctxt());
        let call_site_ctxt = if transparency == Transparency::SemiTransparent {
            call_site_ctxt.modern()
        } else {
            call_site_ctxt.modern_and_legacy()
        };

        if call_site_ctxt == SyntaxContext::empty() {
            return self.apply_mark_internal(mark, transparency);
        }

        // Otherwise, `mark` is a macros 1.0 definition and the call site is in a
        // macros 2.0 expansion, i.e. a macros 1.0 invocation is in a macros 2.0 definition.
        //
        // In this case, the tokens from the macros 1.0 definition inherit the hygiene
        // at their invocation. That is, we pretend that the macros 1.0 definition
        // was defined at its invocation (i.e. inside the macros 2.0 definition)
        // so that the macros 2.0 definition remains hygienic.
        //
        // See the example at `test/run-pass/hygiene/legacy_interaction.rs`.
        let mut ctxt = call_site_ctxt;
        for (mark, transparency) in self.marks() {
            ctxt = ctxt.apply_mark_internal(mark, transparency);
        }
        ctxt.apply_mark_internal(mark, transparency)
    }

    // Applies `mark` to `self`, memoizing the resulting context in
    // `data.markings` (keyed by (prev_ctxt, mark, transparency)) so identical
    // applications reuse one `SyntaxContextData` slot. Also keeps the cached
    // `opaque` and `opaque_and_semitransparent` ancestors up to date.
    fn apply_mark_internal(self, mark: Mark, transparency: Transparency) -> SyntaxContext {
        HygieneData::with(|data| {
            let syntax_contexts = &mut data.syntax_contexts;
            let mut opaque = syntax_contexts[self.0 as usize].opaque;
            let mut opaque_and_semitransparent =
                syntax_contexts[self.0 as usize].opaque_and_semitransparent;

            if transparency >= Transparency::Opaque {
                let prev_ctxt = opaque;
                opaque = *data.markings.entry((prev_ctxt, mark, transparency)).or_insert_with(|| {
                    let new_opaque = SyntaxContext(syntax_contexts.len() as u32);
                    syntax_contexts.push(SyntaxContextData {
                        outer_mark: mark,
                        transparency,
                        prev_ctxt,
                        opaque: new_opaque,
                        opaque_and_semitransparent: new_opaque,
                    });
                    new_opaque
                });
            }

            if transparency >= Transparency::SemiTransparent {
                let prev_ctxt = opaque_and_semitransparent;
                opaque_and_semitransparent =
                    *data.markings.entry((prev_ctxt, mark, transparency)).or_insert_with(|| {
                        let new_opaque_and_semitransparent =
                            SyntaxContext(syntax_contexts.len() as u32);
                        syntax_contexts.push(SyntaxContextData {
                            outer_mark: mark,
                            transparency,
                            prev_ctxt,
                            opaque,
                            opaque_and_semitransparent: new_opaque_and_semitransparent,
                        });
                        new_opaque_and_semitransparent
                    });
            }

            let prev_ctxt = self;
            *data.markings.entry((prev_ctxt, mark, transparency)).or_insert_with(|| {
                let new_opaque_and_semitransparent_and_transparent =
                    SyntaxContext(syntax_contexts.len() as u32);
                syntax_contexts.push(SyntaxContextData {
                    outer_mark: mark,
                    transparency,
                    prev_ctxt,
                    opaque,
                    opaque_and_semitransparent,
                });
                new_opaque_and_semitransparent_and_transparent
            })
        })
    }

    /// Pulls a single mark off of the syntax context. This effectively moves the
    /// context up one macro definition level. That is, if we have a nested macro
    /// definition as follows:
    ///
    /// ```rust
    /// macro_rules! f {
    ///     macro_rules! g {
    ///         ...
    ///     }
    /// }
    /// ```
    ///
    /// and we have a SyntaxContext that is referring to something declared by an invocation
    /// of g (call it g1), calling remove_mark will result in the SyntaxContext for the
    /// invocation of f that created g1.
    /// Returns the mark that was removed.
    pub fn remove_mark(&mut self) -> Mark {
        HygieneData::with(|data| {
            let outer_mark = data.syntax_contexts[self.0 as usize].outer_mark;
            *self = data.syntax_contexts[self.0 as usize].prev_ctxt;
            outer_mark
        })
    }

    // Collects the full chain of (mark, transparency) pairs applied to this
    // context, ordered outermost-first (reversed from the prev_ctxt walk).
    pub fn marks(mut self) -> Vec<(Mark, Transparency)> {
        HygieneData::with(|data| {
            let mut marks = Vec::new();
            while self != SyntaxContext::empty() {
                let ctxt_data = &data.syntax_contexts[self.0 as usize];
                marks.push((ctxt_data.outer_mark, ctxt_data.transparency));
                self = ctxt_data.prev_ctxt;
            }
            marks.reverse();
            marks
        })
    }

    /// Adjust this context for resolution in a scope created by the given expansion.
    /// For example, consider the following three resolutions of `f`:
    ///
    /// ```rust
    /// mod foo { pub fn f() {} } // `f`'s `SyntaxContext` is empty.
    /// m!(f);
    /// macro m($f:ident) {
    ///     mod bar {
    ///         pub fn f() {} // `f`'s `SyntaxContext` has a single `Mark` from `m`.
    ///         pub fn $f() {} // `$f`'s `SyntaxContext` is empty.
    ///     }
    ///     foo::f(); // `f`'s `SyntaxContext` has a single `Mark` from `m`
    ///     //^ Since `mod foo` is outside this expansion, `adjust` removes the mark from `f`,
    ///     //| and it resolves to `::foo::f`.
    ///     bar::f(); // `f`'s `SyntaxContext` has a single `Mark` from `m`
    ///     //^ Since `mod bar` not outside this expansion, `adjust` does not change `f`,
    ///     //| and it resolves to `::bar::f`.
    ///     bar::$f(); // `f`'s `SyntaxContext` is empty.
    ///     //^ Since `mod bar` is not outside this expansion, `adjust` does not change `$f`,
    ///     //| and it resolves to `::bar::$f`.
    /// }
    /// ```
    /// This returns the expansion whose definition scope we use to privacy check the resolution,
    /// or `None` if we privacy check as usual (i.e. not w.r.t. a macro definition scope).
    pub fn adjust(&mut self, expansion: Mark) -> Option<Mark> {
        let mut scope = None;
        while !expansion.is_descendant_of(self.outer()) {
            scope = Some(self.remove_mark());
        }
        scope
    }

    /// Adjust this context for resolution in a scope created by the given expansion
    /// via a glob import with the given `SyntaxContext`.
    /// For example:
    ///
    /// ```rust
    /// m!(f);
    /// macro m($i:ident) {
    ///     mod foo {
    ///         pub fn f() {} // `f`'s `SyntaxContext` has a single `Mark` from `m`.
    ///         pub fn $i() {} // `$i`'s `SyntaxContext` is empty.
    ///     }
    ///     n(f);
    ///     macro n($j:ident) {
    ///         use foo::*;
    ///         f(); // `f`'s `SyntaxContext` has a mark from `m` and a mark from `n`
    ///         //^ `glob_adjust` removes the mark from `n`, so this resolves to `foo::f`.
    ///         $i(); // `$i`'s `SyntaxContext` has a mark from `n`
    ///         //^ `glob_adjust` removes the mark from `n`, so this resolves to `foo::$i`.
    ///         $j(); // `$j`'s `SyntaxContext` has a mark from `m`
    ///         //^ This cannot be glob-adjusted, so this is a resolution error.
    ///     }
    /// }
    /// ```
    /// This returns `None` if the context cannot be glob-adjusted.
    /// Otherwise, it returns the scope to use when privacy checking (see `adjust` for details).
    pub fn glob_adjust(&mut self, expansion: Mark, mut glob_ctxt: SyntaxContext)
                       -> Option<Option<Mark>> {
        let mut scope = None;
        while !expansion.is_descendant_of(glob_ctxt.outer()) {
            scope = Some(glob_ctxt.remove_mark());
            if self.remove_mark() != scope.unwrap() {
                return None;
            }
        }
        if self.adjust(expansion).is_some() {
            return None;
        }
        Some(scope)
    }

    /// Undo `glob_adjust` if possible:
    ///
    /// ```rust
    /// if let Some(privacy_checking_scope) = self.reverse_glob_adjust(expansion, glob_ctxt) {
    ///     assert!(self.glob_adjust(expansion, glob_ctxt) == Some(privacy_checking_scope));
    /// }
    /// ```
    pub fn reverse_glob_adjust(&mut self, expansion: Mark, mut glob_ctxt: SyntaxContext)
                               -> Option<Option<Mark>> {
        if self.adjust(expansion).is_some() {
            return None;
        }

        let mut marks = Vec::new();
        while !expansion.is_descendant_of(glob_ctxt.outer()) {
            marks.push(glob_ctxt.remove_mark());
        }

        // Re-apply the removed marks in reverse (innermost-last) order.
        let scope = marks.last().cloned();
        while let Some(mark) = marks.pop() {
            *self = self.apply_mark(mark);
        }
        Some(scope)
    }

    // The cached fully-opaque ancestor of this context (macros 2.0 view).
    #[inline]
    pub fn modern(self) -> SyntaxContext {
        HygieneData::with(|data| data.syntax_contexts[self.0 as usize].opaque)
    }

    // The cached opaque-and-semitransparent ancestor (macros 1.0 + 2.0 view).
    #[inline]
    pub fn modern_and_legacy(self) -> SyntaxContext {
        HygieneData::with(|data| data.syntax_contexts[self.0 as usize].opaque_and_semitransparent)
    }

    // The outermost mark applied to this context.
    #[inline]
    pub fn outer(self) -> Mark {
        HygieneData::with(|data| data.syntax_contexts[self.0 as usize].outer_mark)
    }
}

impl fmt::Debug for SyntaxContext {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "#{}", self.0)
    }
}

/// Extra information for tracking spans of macro and syntax sugar expansion
#[derive(Clone, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ExpnInfo {
    /// The location of the actual macro invocation or syntax sugar, e.g.
    /// `let x = foo!();` or `if let Some(y) = x {}`
    ///
    /// This may recursively refer to other macro invocations, e.g. if
    /// `foo!()` invoked `bar!()` internally, and there was an
    /// expression inside `bar!`; the call_site of the expression in
    /// the expansion would point to the `bar!` invocation; that
    /// call_site span would have its own ExpnInfo, with the call_site
    /// pointing to the `foo!` invocation.
    pub call_site: Span,
    /// The span of the macro definition itself. The macro may not
    /// have a sensible definition span (e.g. something defined
    /// completely inside libsyntax) in which case this is None.
    /// This span serves only informational purpose and is not used for resolution.
    pub def_site: Option<Span>,
    /// The format with which the macro was invoked.
    pub format: ExpnFormat,
    /// Whether the macro is allowed to use #[unstable]/feature-gated
    /// features internally without forcing the whole crate to opt-in
    /// to them.
    pub allow_internal_unstable: bool,
    /// Whether the macro is allowed to use `unsafe` internally
    /// even if the user crate has `#![forbid(unsafe_code)]`.
    pub allow_internal_unsafe: bool,
    /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
    /// for a given macro.
    pub local_inner_macros: bool,
    /// Edition of the crate in which the macro is defined.
    pub edition: Edition,
}

/// The source of expansion.
#[derive(Clone, Hash, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum ExpnFormat {
    /// e.g. #[derive(...)] <item>
    MacroAttribute(Symbol),
    /// e.g. `format!()`
    MacroBang(Symbol),
    /// Desugaring done by the compiler during HIR lowering.
    CompilerDesugaring(CompilerDesugaringKind)
}

impl ExpnFormat {
    /// The name of the macro (or desugaring) that produced the expansion.
    pub fn name(&self) -> Symbol {
        match *self {
            ExpnFormat::MacroBang(name) | ExpnFormat::MacroAttribute(name) => name,
            ExpnFormat::CompilerDesugaring(kind) => kind.name(),
        }
    }
}

/// The kind of compiler desugaring.
#[derive(Clone, Copy, Hash, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum CompilerDesugaringKind {
    QuestionMark,
    Catch,
    /// Desugaring of an `impl Trait` in return type position
    /// to an `existential type Foo: Trait;` + replacing the
    /// `impl Trait` with `Foo`.
    ExistentialReturnType,
    Async,
    ForLoop,
}

impl CompilerDesugaringKind {
    /// Human-readable name used when reporting the desugaring in diagnostics.
    pub fn name(self) -> Symbol {
        Symbol::intern(match self {
            CompilerDesugaringKind::Async => "async",
            CompilerDesugaringKind::QuestionMark => "?",
            CompilerDesugaringKind::Catch => "do catch",
            CompilerDesugaringKind::ExistentialReturnType => "existential type",
            CompilerDesugaringKind::ForLoop => "for loop",
        })
    }
}

impl Encodable for SyntaxContext {
    fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
        Ok(()) // FIXME(jseyfried) intercrate hygiene
    }
}

impl Decodable for SyntaxContext {
    fn decode<D: Decoder>(_: &mut D) -> Result<SyntaxContext, D::Error> {
        Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/libsyntax_pos/edition.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fmt; use std::str::FromStr; /// The edition of the compiler (RFC 2052) #[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable)] #[non_exhaustive] pub enum Edition { // editions must be kept in order, oldest to newest /// The 2015 edition Edition2015, /// The 2018 edition Edition2018, // when adding new editions, be sure to update: // // - Update the `ALL_EDITIONS` const // - Update the EDITION_NAME_LIST const // - add a `rust_####()` function to the session // - update the enum in Cargo's sources as well } // must be in order from oldest to newest pub const ALL_EDITIONS: &[Edition] = &[Edition::Edition2015, Edition::Edition2018]; pub const EDITION_NAME_LIST: &'static str = "2015|2018"; pub const DEFAULT_EDITION: Edition = Edition::Edition2015; impl fmt::Display for Edition { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = match *self { Edition::Edition2015 => "2015", Edition::Edition2018 => "2018", }; write!(f, "{}", s) } } impl Edition { pub fn lint_name(&self) -> &'static str { match *self { Edition::Edition2015 => "rust_2015_compatibility", Edition::Edition2018 => "rust_2018_compatibility", } } pub fn feature_name(&self) -> &'static str { match *self { Edition::Edition2015 => "rust_2015_preview", Edition::Edition2018 => "rust_2018_preview", } } pub fn is_stable(&self) -> bool { match *self { Edition::Edition2015 => true, Edition::Edition2018 => false, } } } impl FromStr for Edition { type Err = (); fn from_str(s: &str) -> Result<Self, ()> { match s 
{ "2015" => Ok(Edition::Edition2015), "2018" => Ok(Edition::Edition2018), _ => Err(()) } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/sorted_map.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::borrow::Borrow; use std::cmp::Ordering; use std::convert::From; use std::mem; use std::ops::{RangeBounds, Bound, Index, IndexMut}; /// `SortedMap` is a data structure with similar characteristics as BTreeMap but /// slightly different trade-offs: lookup, inseration, and removal are O(log(N)) /// and elements can be iterated in order cheaply. /// /// `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it /// stores data in a more compact way. It also supports accessing contiguous /// ranges of elements as a slice, and slices of already sorted elements can be /// inserted efficiently. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug, RustcEncodable, RustcDecodable)] pub struct SortedMap<K: Ord, V> { data: Vec<(K,V)> } impl<K: Ord, V> SortedMap<K, V> { #[inline] pub fn new() -> SortedMap<K, V> { SortedMap { data: vec![] } } /// Construct a `SortedMap` from a presorted set of elements. This is faster /// than creating an empty map and then inserting the elements individually. /// /// It is up to the caller to make sure that the elements are sorted by key /// and that there are no duplicates. 
#[inline] pub fn from_presorted_elements(elements: Vec<(K, V)>) -> SortedMap<K, V> { debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0)); SortedMap { data: elements } } #[inline] pub fn insert(&mut self, key: K, mut value: V) -> Option<V> { match self.lookup_index_for(&key) { Ok(index) => { let slot = unsafe { self.data.get_unchecked_mut(index) }; mem::swap(&mut slot.1, &mut value); Some(value) } Err(index) => { self.data.insert(index, (key, value)); None } } } #[inline] pub fn remove(&mut self, key: &K) -> Option<V> { match self.lookup_index_for(key) { Ok(index) => { Some(self.data.remove(index).1) } Err(_) => { None } } } #[inline] pub fn get(&self, key: &K) -> Option<&V> { match self.lookup_index_for(key) { Ok(index) => { unsafe { Some(&self.data.get_unchecked(index).1) } } Err(_) => { None } } } #[inline] pub fn get_mut(&mut self, key: &K) -> Option<&mut V> { match self.lookup_index_for(key) { Ok(index) => { unsafe { Some(&mut self.data.get_unchecked_mut(index).1) } } Err(_) => { None } } } #[inline] pub fn clear(&mut self) { self.data.clear(); } /// Iterate over elements, sorted by key #[inline] pub fn iter(&self) -> ::std::slice::Iter<(K, V)> { self.data.iter() } /// Iterate over the keys, sorted #[inline] pub fn keys(&self) -> impl Iterator<Item=&K> + ExactSizeIterator { self.data.iter().map(|&(ref k, _)| k) } /// Iterate over values, sorted by key #[inline] pub fn values(&self) -> impl Iterator<Item=&V> + ExactSizeIterator { self.data.iter().map(|&(_, ref v)| v) } #[inline] pub fn len(&self) -> usize { self.data.len() } #[inline] pub fn range<R>(&self, range: R) -> &[(K, V)] where R: RangeBounds<K> { let (start, end) = self.range_slice_indices(range); (&self.data[start .. end]) } #[inline] pub fn remove_range<R>(&mut self, range: R) where R: RangeBounds<K> { let (start, end) = self.range_slice_indices(range); self.data.splice(start .. end, ::std::iter::empty()); } /// Mutate all keys with the given function `f`. 
This mutation must not /// change the sort-order of keys. #[inline] pub fn offset_keys<F>(&mut self, f: F) where F: Fn(&mut K) { self.data.iter_mut().map(|&mut (ref mut k, _)| k).for_each(f); } /// Inserts a presorted range of elements into the map. If the range can be /// inserted as a whole in between to existing elements of the map, this /// will be faster than inserting the elements individually. /// /// It is up to the caller to make sure that the elements are sorted by key /// and that there are no duplicates. #[inline] pub fn insert_presorted(&mut self, mut elements: Vec<(K, V)>) { if elements.is_empty() { return } debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0)); let start_index = self.lookup_index_for(&elements[0].0); let drain = match start_index { Ok(index) => { let mut drain = elements.drain(..); self.data[index] = drain.next().unwrap(); drain } Err(index) => { if index == self.data.len() || elements.last().unwrap().0 < self.data[index].0 { // We can copy the whole range without having to mix with // existing elements. self.data.splice(index .. index, elements.drain(..)); return } let mut drain = elements.drain(..); self.data.insert(index, drain.next().unwrap()); drain } }; // Insert the rest for (k, v) in drain { self.insert(k, v); } } /// Looks up the key in `self.data` via `slice::binary_search()`. 
#[inline(always)] fn lookup_index_for(&self, key: &K) -> Result<usize, usize> { self.data.binary_search_by(|&(ref x, _)| x.cmp(key)) } #[inline] fn range_slice_indices<R>(&self, range: R) -> (usize, usize) where R: RangeBounds<K> { let start = match range.start_bound() { Bound::Included(ref k) => { match self.lookup_index_for(k) { Ok(index) | Err(index) => index } } Bound::Excluded(ref k) => { match self.lookup_index_for(k) { Ok(index) => index + 1, Err(index) => index, } } Bound::Unbounded => 0, }; let end = match range.end_bound() { Bound::Included(ref k) => { match self.lookup_index_for(k) { Ok(index) => index + 1, Err(index) => index, } } Bound::Excluded(ref k) => { match self.lookup_index_for(k) { Ok(index) | Err(index) => index, } } Bound::Unbounded => self.data.len(), }; (start, end) } } impl<K: Ord, V> IntoIterator for SortedMap<K, V> { type Item = (K, V); type IntoIter = ::std::vec::IntoIter<(K, V)>; fn into_iter(self) -> Self::IntoIter { self.data.into_iter() } } impl<K: Ord, V, Q: Borrow<K>> Index<Q> for SortedMap<K, V> { type Output = V; fn index(&self, index: Q) -> &Self::Output { let k: &K = index.borrow(); self.get(k).unwrap() } } impl<K: Ord, V, Q: Borrow<K>> IndexMut<Q> for SortedMap<K, V> { fn index_mut(&mut self, index: Q) -> &mut Self::Output { let k: &K = index.borrow(); self.get_mut(k).unwrap() } } impl<K: Ord, V, I: Iterator<Item=(K, V)>> From<I> for SortedMap<K, V> { fn from(data: I) -> Self { let mut data: Vec<(K, V)> = data.collect(); data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2)); data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| { k1.cmp(k2) == Ordering::Equal }); SortedMap { data } } } #[cfg(test)] mod tests { use super::SortedMap; #[test] fn test_insert_and_iter() { let mut map = SortedMap::new(); let mut expected = Vec::new(); for x in 0 .. 
100 { assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected); let x = 1000 - x * 2; map.insert(x, x); expected.insert(0, (x, x)); } } #[test] fn test_get_and_index() { let mut map = SortedMap::new(); let mut expected = Vec::new(); for x in 0 .. 100 { let x = 1000 - x; if x & 1 == 0 { map.insert(x, x); } expected.push(x); } for mut x in expected { if x & 1 == 0 { assert_eq!(map.get(&x), Some(&x)); assert_eq!(map.get_mut(&x), Some(&mut x)); assert_eq!(map[&x], x); assert_eq!(&mut map[&x], &mut x); } else { assert_eq!(map.get(&x), None); assert_eq!(map.get_mut(&x), None); } } } #[test] fn test_range() { let mut map = SortedMap::new(); map.insert(1, 1); map.insert(3, 3); map.insert(6, 6); map.insert(9, 9); let keys = |s: &[(_, _)]| { s.into_iter().map(|e| e.0).collect::<Vec<u32>>() }; for start in 0 .. 11 { for end in 0 .. 11 { if end < start { continue } let mut expected = vec![1, 3, 6, 9]; expected.retain(|&x| x >= start && x < end); assert_eq!(keys(map.range(start..end)), expected, "range = {}..{}", start, end); } } } #[test] fn test_offset_keys() { let mut map = SortedMap::new(); map.insert(1, 1); map.insert(3, 3); map.insert(6, 6); map.offset_keys(|k| *k += 1); let mut expected = SortedMap::new(); expected.insert(2, 1); expected.insert(4, 3); expected.insert(7, 6); assert_eq!(map, expected); } fn keys(s: SortedMap<u32, u32>) -> Vec<u32> { s.into_iter().map(|(k, _)| k).collect::<Vec<u32>>() } fn elements(s: SortedMap<u32, u32>) -> Vec<(u32, u32)> { s.into_iter().collect::<Vec<(u32, u32)>>() } #[test] fn test_remove_range() { let mut map = SortedMap::new(); map.insert(1, 1); map.insert(3, 3); map.insert(6, 6); map.insert(9, 9); for start in 0 .. 11 { for end in 0 .. 11 { if end < start { continue } let mut expected = vec![1, 3, 6, 9]; expected.retain(|&x| x < start || x >= end); let mut map = map.clone(); map.remove_range(start .. 
end); assert_eq!(keys(map), expected, "range = {}..{}", start, end); } } } #[test] fn test_remove() { let mut map = SortedMap::new(); let mut expected = Vec::new(); for x in 0..10 { map.insert(x, x); expected.push((x, x)); } for x in 0 .. 10 { let mut map = map.clone(); let mut expected = expected.clone(); assert_eq!(map.remove(&x), Some(x)); expected.remove(x as usize); assert_eq!(map.iter().cloned().collect::<Vec<_>>(), expected); } } #[test] fn test_insert_presorted_non_overlapping() { let mut map = SortedMap::new(); map.insert(2, 0); map.insert(8, 0); map.insert_presorted(vec![(3, 0), (7, 0)]); let expected = vec![2, 3, 7, 8]; assert_eq!(keys(map), expected); } #[test] fn test_insert_presorted_first_elem_equal() { let mut map = SortedMap::new(); map.insert(2, 2); map.insert(8, 8); map.insert_presorted(vec![(2, 0), (7, 7)]); let expected = vec![(2, 0), (7, 7), (8, 8)]; assert_eq!(elements(map), expected); } #[test] fn test_insert_presorted_last_elem_equal() { let mut map = SortedMap::new(); map.insert(2, 2); map.insert(8, 8); map.insert_presorted(vec![(3, 3), (8, 0)]); let expected = vec![(2, 2), (3, 3), (8, 0)]; assert_eq!(elements(map), expected); } #[test] fn test_insert_presorted_shuffle() { let mut map = SortedMap::new(); map.insert(2, 2); map.insert(7, 7); map.insert_presorted(vec![(1, 1), (3, 3), (8, 8)]); let expected = vec![(1, 1), (2, 2), (3, 3), (7, 7), (8, 8)]; assert_eq!(elements(map), expected); } #[test] fn test_insert_presorted_at_end() { let mut map = SortedMap::new(); map.insert(1, 1); map.insert(2, 2); map.insert_presorted(vec![(3, 3), (8, 8)]); let expected = vec![(1, 1), (2, 2), (3, 3), (8, 8)]; assert_eq!(elements(map), expected); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/indexed_vec.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::fmt::Debug; use std::iter::{self, FromIterator}; use std::slice; use std::marker::PhantomData; use std::ops::{Index, IndexMut, Range, RangeBounds}; use std::fmt; use std::hash::Hash; use std::vec; use std::u32; use rustc_serialize as serialize; /// Represents some newtyped `usize` wrapper. /// /// (purpose: avoid mixing indexes for different bitvector domains.) pub trait Idx: Copy + 'static + Ord + Debug + Hash { fn new(idx: usize) -> Self; fn index(self) -> usize; fn increment_by(&mut self, amount: usize) { let v = self.index() + amount; *self = Self::new(v); } } impl Idx for usize { #[inline] fn new(idx: usize) -> Self { idx } #[inline] fn index(self) -> usize { self } } impl Idx for u32 { #[inline] fn new(idx: usize) -> Self { assert!(idx <= u32::MAX as usize); idx as u32 } #[inline] fn index(self) -> usize { self as usize } } #[macro_export] macro_rules! 
newtype_index { // ---- public rules ---- // Use default constants ($name:ident) => ( newtype_index!( // Leave out derives marker so we can use its absence to ensure it comes first @type [$name] @max [::std::u32::MAX] @debug_format ["{}"]); ); // Define any constants ($name:ident { $($tokens:tt)+ }) => ( newtype_index!( // Leave out derives marker so we can use its absence to ensure it comes first @type [$name] @max [::std::u32::MAX] @debug_format ["{}"] $($tokens)+); ); // ---- private rules ---- // Base case, user-defined constants (if any) have already been defined (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt]) => ( #[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, $($derives),*)] pub struct $type($($pub)* u32); impl Idx for $type { #[inline] fn new(value: usize) -> Self { assert!(value < ($max) as usize); $type(value as u32) } #[inline] fn index(self) -> usize { self.0 as usize } } impl ::std::iter::Step for $type { fn steps_between(start: &Self, end: &Self) -> Option<usize> { <usize as ::std::iter::Step>::steps_between( &Idx::index(*start), &Idx::index(*end), ) } fn replace_one(&mut self) -> Self { ::std::mem::replace(self, Self::new(1)) } fn replace_zero(&mut self) -> Self { ::std::mem::replace(self, Self::new(0)) } fn add_one(&self) -> Self { Self::new(Idx::index(*self) + 1) } fn sub_one(&self) -> Self { Self::new(Idx::index(*self) - 1) } fn add_usize(&self, u: usize) -> Option<Self> { Idx::index(*self).checked_add(u).map(Self::new) } } newtype_index!( @handle_debug @derives [$($derives,)*] @type [$type] @debug_format [$debug_format]); ); // base case for handle_debug where format is custom. No Debug implementation is emitted. 
(@handle_debug @derives [$($_derives:ident,)*] @type [$type:ident] @debug_format [custom]) => (); // base case for handle_debug, no debug overrides found, so use default (@handle_debug @derives [] @type [$type:ident] @debug_format [$debug_format:tt]) => ( impl ::std::fmt::Debug for $type { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(fmt, $debug_format, self.0) } } ); // Debug is requested for derive, don't generate any Debug implementation. (@handle_debug @derives [Debug, $($derives:ident,)*] @type [$type:ident] @debug_format [$debug_format:tt]) => (); // It's not Debug, so just pop it off the front of the derives stack and check the rest. (@handle_debug @derives [$_derive:ident, $($derives:ident,)*] @type [$type:ident] @debug_format [$debug_format:tt]) => ( newtype_index!( @handle_debug @derives [$($derives,)*] @type [$type] @debug_format [$debug_format]); ); // Handle the case where someone wants to make the internal field public (@type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] pub idx $($tokens:tt)*) => ( newtype_index!( @pub [pub] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // The default case is that the internal field is private (@type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] $($tokens:tt)*) => ( newtype_index!( @pub [] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // Append comma to end of derives list if it's missing (@pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] derive [$($derives:ident),*] $($tokens:tt)*) => ( newtype_index!( @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] derive [$($derives,)*] $($tokens)*); ); // By not including the @derives marker in this list nor in the default args, we can force it // to come first if it exists. When encodable is custom, just use the derives list as-is. 
(@pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] derive [$($derives:ident,)+] ENCODABLE = custom $($tokens:tt)*) => ( newtype_index!( @derives [$($derives,)+] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // By not including the @derives marker in this list nor in the default args, we can force it // to come first if it exists. When encodable isn't custom, add serialization traits by default. (@pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] derive [$($derives:ident,)+] $($tokens:tt)*) => ( newtype_index!( @derives [$($derives,)+ RustcDecodable, RustcEncodable,] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // The case where no derives are added, but encodable is overridden. Don't // derive serialization traits (@pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] ENCODABLE = custom $($tokens:tt)*) => ( newtype_index!( @derives [] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // The case where no derives are added, add serialization derives by default (@pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] $($tokens:tt)*) => ( newtype_index!( @derives [RustcDecodable, RustcEncodable,] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // Rewrite final without comma to one that includes comma (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] $name:ident = $constant:expr) => ( newtype_index!( @derives [$($derives,)*] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $name = $constant,); ); // Rewrite final const without comma to one that includes comma (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$_max:expr] @debug_format 
[$debug_format:tt] $(#[doc = $doc:expr])* const $name:ident = $constant:expr) => ( newtype_index!( @derives [$($derives,)*] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $(#[doc = $doc])* const $name = $constant,); ); // Replace existing default for max (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$_max:expr] @debug_format [$debug_format:tt] MAX = $max:expr, $($tokens:tt)*) => ( newtype_index!( @derives [$($derives,)*] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // Replace existing default for debug_format (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$_debug_format:tt] DEBUG_FORMAT = $debug_format:tt, $($tokens:tt)*) => ( newtype_index!( @derives [$($derives,)*] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); // Assign a user-defined constant (@derives [$($derives:ident,)*] @pub [$($pub:tt)*] @type [$type:ident] @max [$max:expr] @debug_format [$debug_format:tt] $(#[doc = $doc:expr])* const $name:ident = $constant:expr, $($tokens:tt)*) => ( $(#[doc = $doc])* pub const $name: $type = $type($constant); newtype_index!( @derives [$($derives,)*] @pub [$($pub)*] @type [$type] @max [$max] @debug_format [$debug_format] $($tokens)*); ); } #[derive(Clone, PartialEq, Eq, Hash)] pub struct IndexVec<I: Idx, T> { pub raw: Vec<T>, _marker: PhantomData<fn(&I)> } // Whether `IndexVec` is `Send` depends only on the data, // not the phantom data. 
// SAFETY: `IndexVec` owns only the `Vec<T>`; the phantom field is a
// `fn(&I)` pointer and carries no data, so sendability depends on `T` alone.
unsafe impl<I: Idx, T> Send for IndexVec<I, T> where T: Send {}

// (De)serialization is transparent: an `IndexVec<I, T>` encodes exactly
// like the underlying `Vec<T>`.
impl<I: Idx, T: serialize::Encodable> serialize::Encodable for IndexVec<I, T> {
    fn encode<S: serialize::Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        serialize::Encodable::encode(&self.raw, s)
    }
}

impl<I: Idx, T: serialize::Decodable> serialize::Decodable for IndexVec<I, T> {
    fn decode<D: serialize::Decoder>(d: &mut D) -> Result<Self, D::Error> {
        serialize::Decodable::decode(d).map(|v| {
            IndexVec { raw: v, _marker: PhantomData }
        })
    }
}

// Debug output is simply that of the raw vector.
impl<I: Idx, T: fmt::Debug> fmt::Debug for IndexVec<I, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.raw, fmt)
    }
}

/// An enumerating iterator whose `usize` positions have been converted into
/// the typed index `I` (via `IntoIdx`, defined below).
pub type Enumerated<I, J> = iter::Map<iter::Enumerate<J>, IntoIdx<I>>;

impl<I: Idx, T> IndexVec<I, T> {
    /// Creates an empty `IndexVec`.
    #[inline]
    pub fn new() -> Self {
        IndexVec { raw: Vec::new(), _marker: PhantomData }
    }

    /// Wraps an existing `Vec` without copying.
    #[inline]
    pub fn from_raw(raw: Vec<T>) -> Self {
        IndexVec { raw, _marker: PhantomData }
    }

    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        IndexVec { raw: Vec::with_capacity(capacity), _marker: PhantomData }
    }

    /// Creates a vector holding a clone of `elem` for every entry of
    /// `universe`, i.e. an `I`-indexed map over the same domain.
    #[inline]
    pub fn from_elem<S>(elem: T, universe: &IndexVec<I, S>) -> Self
        where T: Clone
    {
        IndexVec { raw: vec![elem; universe.len()], _marker: PhantomData }
    }

    #[inline]
    pub fn from_elem_n(elem: T, n: usize) -> Self
        where T: Clone
    {
        IndexVec { raw: vec![elem; n], _marker: PhantomData }
    }

    /// Appends `d` and returns the typed index it was stored at.
    #[inline]
    pub fn push(&mut self, d: T) -> I {
        let idx = I::new(self.len());
        self.raw.push(d);
        idx
    }

    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        self.raw.pop()
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.raw.len()
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.raw.is_empty()
    }

    #[inline]
    pub fn into_iter(self) -> vec::IntoIter<T> {
        self.raw.into_iter()
    }

    /// Like `into_iter().enumerate()`, but yields typed `I` indices.
    #[inline]
    pub fn into_iter_enumerated(self) -> Enumerated<I, vec::IntoIter<T>> {
        self.raw.into_iter().enumerate().map(IntoIdx { _marker: PhantomData })
    }

    #[inline]
    pub fn iter(&self) -> slice::Iter<T> {
        self.raw.iter()
    }

    #[inline]
    pub fn iter_enumerated(&self) -> Enumerated<I, slice::Iter<'_, T>> {
        self.raw.iter().enumerate().map(IntoIdx { _marker: PhantomData })
    }

    /// Iterates over all valid indices (no elements).
    #[inline]
    pub fn indices(&self) -> iter::Map<Range<usize>, IntoIdx<I>> {
        (0..self.len()).map(IntoIdx { _marker: PhantomData })
    }

    #[inline]
    pub fn iter_mut(&mut self) -> slice::IterMut<T> {
        self.raw.iter_mut()
    }

    #[inline]
    pub fn iter_enumerated_mut(&mut self) -> Enumerated<I, slice::IterMut<'_, T>> {
        self.raw.iter_mut().enumerate().map(IntoIdx { _marker: PhantomData })
    }

    #[inline]
    pub fn drain<'a, R: RangeBounds<usize>>(
        &'a mut self, range: R) -> impl Iterator<Item=T> + 'a {
        self.raw.drain(range)
    }

    /// Like `drain`, but pairs each drained element with its typed index.
    #[inline]
    pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
        &'a mut self, range: R) -> impl Iterator<Item=(I, T)> + 'a {
        self.raw.drain(range).enumerate().map(IntoIdx { _marker: PhantomData })
    }

    /// Index of the last element, or `None` if the vector is empty.
    #[inline]
    pub fn last(&self) -> Option<I> {
        self.len().checked_sub(1).map(I::new)
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        self.raw.shrink_to_fit()
    }

    #[inline]
    pub fn swap(&mut self, a: I, b: I) {
        self.raw.swap(a.index(), b.index())
    }

    #[inline]
    pub fn truncate(&mut self, a: usize) {
        self.raw.truncate(a)
    }

    #[inline]
    pub fn get(&self, index: I) -> Option<&T> {
        self.raw.get(index.index())
    }

    #[inline]
    pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
        self.raw.get_mut(index.index())
    }

    /// Return mutable references to two distinct elements, a and b. Panics if a == b.
    #[inline]
    pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) {
        let (ai, bi) = (a.index(), b.index());
        assert!(ai != bi);

        if ai < bi {
            // Split at the larger index so each half holds exactly one of the
            // requested slots; this is how two `&mut` borrows coexist safely.
            let (c1, c2) = self.raw.split_at_mut(bi);
            (&mut c1[ai], &mut c2[0])
        } else {
            // Normalize to the `ai < bi` case and swap the pair back.
            let (c2, c1) = self.pick2_mut(b, a);
            (c1, c2)
        }
    }

    /// Reinterprets the same underlying storage under a different index type.
    pub fn convert_index_type<Ix: Idx>(self) -> IndexVec<Ix, T> {
        IndexVec {
            raw: self.raw,
            _marker: PhantomData,
        }
    }
}

impl<I: Idx, T: Clone> IndexVec<I, T> {
    /// Grows the index vector so that it contains an entry for
    /// `elem`; if that is already true, then has no
    /// effect. Otherwise, inserts new values as needed by invoking
    /// `fill_value`.
    #[inline]
    pub fn ensure_contains_elem(&mut self, elem: I, fill_value: impl FnMut() -> T) {
        let min_new_len = elem.index() + 1;
        if self.len() < min_new_len {
            self.raw.resize_with(min_new_len, fill_value);
        }
    }

    #[inline]
    pub fn resize(&mut self, new_len: usize, value: T) {
        self.raw.resize(new_len, value)
    }

    /// Resizes (up *or* down, unlike `ensure_contains_elem`) so that `elem`
    /// is exactly the last valid index.
    #[inline]
    pub fn resize_to_elem(&mut self, elem: I, fill_value: impl FnMut() -> T) {
        let min_new_len = elem.index() + 1;
        self.raw.resize_with(min_new_len, fill_value);
    }
}

impl<I: Idx, T: Ord> IndexVec<I, T> {
    /// Binary search over a sorted `IndexVec`, returning typed indices
    /// (`Ok` = position of a match, `Err` = insertion point).
    #[inline]
    pub fn binary_search(&self, value: &T) -> Result<I, I> {
        match self.raw.binary_search(value) {
            Ok(i) => Ok(Idx::new(i)),
            Err(i) => Err(Idx::new(i)),
        }
    }
}

// Indexing panics on out-of-bounds, exactly like `Vec`; use `get` for the
// checked variant.
impl<I: Idx, T> Index<I> for IndexVec<I, T> {
    type Output = T;

    #[inline]
    fn index(&self, index: I) -> &T {
        &self.raw[index.index()]
    }
}

impl<I: Idx, T> IndexMut<I> for IndexVec<I, T> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut T {
        &mut self.raw[index.index()]
    }
}

impl<I: Idx, T> Default for IndexVec<I, T> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
    #[inline]
    fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
        self.raw.extend(iter);
    }
}

impl<I: Idx, T> FromIterator<T> for IndexVec<I, T> {
    #[inline]
    fn from_iter<J>(iter: J) -> Self where J: IntoIterator<Item=T> {
        IndexVec { raw: FromIterator::from_iter(iter), _marker: PhantomData }
    }
}

impl<I: Idx, T> IntoIterator for IndexVec<I, T> {
    type Item = T;
    type IntoIter = vec::IntoIter<T>;

    #[inline]
    fn into_iter(self) -> vec::IntoIter<T> {
        self.raw.into_iter()
    }
}

impl<'a, I: Idx, T> IntoIterator for &'a IndexVec<I, T> {
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> slice::Iter<'a, T> {
        self.raw.iter()
    }
}

impl<'a, I: Idx, T> IntoIterator for &'a mut IndexVec<I, T> {
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> slice::IterMut<'a, T> {
        self.raw.iter_mut()
    }
}

/// A *named* closure type mapping `usize` positions (and `(usize, T)` pairs)
/// to typed indices. A named type is required so the `Enumerated` alias can
/// spell out its mapping function; the manual `FnOnce`/`FnMut` impls rely on
/// the unstable `unboxed_closures`/`fn_traits` machinery (`extern "rust-call"`).
pub struct IntoIdx<I: Idx> { _marker: PhantomData<fn(&I)> }

// Called on `(usize, T)` pairs produced by `enumerate()`.
impl<I: Idx, T> FnOnce<((usize, T),)> for IntoIdx<I> {
    type Output = (I, T);

    extern "rust-call" fn call_once(self, ((n, t),): ((usize, T),)) -> Self::Output {
        (I::new(n), t)
    }
}

impl<I: Idx, T> FnMut<((usize, T),)> for IntoIdx<I> {
    extern "rust-call" fn call_mut(&mut self, ((n, t),): ((usize, T),)) -> Self::Output {
        (I::new(n), t)
    }
}

// Called on bare `usize` positions, as produced by `indices()`.
impl<I: Idx> FnOnce<(usize,)> for IntoIdx<I> {
    type Output = I;

    extern "rust-call" fn call_once(self, (n,): (usize,)) -> Self::Output {
        I::new(n)
    }
}

impl<I: Idx> FnMut<(usize,)> for IntoIdx<I> {
    extern "rust-call" fn call_mut(&mut self, (n,): (usize,)) -> Self::Output {
        I::new(n)
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/Cargo.toml
# Manifest for rustc's internal data-structures crate
# (vendored here as a rustfmt build dependency, per the dump path).
[package]
authors = ["The Rust Project Developers"]
name = "rustc_data_structures"
version = "0.0.0"

[lib]
name = "rustc_data_structures"
path = "lib.rs"

# External versions are pinned exactly (`=x.y.z`) so the vendored build
# stays reproducible; `path` entries are in-tree sibling crates.
[dependencies]
ena = "=0.9.3"
log = "=0.4.3"
rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
serialize = { path = "../libserialize" }
cfg-if = "=0.1.4"
stable_deref_trait = "=1.1.0"
parking_lot_core = "=0.2.14"
rustc-rayon = "=0.1.1"
rustc-rayon-core = "=0.1.1"
rustc-hash = "=1.0.1"
# `union` enables smallvec's more compact union-based layout.
smallvec = { version = "0.6.5", features = ["union"] }

# Table form so a feature list can accompany the version requirement.
[dependencies.parking_lot]
version = "0.5"
features = ["nightly"]
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/transitive_relation.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use bitvec::BitMatrix; use fx::FxHashMap; use sync::Lock; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use stable_hasher::{HashStable, StableHasher, StableHasherResult}; use std::fmt::Debug; use std::hash::Hash; use std::mem; #[derive(Clone, Debug)] pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash> { // List of elements. This is used to map from a T to a usize. elements: Vec<T>, // Maps each element to an index. map: FxHashMap<T, Index>, // List of base edges in the graph. Require to compute transitive // closure. edges: Vec<Edge>, // This is a cached transitive closure derived from the edges. // Currently, we build it lazilly and just throw out any existing // copy whenever a new edge is added. (The Lock is to permit // the lazy computation.) This is kind of silly, except for the // fact its size is tied to `self.elements.len()`, so I wanted to // wait before building it up to avoid reallocating as new edges // are added with new elements. Perhaps better would be to ask the // user for a batch of edges to minimize this effect, but I // already wrote the code this way. 
:P -nmatsakis closure: Lock<Option<BitMatrix<usize, usize>>>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug)] struct Index(usize); #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] struct Edge { source: Index, target: Index, } impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> { pub fn new() -> TransitiveRelation<T> { TransitiveRelation { elements: vec![], map: FxHashMap(), edges: vec![], closure: Lock::new(None), } } pub fn is_empty(&self) -> bool { self.edges.is_empty() } fn index(&self, a: &T) -> Option<Index> { self.map.get(a).cloned() } fn add_index(&mut self, a: T) -> Index { let &mut TransitiveRelation { ref mut elements, ref mut closure, ref mut map, .. } = self; *map.entry(a.clone()) .or_insert_with(|| { elements.push(a); // if we changed the dimensions, clear the cache *closure.get_mut() = None; Index(elements.len() - 1) }) } /// Applies the (partial) function to each edge and returns a new /// relation. If `f` returns `None` for any end-point, returns /// `None`. 
pub fn maybe_map<F, U>(&self, mut f: F) -> Option<TransitiveRelation<U>> where F: FnMut(&T) -> Option<U>, U: Clone + Debug + Eq + Hash + Clone, { let mut result = TransitiveRelation::new(); for edge in &self.edges { result.add(f(&self.elements[edge.source.0])?, f(&self.elements[edge.target.0])?); } Some(result) } /// Indicate that `a < b` (where `<` is this relation) pub fn add(&mut self, a: T, b: T) { let a = self.add_index(a); let b = self.add_index(b); let edge = Edge { source: a, target: b, }; if !self.edges.contains(&edge) { self.edges.push(edge); // added an edge, clear the cache *self.closure.get_mut() = None; } } /// Check whether `a < target` (transitively) pub fn contains(&self, a: &T, b: &T) -> bool { match (self.index(a), self.index(b)) { (Some(a), Some(b)) => self.with_closure(|closure| closure.contains(a.0, b.0)), (None, _) | (_, None) => false, } } /// Thinking of `x R y` as an edge `x -> y` in a graph, this /// returns all things reachable from `a`. /// /// Really this probably ought to be `impl Iterator<Item=&T>`, but /// I'm too lazy to make that work, and -- given the caching /// strategy -- it'd be a touch tricky anyhow. pub fn reachable_from(&self, a: &T) -> Vec<&T> { match self.index(a) { Some(a) => self.with_closure(|closure| { closure.iter(a.0).map(|i| &self.elements[i]).collect() }), None => vec![], } } /// Picks what I am referring to as the "postdominating" /// upper-bound for `a` and `b`. This is usually the least upper /// bound, but in cases where there is no single least upper /// bound, it is the "mutual immediate postdominator", if you /// imagine a graph where `a < b` means `a -> b`. /// /// This function is needed because region inference currently /// requires that we produce a single "UB", and there is no best /// choice for the LUB. Rather than pick arbitrarily, I pick a /// less good, but predictable choice. 
This should help ensure /// that region inference yields predictable results (though it /// itself is not fully sufficient). /// /// Examples are probably clearer than any prose I could write /// (there are corresponding tests below, btw). In each case, /// the query is `postdom_upper_bound(a, b)`: /// /// ```text /// // returns Some(x), which is also LUB /// a -> a1 -> x /// ^ /// | /// b -> b1 ---+ /// /// // returns Some(x), which is not LUB (there is none) /// // diagonal edges run left-to-right /// a -> a1 -> x /// \/ ^ /// /\ | /// b -> b1 ---+ /// /// // returns None /// a -> a1 /// b -> b1 /// ``` pub fn postdom_upper_bound(&self, a: &T, b: &T) -> Option<&T> { let mubs = self.minimal_upper_bounds(a, b); self.mutual_immediate_postdominator(mubs) } /// Viewing the relation as a graph, computes the "mutual /// immediate postdominator" of a set of points (if one /// exists). See `postdom_upper_bound` for details. pub fn mutual_immediate_postdominator<'a>(&'a self, mut mubs: Vec<&'a T>) -> Option<&'a T> { loop { match mubs.len() { 0 => return None, 1 => return Some(mubs[0]), _ => { let m = mubs.pop().unwrap(); let n = mubs.pop().unwrap(); mubs.extend(self.minimal_upper_bounds(n, m)); } } } } /// Returns the set of bounds `X` such that: /// /// - `a < X` and `b < X` /// - there is no `Y != X` such that `a < Y` and `Y < X` /// - except for the case where `X < a` (i.e., a strongly connected /// component in the graph). In that case, the smallest /// representative of the SCC is returned (as determined by the /// internal indices). /// /// Note that this set can, in principle, have any size. 
pub fn minimal_upper_bounds(&self, a: &T, b: &T) -> Vec<&T> { let (mut a, mut b) = match (self.index(a), self.index(b)) { (Some(a), Some(b)) => (a, b), (None, _) | (_, None) => { return vec![]; } }; // in some cases, there are some arbitrary choices to be made; // it doesn't really matter what we pick, as long as we pick // the same thing consistently when queried, so ensure that // (a, b) are in a consistent relative order if a > b { mem::swap(&mut a, &mut b); } let lub_indices = self.with_closure(|closure| { // Easy case is when either a < b or b < a: if closure.contains(a.0, b.0) { return vec![b.0]; } if closure.contains(b.0, a.0) { return vec![a.0]; } // Otherwise, the tricky part is that there may be some c // where a < c and b < c. In fact, there may be many such // values. So here is what we do: // // 1. Find the vector `[X | a < X && b < X]` of all values // `X` where `a < X` and `b < X`. In terms of the // graph, this means all values reachable from both `a` // and `b`. Note that this vector is also a set, but we // use the term vector because the order matters // to the steps below. // - This vector contains upper bounds, but they are // not minimal upper bounds. So you may have e.g. // `[x, y, tcx, z]` where `x < tcx` and `y < tcx` and // `z < x` and `z < y`: // // z --+---> x ----+----> tcx // | | // | | // +---> y ----+ // // In this case, we really want to return just `[z]`. // The following steps below achieve this by gradually // reducing the list. // 2. Pare down the vector using `pare_down`. This will // remove elements from the vector that can be reached // by an earlier element. // - In the example above, this would convert `[x, y, // tcx, z]` to `[x, y, z]`. Note that `x` and `y` are // still in the vector; this is because while `z < x` // (and `z < y`) holds, `z` comes after them in the // vector. // 3. Reverse the vector and repeat the pare down process. 
// - In the example above, we would reverse to // `[z, y, x]` and then pare down to `[z]`. // 4. Reverse once more just so that we yield a vector in // increasing order of index. Not necessary, but why not. // // I believe this algorithm yields a minimal set. The // argument is that, after step 2, we know that no element // can reach its successors (in the vector, not the graph). // After step 3, we know that no element can reach any of // its predecesssors (because of step 2) nor successors // (because we just called `pare_down`) // // This same algorithm is used in `parents` below. let mut candidates = closure.intersection(a.0, b.0); // (1) pare_down(&mut candidates, closure); // (2) candidates.reverse(); // (3a) pare_down(&mut candidates, closure); // (3b) candidates }); lub_indices.into_iter() .rev() // (4) .map(|i| &self.elements[i]) .collect() } /// Given an element A, returns the maximal set {B} of elements B /// such that /// /// - A != B /// - A R B is true /// - for each i, j: B[i] R B[j] does not hold /// /// The intuition is that this moves "one step up" through a lattice /// (where the relation is encoding the `<=` relation for the lattice). /// So e.g. if the relation is `->` and we have /// /// ``` /// a -> b -> d -> f /// | ^ /// +--> c -> e ---+ /// ``` /// /// then `parents(a)` returns `[b, c]`. The `postdom_parent` function /// would further reduce this to just `f`. pub fn parents(&self, a: &T) -> Vec<&T> { let a = match self.index(a) { Some(a) => a, None => return vec![] }; // Steal the algorithm for `minimal_upper_bounds` above, but // with a slight tweak. In the case where `a R a`, we remove // that from the set of candidates. let ancestors = self.with_closure(|closure| { let mut ancestors = closure.intersection(a.0, a.0); // Remove anything that can reach `a`. If this is a // reflexive relation, this will include `a` itself. 
ancestors.retain(|&e| !closure.contains(e, a.0)); pare_down(&mut ancestors, closure); // (2) ancestors.reverse(); // (3a) pare_down(&mut ancestors, closure); // (3b) ancestors }); ancestors.into_iter() .rev() // (4) .map(|i| &self.elements[i]) .collect() } /// A "best" parent in some sense. See `parents` and /// `postdom_upper_bound` for more details. pub fn postdom_parent(&self, a: &T) -> Option<&T> { self.mutual_immediate_postdominator(self.parents(a)) } fn with_closure<OP, R>(&self, op: OP) -> R where OP: FnOnce(&BitMatrix<usize, usize>) -> R { let mut closure_cell = self.closure.borrow_mut(); let mut closure = closure_cell.take(); if closure.is_none() { closure = Some(self.compute_closure()); } let result = op(closure.as_ref().unwrap()); *closure_cell = closure; result } fn compute_closure(&self) -> BitMatrix<usize, usize> { let mut matrix = BitMatrix::new(self.elements.len(), self.elements.len()); let mut changed = true; while changed { changed = false; for edge in &self.edges { // add an edge from S -> T changed |= matrix.add(edge.source.0, edge.target.0); // add all outgoing edges from T into S changed |= matrix.merge(edge.target.0, edge.source.0); } } matrix } } /// Pare down is used as a step in the LUB computation. It edits the /// candidates array in place by removing any element j for which /// there exists an earlier element i<j such that i -> j. That is, /// after you run `pare_down`, you know that for all elements that /// remain in candidates, they cannot reach any of the elements that /// come after them. /// /// Examples follow. Assume that a -> b -> c and x -> y -> z. /// /// - Input: `[a, b, x]`. Output: `[a, x]`. /// - Input: `[b, a, x]`. Output: `[b, a, x]`. /// - Input: `[a, x, b, y]`. Output: `[a, x]`. 
fn pare_down(candidates: &mut Vec<usize>, closure: &BitMatrix<usize, usize>) { let mut i = 0; while i < candidates.len() { let candidate_i = candidates[i]; i += 1; let mut j = i; let mut dead = 0; while j < candidates.len() { let candidate_j = candidates[j]; if closure.contains(candidate_i, candidate_j) { // If `i` can reach `j`, then we can remove `j`. So just // mark it as dead and move on; subsequent indices will be // shifted into its place. dead += 1; } else { candidates[j - dead] = candidate_j; } j += 1; } candidates.truncate(j - dead); } } impl<T> Encodable for TransitiveRelation<T> where T: Clone + Encodable + Debug + Eq + Hash + Clone { fn encode<E: Encoder>(&self, s: &mut E) -> Result<(), E::Error> { s.emit_struct("TransitiveRelation", 2, |s| { s.emit_struct_field("elements", 0, |s| self.elements.encode(s))?; s.emit_struct_field("edges", 1, |s| self.edges.encode(s))?; Ok(()) }) } } impl<T> Decodable for TransitiveRelation<T> where T: Clone + Decodable + Debug + Eq + Hash + Clone { fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> { d.read_struct("TransitiveRelation", 2, |d| { let elements: Vec<T> = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?; let edges = d.read_struct_field("edges", 1, |d| Decodable::decode(d))?; let map = elements.iter() .enumerate() .map(|(index, elem)| (elem.clone(), Index(index))) .collect(); Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) }) }) } } impl<CTX, T> HashStable<CTX> for TransitiveRelation<T> where T: HashStable<CTX> + Eq + Debug + Clone + Hash { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { // We are assuming here that the relation graph has been built in a // deterministic way and we can just hash it the way it is. 
let TransitiveRelation { ref elements, ref edges, // "map" is just a copy of elements vec map: _, // "closure" is just a copy of the data above closure: _ } = *self; elements.hash_stable(hcx, hasher); edges.hash_stable(hcx, hasher); } } impl<CTX> HashStable<CTX> for Edge { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { let Edge { ref source, ref target, } = *self; source.hash_stable(hcx, hasher); target.hash_stable(hcx, hasher); } } impl<CTX> HashStable<CTX> for Index { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { let Index(idx) = *self; idx.hash_stable(hcx, hasher); } } #[test] fn test_one_step() { let mut relation = TransitiveRelation::new(); relation.add("a", "b"); relation.add("a", "c"); assert!(relation.contains(&"a", &"c")); assert!(relation.contains(&"a", &"b")); assert!(!relation.contains(&"b", &"a")); assert!(!relation.contains(&"a", &"d")); } #[test] fn test_many_steps() { let mut relation = TransitiveRelation::new(); relation.add("a", "b"); relation.add("a", "c"); relation.add("a", "f"); relation.add("b", "c"); relation.add("b", "d"); relation.add("b", "e"); relation.add("e", "g"); assert!(relation.contains(&"a", &"b")); assert!(relation.contains(&"a", &"c")); assert!(relation.contains(&"a", &"d")); assert!(relation.contains(&"a", &"e")); assert!(relation.contains(&"a", &"f")); assert!(relation.contains(&"a", &"g")); assert!(relation.contains(&"b", &"g")); assert!(!relation.contains(&"a", &"x")); assert!(!relation.contains(&"b", &"f")); } #[test] fn mubs_triangle() { // a -> tcx // ^ // | // b let mut relation = TransitiveRelation::new(); relation.add("a", "tcx"); relation.add("b", "tcx"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"tcx"]); assert_eq!(relation.parents(&"a"), vec![&"tcx"]); assert_eq!(relation.parents(&"b"), vec![&"tcx"]); } #[test] fn mubs_best_choice1() { // 0 -> 1 <- 3 // | ^ | // | | | // +--> 2 <--+ // // mubs(0,3) = [1] // 
This tests a particular state in the algorithm, in which we // need the second pare down call to get the right result (after // intersection, we have [1, 2], but 2 -> 1). let mut relation = TransitiveRelation::new(); relation.add("0", "1"); relation.add("0", "2"); relation.add("2", "1"); relation.add("3", "1"); relation.add("3", "2"); assert_eq!(relation.minimal_upper_bounds(&"0", &"3"), vec![&"2"]); assert_eq!(relation.parents(&"0"), vec![&"2"]); assert_eq!(relation.parents(&"2"), vec![&"1"]); assert!(relation.parents(&"1").is_empty()); } #[test] fn mubs_best_choice2() { // 0 -> 1 <- 3 // | | | // | v | // +--> 2 <--+ // // mubs(0,3) = [2] // Like the precedecing test, but in this case intersection is [2, // 1], and hence we rely on the first pare down call. let mut relation = TransitiveRelation::new(); relation.add("0", "1"); relation.add("0", "2"); relation.add("1", "2"); relation.add("3", "1"); relation.add("3", "2"); assert_eq!(relation.minimal_upper_bounds(&"0", &"3"), vec![&"1"]); assert_eq!(relation.parents(&"0"), vec![&"1"]); assert_eq!(relation.parents(&"1"), vec![&"2"]); assert!(relation.parents(&"2").is_empty()); } #[test] fn mubs_no_best_choice() { // in this case, the intersection yields [1, 2], and the "pare // down" calls find nothing to remove. let mut relation = TransitiveRelation::new(); relation.add("0", "1"); relation.add("0", "2"); relation.add("3", "1"); relation.add("3", "2"); assert_eq!(relation.minimal_upper_bounds(&"0", &"3"), vec![&"1", &"2"]); assert_eq!(relation.parents(&"0"), vec![&"1", &"2"]); assert_eq!(relation.parents(&"3"), vec![&"1", &"2"]); } #[test] fn mubs_best_choice_scc() { // in this case, 1 and 2 form a cycle; we pick arbitrarily (but // consistently). 
let mut relation = TransitiveRelation::new(); relation.add("0", "1"); relation.add("0", "2"); relation.add("1", "2"); relation.add("2", "1"); relation.add("3", "1"); relation.add("3", "2"); assert_eq!(relation.minimal_upper_bounds(&"0", &"3"), vec![&"1"]); assert_eq!(relation.parents(&"0"), vec![&"1"]); } #[test] fn pdub_crisscross() { // diagonal edges run left-to-right // a -> a1 -> x // \/ ^ // /\ | // b -> b1 ---+ let mut relation = TransitiveRelation::new(); relation.add("a", "a1"); relation.add("a", "b1"); relation.add("b", "a1"); relation.add("b", "b1"); relation.add("a1", "x"); relation.add("b1", "x"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"a1", &"b1"]); assert_eq!(relation.postdom_upper_bound(&"a", &"b"), Some(&"x")); assert_eq!(relation.postdom_parent(&"a"), Some(&"x")); assert_eq!(relation.postdom_parent(&"b"), Some(&"x")); } #[test] fn pdub_crisscross_more() { // diagonal edges run left-to-right // a -> a1 -> a2 -> a3 -> x // \/ \/ ^ // /\ /\ | // b -> b1 -> b2 ---------+ let mut relation = TransitiveRelation::new(); relation.add("a", "a1"); relation.add("a", "b1"); relation.add("b", "a1"); relation.add("b", "b1"); relation.add("a1", "a2"); relation.add("a1", "b2"); relation.add("b1", "a2"); relation.add("b1", "b2"); relation.add("a2", "a3"); relation.add("a3", "x"); relation.add("b2", "x"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"a1", &"b1"]); assert_eq!(relation.minimal_upper_bounds(&"a1", &"b1"), vec![&"a2", &"b2"]); assert_eq!(relation.postdom_upper_bound(&"a", &"b"), Some(&"x")); assert_eq!(relation.postdom_parent(&"a"), Some(&"x")); assert_eq!(relation.postdom_parent(&"b"), Some(&"x")); } #[test] fn pdub_lub() { // a -> a1 -> x // ^ // | // b -> b1 ---+ let mut relation = TransitiveRelation::new(); relation.add("a", "a1"); relation.add("b", "b1"); relation.add("a1", "x"); relation.add("b1", "x"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"x"]); 
assert_eq!(relation.postdom_upper_bound(&"a", &"b"), Some(&"x")); assert_eq!(relation.postdom_parent(&"a"), Some(&"a1")); assert_eq!(relation.postdom_parent(&"b"), Some(&"b1")); assert_eq!(relation.postdom_parent(&"a1"), Some(&"x")); assert_eq!(relation.postdom_parent(&"b1"), Some(&"x")); } #[test] fn mubs_intermediate_node_on_one_side_only() { // a -> c -> d // ^ // | // b // "digraph { a -> c -> d; b -> d; }", let mut relation = TransitiveRelation::new(); relation.add("a", "c"); relation.add("c", "d"); relation.add("b", "d"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"d"]); } #[test] fn mubs_scc_1() { // +-------------+ // | +----+ | // | v | | // a -> c -> d <-+ // ^ // | // b // "digraph { a -> c -> d; d -> c; a -> d; b -> d; }", let mut relation = TransitiveRelation::new(); relation.add("a", "c"); relation.add("c", "d"); relation.add("d", "c"); relation.add("a", "d"); relation.add("b", "d"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"c"]); } #[test] fn mubs_scc_2() { // +----+ // v | // a -> c -> d // ^ ^ // | | // +--- b // "digraph { a -> c -> d; d -> c; b -> d; b -> c; }", let mut relation = TransitiveRelation::new(); relation.add("a", "c"); relation.add("c", "d"); relation.add("d", "c"); relation.add("b", "d"); relation.add("b", "c"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"c"]); } #[test] fn mubs_scc_3() { // +---------+ // v | // a -> c -> d -> e // ^ ^ // | | // b ---+ // "digraph { a -> c -> d -> e -> c; b -> d; b -> e; }", let mut relation = TransitiveRelation::new(); relation.add("a", "c"); relation.add("c", "d"); relation.add("d", "e"); relation.add("e", "c"); relation.add("b", "d"); relation.add("b", "e"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"c"]); } #[test] fn mubs_scc_4() { // +---------+ // v | // a -> c -> d -> e // | ^ ^ // +---------+ | // | // b ---+ // "digraph { a -> c -> d -> e -> c; a -> d; b -> e; }" let mut relation = TransitiveRelation::new(); 
relation.add("a", "c"); relation.add("c", "d"); relation.add("d", "e"); relation.add("e", "c"); relation.add("a", "d"); relation.add("b", "e"); assert_eq!(relation.minimal_upper_bounds(&"a", &"b"), vec![&"c"]); } #[test] fn parent() { // An example that was misbehaving in the compiler. // // 4 -> 1 -> 3 // \ | / // \ v / // 2 -> 0 // // plus a bunch of self-loops // // Here `->` represents `<=` and `0` is `'static`. let pairs = vec![ (2, /*->*/ 0), (2, /*->*/ 2), (0, /*->*/ 0), (0, /*->*/ 0), (1, /*->*/ 0), (1, /*->*/ 1), (3, /*->*/ 0), (3, /*->*/ 3), (4, /*->*/ 0), (4, /*->*/ 1), (1, /*->*/ 3), ]; let mut relation = TransitiveRelation::new(); for (a, b) in pairs { relation.add(a, b); } let p = relation.postdom_parent(&3); assert_eq!(p, Some(&0)); }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/fingerprint.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::mem;
use stable_hasher;
use serialize;
use serialize::opaque::{EncodeResult, Encoder, Decoder};

/// A 128-bit hash value stored as two `u64` halves.
#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy)]
pub struct Fingerprint(u64, u64);

impl Fingerprint {
    pub const ZERO: Fingerprint = Fingerprint(0, 0);

    /// Widens a 64-bit hash by duplicating it into both halves.
    #[inline]
    pub fn from_smaller_hash(hash: u64) -> Fingerprint {
        Fingerprint(hash, hash)
    }

    /// Returns the low half; inverse of `from_smaller_hash` for values
    /// built that way.
    #[inline]
    pub fn to_smaller_hash(&self) -> u64 {
        self.0
    }

    #[inline]
    pub fn as_value(&self) -> (u64, u64) {
        (self.0, self.1)
    }

    /// Order-dependent combination of two fingerprints:
    /// `combine(a, b) != combine(b, a)` in general.
    #[inline]
    pub fn combine(self, other: Fingerprint) -> Fingerprint {
        // See https://stackoverflow.com/a/27952689 on why this function is
        // implemented this way.
        Fingerprint(
            self.0.wrapping_mul(3).wrapping_add(other.0),
            self.1.wrapping_mul(3).wrapping_add(other.1)
        )
    }

    // Combines two hashes in an order independent way. Make sure this is what
    // you want.
    #[inline]
    pub fn combine_commutative(self, other: Fingerprint) -> Fingerprint {
        // Treat each fingerprint as a single u128 and add with wraparound;
        // addition is commutative, which gives the order independence.
        let a = (self.1 as u128) << 64 | self.0 as u128;
        let b = (other.1 as u128) << 64 | other.0 as u128;

        let c = a.wrapping_add(b);

        Fingerprint((c >> 64) as u64, c as u64)
    }

    pub fn to_hex(&self) -> String {
        // NOTE(review): `{:x}` is not zero-padded, so distinct fingerprints
        // can render to the same string (e.g. (0x12, 0x3) vs (0x1, 0x23)).
        // `{:016x}{:016x}` would be unambiguous — confirm no caller depends
        // on the current format before changing it.
        format!("{:x}{:x}", self.0, self.1)
    }

    /// Writes the fingerprint as 16 raw little-endian bytes.
    pub fn encode_opaque(&self, encoder: &mut Encoder) -> EncodeResult {
        // [u64; 2] and [u8; 16] have identical size; `to_le` fixes the byte
        // order so the encoding is stable across host endianness.
        let bytes: [u8; 16] = unsafe { mem::transmute([self.0.to_le(), self.1.to_le()]) };

        encoder.emit_raw_bytes(&bytes);
        Ok(())
    }

    /// Reads back the 16-byte little-endian form written by `encode_opaque`.
    pub fn decode_opaque<'a>(decoder: &mut Decoder<'a>) -> Result<Fingerprint, String> {
        let mut bytes = [0; 16];

        decoder.read_raw_bytes(&mut bytes)?;

        // Inverse of the transmute in `encode_opaque`; sizes match exactly.
        let [l, r]: [u64; 2] = unsafe { mem::transmute(bytes) };

        Ok(Fingerprint(u64::from_le(l), u64::from_le(r)))
    }
}

impl ::std::fmt::Display for Fingerprint {
    fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        write!(formatter, "{:x}-{:x}", self.0, self.1)
    }
}

// Lets a StableHasher finalize directly into a Fingerprint.
impl stable_hasher::StableHasherResult for Fingerprint {
    fn finish(hasher: stable_hasher::StableHasher<Self>) -> Self {
        let (_0, _1) = hasher.finalize();
        Fingerprint(_0, _1)
    }
}

impl_stable_hash_via_hash!(Fingerprint);

// Opt out of the generic Encodable/Decodable derivation so the specialized
// raw-byte encoders below are used with the opaque format.
impl serialize::UseSpecializedEncodable for Fingerprint { }
impl serialize::UseSpecializedDecodable for Fingerprint { }

impl serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder {
    fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
        f.encode_opaque(self)
    }
}

impl<'a> serialize::SpecializedDecoder<Fingerprint> for serialize::opaque::Decoder<'a> {
    fn specialized_decode(&mut self) -> Result<Fingerprint, Self::Error> {
        Fingerprint::decode_opaque(self)
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/small_vec.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A vector type intended to be used for collecting from iterators onto the stack. //! //! Space for up to N elements is provided on the stack. If more elements are collected, Vec is //! used to store the values on the heap. SmallVec is similar to AccumulateVec, but adds //! the ability to push elements. //! //! The N above is determined by Array's implementor, by way of an associated constant. use smallvec::{Array, SmallVec}; pub type OneVector<T> = SmallVec<[T; 1]>; pub trait ExpectOne<A: Array> { fn expect_one(self, err: &'static str) -> A::Item; } impl<A: Array> ExpectOne<A> for SmallVec<A> { fn expect_one(self, err: &'static str) -> A::Item { assert!(self.len() == 1, err); self.into_iter().next().unwrap() } } #[cfg(test)] mod tests { extern crate test; use super::*; #[test] #[should_panic] fn test_expect_one_zero() { let _: isize = OneVector::new().expect_one(""); } #[test] #[should_panic] fn test_expect_one_many() { OneVector::from_vec(vec![1, 2]).expect_one(""); } #[test] fn test_expect_one_one() { assert_eq!(1, (smallvec![1] as OneVector<_>).expect_one("")); assert_eq!(1, OneVector::from_vec(vec![1]).expect_one("")); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/bitvec.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use indexed_vec::{Idx, IndexVec}; use std::marker::PhantomData; type Word = u128; const WORD_BITS: usize = 128; /// A very simple BitArray type. /// /// It does not support resizing after creation; use `BitVector` for that. #[derive(Clone, Debug, PartialEq)] pub struct BitArray<C: Idx> { data: Vec<Word>, marker: PhantomData<C>, } #[derive(Clone, Debug, PartialEq)] pub struct BitVector<C: Idx> { data: BitArray<C>, } impl<C: Idx> BitVector<C> { pub fn grow(&mut self, num_bits: C) { self.data.grow(num_bits) } pub fn new() -> BitVector<C> { BitVector { data: BitArray::new(0), } } pub fn with_capacity(bits: usize) -> BitVector<C> { BitVector { data: BitArray::new(bits), } } /// Returns true if the bit has changed. #[inline] pub fn insert(&mut self, bit: C) -> bool { self.grow(bit); self.data.insert(bit) } #[inline] pub fn contains(&self, bit: C) -> bool { let (word, mask) = word_mask(bit); if let Some(word) = self.data.data.get(word) { (word & mask) != 0 } else { false } } } impl<C: Idx> BitArray<C> { // Do not make this method public, instead switch your use case to BitVector. 
#[inline] fn grow(&mut self, num_bits: C) { let num_words = words(num_bits); if self.data.len() <= num_words { self.data.resize(num_words + 1, 0) } } #[inline] pub fn new(num_bits: usize) -> BitArray<C> { let num_words = words(num_bits); BitArray { data: vec![0; num_words], marker: PhantomData, } } #[inline] pub fn clear(&mut self) { for p in &mut self.data { *p = 0; } } pub fn count(&self) -> usize { self.data.iter().map(|e| e.count_ones() as usize).sum() } /// True if `self` contains the bit `bit`. #[inline] pub fn contains(&self, bit: C) -> bool { let (word, mask) = word_mask(bit); (self.data[word] & mask) != 0 } /// True if `self` contains all the bits in `other`. /// /// The two vectors must have the same length. #[inline] pub fn contains_all(&self, other: &BitArray<C>) -> bool { assert_eq!(self.data.len(), other.data.len()); self.data.iter().zip(&other.data).all(|(a, b)| (a & b) == *b) } #[inline] pub fn is_empty(&self) -> bool { self.data.iter().all(|a| *a == 0) } /// Returns true if the bit has changed. #[inline] pub fn insert(&mut self, bit: C) -> bool { let (word, mask) = word_mask(bit); let data = &mut self.data[word]; let value = *data; let new_value = value | mask; *data = new_value; new_value != value } /// Sets all bits to true. pub fn insert_all(&mut self) { for data in &mut self.data { *data = u128::max_value(); } } /// Returns true if the bit has changed. 
#[inline] pub fn remove(&mut self, bit: C) -> bool { let (word, mask) = word_mask(bit); let data = &mut self.data[word]; let value = *data; let new_value = value & !mask; *data = new_value; new_value != value } #[inline] pub fn merge(&mut self, all: &BitArray<C>) -> bool { assert!(self.data.len() == all.data.len()); let mut changed = false; for (i, j) in self.data.iter_mut().zip(&all.data) { let value = *i; *i = value | *j; if value != *i { changed = true; } } changed } /// Iterates over indexes of set bits in a sorted order #[inline] pub fn iter<'a>(&'a self) -> BitIter<'a, C> { BitIter { iter: self.data.iter(), current: 0, idx: 0, marker: PhantomData, } } } pub struct BitIter<'a, C: Idx> { iter: ::std::slice::Iter<'a, Word>, current: Word, idx: usize, marker: PhantomData<C> } impl<'a, C: Idx> Iterator for BitIter<'a, C> { type Item = C; fn next(&mut self) -> Option<C> { while self.current == 0 { self.current = if let Some(&i) = self.iter.next() { if i == 0 { self.idx += WORD_BITS; continue; } else { self.idx = words(self.idx) * WORD_BITS; i } } else { return None; } } let offset = self.current.trailing_zeros() as usize; self.current >>= offset; self.current >>= 1; // shift otherwise overflows for 0b1000_0000_…_0000 self.idx += offset + 1; Some(C::new(self.idx - 1)) } fn size_hint(&self) -> (usize, Option<usize>) { let (_, upper) = self.iter.size_hint(); (0, upper) } } /// A "bit matrix" is basically a matrix of booleans represented as /// one gigantic bitvector. In other words, it is as if you have /// `rows` bitvectors, each of length `columns`. #[derive(Clone, Debug)] pub struct BitMatrix<R: Idx, C: Idx> { columns: usize, vector: Vec<Word>, phantom: PhantomData<(R, C)>, } impl<R: Idx, C: Idx> BitMatrix<R, C> { /// Create a new `rows x columns` matrix, initially empty. pub fn new(rows: usize, columns: usize) -> BitMatrix<R, C> { // For every element, we need one bit for every other // element. Round up to an even number of words. 
let words_per_row = words(columns); BitMatrix { columns, vector: vec![0; rows * words_per_row], phantom: PhantomData, } } /// The range of bits for a given row. fn range(&self, row: R) -> (usize, usize) { let row = row.index(); let words_per_row = words(self.columns); let start = row * words_per_row; (start, start + words_per_row) } /// Sets the cell at `(row, column)` to true. Put another way, add /// `column` to the bitset for `row`. /// /// Returns true if this changed the matrix, and false otherwise. pub fn add(&mut self, row: R, column: R) -> bool { let (start, _) = self.range(row); let (word, mask) = word_mask(column); let vector = &mut self.vector[..]; let v1 = vector[start + word]; let v2 = v1 | mask; vector[start + word] = v2; v1 != v2 } /// Do the bits from `row` contain `column`? Put another way, is /// the matrix cell at `(row, column)` true? Put yet another way, /// if the matrix represents (transitive) reachability, can /// `row` reach `column`? pub fn contains(&self, row: R, column: R) -> bool { let (start, _) = self.range(row); let (word, mask) = word_mask(column); (self.vector[start + word] & mask) != 0 } /// Returns those indices that are true in rows `a` and `b`. This /// is an O(n) operation where `n` is the number of elements /// (somewhat independent from the actual size of the /// intersection, in particular). pub fn intersection(&self, a: R, b: R) -> Vec<C> { let (a_start, a_end) = self.range(a); let (b_start, b_end) = self.range(b); let mut result = Vec::with_capacity(self.columns); for (base, (i, j)) in (a_start..a_end).zip(b_start..b_end).enumerate() { let mut v = self.vector[i] & self.vector[j]; for bit in 0..WORD_BITS { if v == 0 { break; } if v & 0x1 != 0 { result.push(C::new(base * WORD_BITS + bit)); } v >>= 1; } } result } /// Add the bits from row `read` to the bits from row `write`, /// return true if anything changed. 
/// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case /// `write` can reach everything that `read` can (and /// potentially more). pub fn merge(&mut self, read: R, write: R) -> bool { let (read_start, read_end) = self.range(read); let (write_start, write_end) = self.range(write); let vector = &mut self.vector[..]; let mut changed = false; for (read_index, write_index) in (read_start..read_end).zip(write_start..write_end) { let v1 = vector[write_index]; let v2 = v1 | vector[read_index]; vector[write_index] = v2; changed |= v1 != v2; } changed } /// Iterates through all the columns set to true in a given row of /// the matrix. pub fn iter<'a>(&'a self, row: R) -> BitIter<'a, C> { let (start, end) = self.range(row); BitIter { iter: self.vector[start..end].iter(), current: 0, idx: 0, marker: PhantomData, } } } /// A moderately sparse bit matrix: rows are appended lazily, but columns /// within appended rows are instantiated fully upon creation. #[derive(Clone, Debug)] pub struct SparseBitMatrix<R, C> where R: Idx, C: Idx, { columns: usize, vector: IndexVec<R, BitArray<C>>, } impl<R: Idx, C: Idx> SparseBitMatrix<R, C> { /// Create a new empty sparse bit matrix with no rows or columns. pub fn new(columns: usize) -> Self { Self { columns, vector: IndexVec::new(), } } fn ensure_row(&mut self, row: R) { let columns = self.columns; self.vector .ensure_contains_elem(row, || BitArray::new(columns)); } /// Sets the cell at `(row, column)` to true. Put another way, insert /// `column` to the bitset for `row`. /// /// Returns true if this changed the matrix, and false otherwise. pub fn add(&mut self, row: R, column: C) -> bool { self.ensure_row(row); self.vector[row].insert(column) } /// Do the bits from `row` contain `column`? Put another way, is /// the matrix cell at `(row, column)` true? Put yet another way, /// if the matrix represents (transitive) reachability, can /// `row` reach `column`? 
pub fn contains(&self, row: R, column: C) -> bool { self.vector.get(row).map_or(false, |r| r.contains(column)) } /// Add the bits from row `read` to the bits from row `write`, /// return true if anything changed. /// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case /// `write` can reach everything that `read` can (and /// potentially more). pub fn merge(&mut self, read: R, write: R) -> bool { if read == write || self.vector.get(read).is_none() { return false; } self.ensure_row(write); let (bitvec_read, bitvec_write) = self.vector.pick2_mut(read, write); bitvec_write.merge(bitvec_read) } /// Merge a row, `from`, into the `into` row. pub fn merge_into(&mut self, into: R, from: &BitArray<C>) -> bool { self.ensure_row(into); self.vector[into].merge(from) } /// Add all bits to the given row. pub fn add_all(&mut self, row: R) { self.ensure_row(row); self.vector[row].insert_all(); } /// Number of elements in the matrix. pub fn len(&self) -> usize { self.vector.len() } pub fn rows(&self) -> impl Iterator<Item = R> { self.vector.indices() } /// Iterates through all the columns set to true in a given row of /// the matrix. pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a { self.vector.get(row).into_iter().flat_map(|r| r.iter()) } /// Iterates through each row and the accompanying bit set. 
pub fn iter_enumerated<'a>(&'a self) -> impl Iterator<Item = (R, &'a BitArray<C>)> + 'a { self.vector.iter_enumerated() } pub fn row(&self, row: R) -> Option<&BitArray<C>> { self.vector.get(row) } } #[inline] fn words<C: Idx>(elements: C) -> usize { (elements.index() + WORD_BITS - 1) / WORD_BITS } #[inline] fn word_mask<C: Idx>(index: C) -> (usize, Word) { let index = index.index(); let word = index / WORD_BITS; let mask = 1 << (index % WORD_BITS); (word, mask) } #[test] fn bitvec_iter_works() { let mut bitvec: BitArray<usize> = BitArray::new(100); bitvec.insert(1); bitvec.insert(10); bitvec.insert(19); bitvec.insert(62); bitvec.insert(63); bitvec.insert(64); bitvec.insert(65); bitvec.insert(66); bitvec.insert(99); assert_eq!( bitvec.iter().collect::<Vec<_>>(), [1, 10, 19, 62, 63, 64, 65, 66, 99] ); } #[test] fn bitvec_iter_works_2() { let mut bitvec: BitArray<usize> = BitArray::new(319); bitvec.insert(0); bitvec.insert(127); bitvec.insert(191); bitvec.insert(255); bitvec.insert(319); assert_eq!(bitvec.iter().collect::<Vec<_>>(), [0, 127, 191, 255, 319]); } #[test] fn union_two_vecs() { let mut vec1: BitArray<usize> = BitArray::new(65); let mut vec2: BitArray<usize> = BitArray::new(65); assert!(vec1.insert(3)); assert!(!vec1.insert(3)); assert!(vec2.insert(5)); assert!(vec2.insert(64)); assert!(vec1.merge(&vec2)); assert!(!vec1.merge(&vec2)); assert!(vec1.contains(3)); assert!(!vec1.contains(4)); assert!(vec1.contains(5)); assert!(!vec1.contains(63)); assert!(vec1.contains(64)); } #[test] fn grow() { let mut vec1: BitVector<usize> = BitVector::with_capacity(65); for index in 0..65 { assert!(vec1.insert(index)); assert!(!vec1.insert(index)); } vec1.grow(128); // Check if the bits set before growing are still set for index in 0..65 { assert!(vec1.contains(index)); } // Check if the new bits are all un-set for index in 65..128 { assert!(!vec1.contains(index)); } // Check that we can set all new bits without running out of bounds for index in 65..128 { 
assert!(vec1.insert(index)); assert!(!vec1.insert(index)); } } #[test] fn matrix_intersection() { let mut vec1: BitMatrix<usize, usize> = BitMatrix::new(200, 200); // (*) Elements reachable from both 2 and 65. vec1.add(2, 3); vec1.add(2, 6); vec1.add(2, 10); // (*) vec1.add(2, 64); // (*) vec1.add(2, 65); vec1.add(2, 130); vec1.add(2, 160); // (*) vec1.add(64, 133); vec1.add(65, 2); vec1.add(65, 8); vec1.add(65, 10); // (*) vec1.add(65, 64); // (*) vec1.add(65, 68); vec1.add(65, 133); vec1.add(65, 160); // (*) let intersection = vec1.intersection(2, 64); assert!(intersection.is_empty()); let intersection = vec1.intersection(2, 65); assert_eq!(intersection, &[10, 64, 160]); } #[test] fn matrix_iter() { let mut matrix: BitMatrix<usize, usize> = BitMatrix::new(64, 100); matrix.add(3, 22); matrix.add(3, 75); matrix.add(2, 99); matrix.add(4, 0); matrix.merge(3, 5); let expected = [99]; let mut iter = expected.iter(); for i in matrix.iter(2) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [22, 75]; let mut iter = expected.iter(); for i in matrix.iter(3) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [0]; let mut iter = expected.iter(); for i in matrix.iter(4) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [22, 75]; let mut iter = expected.iter(); for i in matrix.iter(5) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); } #[test] fn sparse_matrix_iter() { let mut matrix: SparseBitMatrix<usize, usize> = SparseBitMatrix::new(100); matrix.add(3, 22); matrix.add(3, 75); matrix.add(2, 99); matrix.add(4, 0); matrix.merge(3, 5); let expected = [99]; let mut iter = expected.iter(); for i in matrix.iter(2) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [22, 75]; let mut iter = expected.iter(); for i in matrix.iter(3) { let j = 
*iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [0]; let mut iter = expected.iter(); for i in matrix.iter(4) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); let expected = [22, 75]; let mut iter = expected.iter(); for i in matrix.iter(5) { let j = *iter.next().unwrap(); assert_eq!(i, j); } assert!(iter.next().is_none()); }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/const_cstr.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// This macro creates a zero-overhead &CStr by adding a NUL terminator to /// the string literal passed into it at compile-time. Use it like: /// /// ``` /// let some_const_cstr = const_cstr!("abc"); /// ``` /// /// The above is roughly equivalent to: /// /// ``` /// let some_const_cstr = CStr::from_bytes_with_nul(b"abc\0").unwrap() /// ``` /// /// Note that macro only checks the string literal for internal NULs if /// debug-assertions are enabled in order to avoid runtime overhead in release /// builds. #[macro_export] macro_rules! const_cstr { ($s:expr) => ({ use std::ffi::CStr; let str_plus_nul = concat!($s, "\0"); if cfg!(debug_assertions) { CStr::from_bytes_with_nul(str_plus_nul.as_bytes()).unwrap() } else { unsafe { CStr::from_bytes_with_nul_unchecked(str_plus_nul.as_bytes()) } } }) }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/lib.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Various data structures used by the Rust compiler. The intention //! is that code in here should be not be *specific* to rustc, so that //! it can be easily unit tested and so forth. //! //! # Note //! //! This API is completely unstable and subject to change. #![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "https://www.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(unboxed_closures)] #![feature(fn_traits)] #![feature(unsize)] #![feature(specialization)] #![feature(optin_builtin_traits)] #![cfg_attr(stage0, feature(macro_vis_matcher))] #![cfg_attr(not(stage0), feature(nll))] #![feature(allow_internal_unstable)] #![feature(vec_resize_with)] #![cfg_attr(unix, feature(libc))] #![cfg_attr(test, feature(test))] extern crate core; extern crate ena; #[macro_use] extern crate log; extern crate serialize as rustc_serialize; // used by deriving #[cfg(unix)] extern crate libc; extern crate parking_lot; #[macro_use] extern crate cfg_if; extern crate stable_deref_trait; extern crate rustc_rayon as rayon; extern crate rustc_rayon_core as rayon_core; extern crate rustc_hash; extern crate serialize; #[cfg_attr(test, macro_use)] extern crate smallvec; // See librustc_cratesio_shim/Cargo.toml for a comment explaining this. 
#[allow(unused_extern_crates)] extern crate rustc_cratesio_shim; pub use rustc_serialize::hex::ToHex; pub mod svh; pub mod accumulate_vec; pub mod array_vec; pub mod base_n; pub mod bitslice; pub mod bitvec; pub mod const_cstr; pub mod flock; pub mod fx; pub mod graph; pub mod indexed_set; pub mod indexed_vec; pub mod obligation_forest; pub mod owning_ref; pub mod ptr_key; pub mod sip128; pub mod small_c_str; pub mod small_vec; pub mod snapshot_map; pub use ena::snapshot_vec; pub mod sorted_map; #[macro_use] pub mod stable_hasher; pub mod sync; pub mod tiny_list; pub mod thin_vec; pub mod transitive_relation; pub mod tuple_slice; pub use ena::unify; pub mod work_queue; pub mod fingerprint; pub struct OnDrop<F: Fn()>(pub F); impl<F: Fn()> OnDrop<F> { /// Forgets the function which prevents it from running. /// Ensure that the function owns no memory, otherwise it will be leaked. pub fn disable(self) { std::mem::forget(self); } } impl<F: Fn()> Drop for OnDrop<F> { fn drop(&mut self) { (self.0)(); } } // See comments in src/librustc/lib.rs #[doc(hidden)] pub fn __noop_fix_for_27438() {}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/base_n.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// Convert unsigned integers into a string representation with some base. /// Bases up to and including 36 can be used for case-insensitive things. use std::str; pub const MAX_BASE: usize = 64; pub const ALPHANUMERIC_ONLY: usize = 62; pub const CASE_INSENSITIVE: usize = 36; const BASE_64: &[u8; MAX_BASE as usize] = b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@$"; #[inline] pub fn push_str(mut n: u128, base: usize, output: &mut String) { debug_assert!(base >= 2 && base <= MAX_BASE); let mut s = [0u8; 128]; let mut index = 0; let base = base as u128; loop { s[index] = BASE_64[(n % base) as usize]; index += 1; n /= base; if n == 0 { break; } } s[0..index].reverse(); output.push_str(str::from_utf8(&s[0..index]).unwrap()); } #[inline] pub fn encode(n: u128, base: usize) -> String { let mut s = String::new(); push_str(n, base, &mut s); s } #[test] fn test_encode() { fn test(n: u128, base: usize) { assert_eq!(Ok(n), u128::from_str_radix(&encode(n, base), base as u32)); } for base in 2..37 { test(0, base); test(1, base); test(35, base); test(36, base); test(37, base); test(u64::max_value() as u128, base); test(u128::max_value(), base); for i in 0 .. 1_000 { test(i * 983, base); } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/stable_hasher.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::hash::{Hash, Hasher, BuildHasher}; use std::marker::PhantomData; use std::mem; use sip128::SipHasher128; /// When hashing something that ends up affecting properties like symbol names, /// we want these symbol names to be calculated independently of other factors /// like what architecture you're compiling *from*. /// /// To that end we always convert integers to little-endian format before /// hashing and the architecture dependent `isize` and `usize` types are /// extended to 64 bits if needed. pub struct StableHasher<W> { state: SipHasher128, bytes_hashed: u64, width: PhantomData<W>, } impl<W: StableHasherResult> ::std::fmt::Debug for StableHasher<W> { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { write!(f, "{:?}", self.state) } } pub trait StableHasherResult: Sized { fn finish(hasher: StableHasher<Self>) -> Self; } impl<W: StableHasherResult> StableHasher<W> { pub fn new() -> Self { StableHasher { state: SipHasher128::new_with_keys(0, 0), bytes_hashed: 0, width: PhantomData, } } pub fn finish(self) -> W { W::finish(self) } } impl StableHasherResult for u128 { fn finish(hasher: StableHasher<Self>) -> Self { let (_0, _1) = hasher.finalize(); (_0 as u128) | ((_1 as u128) << 64) } } impl StableHasherResult for u64 { fn finish(hasher: StableHasher<Self>) -> Self { hasher.finalize().0 } } impl<W> StableHasher<W> { #[inline] pub fn finalize(self) -> (u64, u64) { self.state.finish128() } #[inline] pub fn bytes_hashed(&self) -> u64 { self.bytes_hashed } } impl<W> Hasher for 
StableHasher<W> { fn finish(&self) -> u64 { panic!("use StableHasher::finalize instead"); } #[inline] fn write(&mut self, bytes: &[u8]) { self.state.write(bytes); self.bytes_hashed += bytes.len() as u64; } #[inline] fn write_u8(&mut self, i: u8) { self.state.write_u8(i); self.bytes_hashed += 1; } #[inline] fn write_u16(&mut self, i: u16) { self.state.write_u16(i.to_le()); self.bytes_hashed += 2; } #[inline] fn write_u32(&mut self, i: u32) { self.state.write_u32(i.to_le()); self.bytes_hashed += 4; } #[inline] fn write_u64(&mut self, i: u64) { self.state.write_u64(i.to_le()); self.bytes_hashed += 8; } #[inline] fn write_u128(&mut self, i: u128) { self.state.write_u128(i.to_le()); self.bytes_hashed += 16; } #[inline] fn write_usize(&mut self, i: usize) { // Always treat usize as u64 so we get the same results on 32 and 64 bit // platforms. This is important for symbol hashes when cross compiling, // for example. self.state.write_u64((i as u64).to_le()); self.bytes_hashed += 8; } #[inline] fn write_i8(&mut self, i: i8) { self.state.write_i8(i); self.bytes_hashed += 1; } #[inline] fn write_i16(&mut self, i: i16) { self.state.write_i16(i.to_le()); self.bytes_hashed += 2; } #[inline] fn write_i32(&mut self, i: i32) { self.state.write_i32(i.to_le()); self.bytes_hashed += 4; } #[inline] fn write_i64(&mut self, i: i64) { self.state.write_i64(i.to_le()); self.bytes_hashed += 8; } #[inline] fn write_i128(&mut self, i: i128) { self.state.write_i128(i.to_le()); self.bytes_hashed += 16; } #[inline] fn write_isize(&mut self, i: isize) { // Always treat isize as i64 so we get the same results on 32 and 64 bit // platforms. This is important for symbol hashes when cross compiling, // for example. self.state.write_i64((i as i64).to_le()); self.bytes_hashed += 8; } } /// Something that implements `HashStable<CTX>` can be hashed in a way that is /// stable across multiple compilation sessions. 
pub trait HashStable<CTX> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>); } /// Implement this for types that can be turned into stable keys like, for /// example, for DefId that can be converted to a DefPathHash. This is used for /// bringing maps into a predictable order before hashing them. pub trait ToStableHashKey<HCX> { type KeyType: Ord + Clone + Sized + HashStable<HCX>; fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType; } // Implement HashStable by just calling `Hash::hash()`. This works fine for // self-contained values that don't depend on the hashing context `CTX`. #[macro_export] macro_rules! impl_stable_hash_via_hash { ($t:ty) => ( impl<CTX> $crate::stable_hasher::HashStable<CTX> for $t { #[inline] fn hash_stable<W: $crate::stable_hasher::StableHasherResult>( &self, _: &mut CTX, hasher: &mut $crate::stable_hasher::StableHasher<W> ) { ::std::hash::Hash::hash(self, hasher); } } ); } impl_stable_hash_via_hash!(i8); impl_stable_hash_via_hash!(i16); impl_stable_hash_via_hash!(i32); impl_stable_hash_via_hash!(i64); impl_stable_hash_via_hash!(isize); impl_stable_hash_via_hash!(u8); impl_stable_hash_via_hash!(u16); impl_stable_hash_via_hash!(u32); impl_stable_hash_via_hash!(u64); impl_stable_hash_via_hash!(usize); impl_stable_hash_via_hash!(u128); impl_stable_hash_via_hash!(i128); impl_stable_hash_via_hash!(char); impl_stable_hash_via_hash!(()); impl<CTX> HashStable<CTX> for f32 { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { let val: u32 = unsafe { ::std::mem::transmute(*self) }; val.hash_stable(ctx, hasher); } } impl<CTX> HashStable<CTX> for f64 { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { let val: u64 = unsafe { ::std::mem::transmute(*self) }; val.hash_stable(ctx, hasher); } } impl<CTX> HashStable<CTX> for ::std::cmp::Ordering { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut 
StableHasher<W>) { (*self as i8).hash_stable(ctx, hasher); } } impl<T1: HashStable<CTX>, CTX> HashStable<CTX> for (T1,) { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { let (ref _0,) = *self; _0.hash_stable(ctx, hasher); } } impl<T1: HashStable<CTX>, T2: HashStable<CTX>, CTX> HashStable<CTX> for (T1, T2) { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { let (ref _0, ref _1) = *self; _0.hash_stable(ctx, hasher); _1.hash_stable(ctx, hasher); } } impl<T1, T2, T3, CTX> HashStable<CTX> for (T1, T2, T3) where T1: HashStable<CTX>, T2: HashStable<CTX>, T3: HashStable<CTX>, { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { let (ref _0, ref _1, ref _2) = *self; _0.hash_stable(ctx, hasher); _1.hash_stable(ctx, hasher); _2.hash_stable(ctx, hasher); } } impl<T: HashStable<CTX>, CTX> HashStable<CTX> for [T] { default fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { self.len().hash_stable(ctx, hasher); for item in self { item.hash_stable(ctx, hasher); } } } impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Vec<T> { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (&self[..]).hash_stable(ctx, hasher); } } impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for Box<T> { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(ctx, hasher); } } impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(ctx, hasher); } } impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::sync::Arc<T> { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(ctx, hasher); } } impl<CTX> 
HashStable<CTX> for str { #[inline] fn hash_stable<W: StableHasherResult>(&self, _: &mut CTX, hasher: &mut StableHasher<W>) { self.len().hash(hasher); self.as_bytes().hash(hasher); } } impl<CTX> HashStable<CTX> for String { #[inline] fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) { (&self[..]).hash_stable(hcx, hasher); } } impl<HCX> ToStableHashKey<HCX> for String { type KeyType = String; #[inline] fn to_stable_hash_key(&self, _: &HCX) -> Self::KeyType { self.clone() } } impl<CTX> HashStable<CTX> for bool { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (if *self { 1u8 } else { 0u8 }).hash_stable(ctx, hasher); } } impl<T, CTX> HashStable<CTX> for Option<T> where T: HashStable<CTX> { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { if let Some(ref value) = *self { 1u8.hash_stable(ctx, hasher); value.hash_stable(ctx, hasher); } else { 0u8.hash_stable(ctx, hasher); } } } impl<T1, T2, CTX> HashStable<CTX> for Result<T1, T2> where T1: HashStable<CTX>, T2: HashStable<CTX>, { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(ctx, hasher); match *self { Ok(ref x) => x.hash_stable(ctx, hasher), Err(ref x) => x.hash_stable(ctx, hasher), } } } impl<'a, T, CTX> HashStable<CTX> for &'a T where T: HashStable<CTX> + ?Sized { #[inline] fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { (**self).hash_stable(ctx, hasher); } } impl<T, CTX> HashStable<CTX> for ::std::mem::Discriminant<T> { #[inline] fn hash_stable<W: StableHasherResult>(&self, _: &mut CTX, hasher: &mut StableHasher<W>) { ::std::hash::Hash::hash(self, hasher); } } impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<I, T> where T: HashStable<CTX>, { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: 
&mut StableHasher<W>) { self.len().hash_stable(ctx, hasher); for v in &self.raw { v.hash_stable(ctx, hasher); } } } impl<I: ::indexed_vec::Idx, CTX> HashStable<CTX> for ::indexed_set::IdxSet<I> { fn hash_stable<W: StableHasherResult>(&self, ctx: &mut CTX, hasher: &mut StableHasher<W>) { self.words().hash_stable(ctx, hasher); } } impl_stable_hash_via_hash!(::std::path::Path); impl_stable_hash_via_hash!(::std::path::PathBuf); impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R> where K: ToStableHashKey<HCX> + Eq + Hash, V: HashStable<HCX>, R: BuildHasher, { #[inline] fn hash_stable<W: StableHasherResult>(&self, hcx: &mut HCX, hasher: &mut StableHasher<W>) { hash_stable_hashmap(hcx, hasher, self, ToStableHashKey::to_stable_hash_key); } } impl<K, R, HCX> HashStable<HCX> for ::std::collections::HashSet<K, R> where K: ToStableHashKey<HCX> + Eq + Hash, R: BuildHasher, { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut HCX, hasher: &mut StableHasher<W>) { let mut keys: Vec<_> = self.iter() .map(|k| k.to_stable_hash_key(hcx)) .collect(); keys.sort_unstable(); keys.hash_stable(hcx, hasher); } } impl<K, V, HCX> HashStable<HCX> for ::std::collections::BTreeMap<K, V> where K: ToStableHashKey<HCX>, V: HashStable<HCX>, { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut HCX, hasher: &mut StableHasher<W>) { let mut entries: Vec<_> = self.iter() .map(|(k, v)| (k.to_stable_hash_key(hcx), v)) .collect(); entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2)); entries.hash_stable(hcx, hasher); } } impl<K, HCX> HashStable<HCX> for ::std::collections::BTreeSet<K> where K: ToStableHashKey<HCX>, { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut HCX, hasher: &mut StableHasher<W>) { let mut keys: Vec<_> = self.iter() .map(|k| k.to_stable_hash_key(hcx)) .collect(); keys.sort_unstable(); keys.hash_stable(hcx, hasher); } } pub fn hash_stable_hashmap<HCX, K, V, R, SK, F, W>( hcx: &mut HCX, hasher: &mut StableHasher<W>, map: 
&::std::collections::HashMap<K, V, R>, to_stable_hash_key: F) where K: Eq + Hash, V: HashStable<HCX>, R: BuildHasher, SK: HashStable<HCX> + Ord + Clone, F: Fn(&K, &HCX) -> SK, W: StableHasherResult, { let mut entries: Vec<_> = map.iter() .map(|(k, v)| (to_stable_hash_key(k, hcx), v)) .collect(); entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2)); entries.hash_stable(hcx, hasher); } /// A vector container that makes sure that its items are hashed in a stable /// order. pub struct StableVec<T>(Vec<T>); impl<T> StableVec<T> { pub fn new(v: Vec<T>) -> Self { StableVec(v) } } impl<T> ::std::ops::Deref for StableVec<T> { type Target = Vec<T>; fn deref(&self) -> &Vec<T> { &self.0 } } impl<T, HCX> HashStable<HCX> for StableVec<T> where T: HashStable<HCX> + ToStableHashKey<HCX> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut HCX, hasher: &mut StableHasher<W>) { let StableVec(ref v) = *self; let mut sorted: Vec<_> = v.iter() .map(|x| x.to_stable_hash_key(hcx)) .collect(); sorted.sort_unstable(); sorted.hash_stable(hcx, hasher); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/sync.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! This module defines types which are thread safe if cfg!(parallel_queries) is true.
//!
//! `Lrc` is an alias of either Rc or Arc.
//!
//! `Lock` is a mutex.
//! It internally uses `parking_lot::Mutex` if cfg!(parallel_queries) is true,
//! `RefCell` otherwise.
//!
//! `RwLock` is a read-write lock.
//! It internally uses `parking_lot::RwLock` if cfg!(parallel_queries) is true,
//! `RefCell` otherwise.
//!
//! `LockCell` is a thread safe version of `Cell`, with `set` and `get` operations.
//! It can never deadlock. It uses `Cell` when
//! cfg!(parallel_queries) is false, otherwise it is a `Lock`.
//!
//! `MTLock` is a mutex which disappears if cfg!(parallel_queries) is false.
//!
//! `MTRef` is an immutable reference if cfg!(parallel_queries), and a mutable reference otherwise.
//!
//! `rustc_erase_owner!` erases an OwningRef owner into Erased or Erased + Send + Sync
//! depending on the value of cfg!(parallel_queries).
use std::collections::HashMap; use std::hash::{Hash, BuildHasher}; use std::cmp::Ordering; use std::marker::PhantomData; use std::fmt::Debug; use std::fmt::Formatter; use std::fmt; use std::ops::{Deref, DerefMut}; use owning_ref::{Erased, OwningRef}; pub fn serial_join<A, B, RA, RB>(oper_a: A, oper_b: B) -> (RA, RB) where A: FnOnce() -> RA, B: FnOnce() -> RB { (oper_a(), oper_b()) } pub struct SerialScope; impl SerialScope { pub fn spawn<F>(&self, f: F) where F: FnOnce(&SerialScope) { f(self) } } pub fn serial_scope<F, R>(f: F) -> R where F: FnOnce(&SerialScope) -> R { f(&SerialScope) } cfg_if! { if #[cfg(not(parallel_queries))] { pub auto trait Send {} pub auto trait Sync {} impl<T: ?Sized> Send for T {} impl<T: ?Sized> Sync for T {} #[macro_export] macro_rules! rustc_erase_owner { ($v:expr) => { $v.erase_owner() } } pub use self::serial_join as join; pub use self::serial_scope as scope; pub use std::iter::Iterator as ParallelIterator; pub fn par_iter<T: IntoIterator>(t: T) -> T::IntoIter { t.into_iter() } pub type MetadataRef = OwningRef<Box<dyn Erased>, [u8]>; pub use std::rc::Rc as Lrc; pub use std::rc::Weak as Weak; pub use std::cell::Ref as ReadGuard; pub use std::cell::RefMut as WriteGuard; pub use std::cell::RefMut as LockGuard; use std::cell::RefCell as InnerRwLock; use std::cell::RefCell as InnerLock; use std::cell::Cell; #[derive(Debug)] pub struct WorkerLocal<T>(OneThread<T>); impl<T> WorkerLocal<T> { /// Creates a new worker local where the `initial` closure computes the /// value this worker local should take for each thread in the thread pool. 
#[inline] pub fn new<F: FnMut(usize) -> T>(mut f: F) -> WorkerLocal<T> { WorkerLocal(OneThread::new(f(0))) } /// Returns the worker-local value for each thread #[inline] pub fn into_inner(self) -> Vec<T> { vec![OneThread::into_inner(self.0)] } } impl<T> Deref for WorkerLocal<T> { type Target = T; #[inline(always)] fn deref(&self) -> &T { &*self.0 } } pub type MTRef<'a, T> = &'a mut T; #[derive(Debug)] pub struct MTLock<T>(T); impl<T> MTLock<T> { #[inline(always)] pub fn new(inner: T) -> Self { MTLock(inner) } #[inline(always)] pub fn into_inner(self) -> T { self.0 } #[inline(always)] pub fn get_mut(&mut self) -> &mut T { &mut self.0 } #[inline(always)] pub fn lock(&self) -> &T { &self.0 } #[inline(always)] pub fn lock_mut(&mut self) -> &mut T { &mut self.0 } } // FIXME: Probably a bad idea (in the threaded case) impl<T: Clone> Clone for MTLock<T> { #[inline] fn clone(&self) -> Self { MTLock(self.0.clone()) } } pub struct LockCell<T>(Cell<T>); impl<T> LockCell<T> { #[inline(always)] pub fn new(inner: T) -> Self { LockCell(Cell::new(inner)) } #[inline(always)] pub fn into_inner(self) -> T { self.0.into_inner() } #[inline(always)] pub fn set(&self, new_inner: T) { self.0.set(new_inner); } #[inline(always)] pub fn get(&self) -> T where T: Copy { self.0.get() } #[inline(always)] pub fn set_mut(&mut self, new_inner: T) { self.0.set(new_inner); } #[inline(always)] pub fn get_mut(&mut self) -> T where T: Copy { self.0.get() } } impl<T> LockCell<Option<T>> { #[inline(always)] pub fn take(&self) -> Option<T> { unsafe { (*self.0.as_ptr()).take() } } } } else { pub use std::marker::Send as Send; pub use std::marker::Sync as Sync; pub use parking_lot::RwLockReadGuard as ReadGuard; pub use parking_lot::RwLockWriteGuard as WriteGuard; pub use parking_lot::MutexGuard as LockGuard; pub use std::sync::Arc as Lrc; pub use std::sync::Weak as Weak; pub type MTRef<'a, T> = &'a T; #[derive(Debug)] pub struct MTLock<T>(Lock<T>); impl<T> MTLock<T> { #[inline(always)] pub fn new(inner: T) 
-> Self { MTLock(Lock::new(inner)) } #[inline(always)] pub fn into_inner(self) -> T { self.0.into_inner() } #[inline(always)] pub fn get_mut(&mut self) -> &mut T { self.0.get_mut() } #[inline(always)] pub fn lock(&self) -> LockGuard<T> { self.0.lock() } #[inline(always)] pub fn lock_mut(&self) -> LockGuard<T> { self.lock() } } use parking_lot::Mutex as InnerLock; use parking_lot::RwLock as InnerRwLock; use std; use std::thread; pub use rayon::{join, scope}; pub use rayon_core::WorkerLocal; pub use rayon::iter::ParallelIterator; use rayon::iter::IntoParallelIterator; pub fn par_iter<T: IntoParallelIterator>(t: T) -> T::Iter { t.into_par_iter() } pub type MetadataRef = OwningRef<Box<dyn Erased + Send + Sync>, [u8]>; /// This makes locks panic if they are already held. /// It is only useful when you are running in a single thread const ERROR_CHECKING: bool = false; #[macro_export] macro_rules! rustc_erase_owner { ($v:expr) => {{ let v = $v; ::rustc_data_structures::sync::assert_send_val(&v); v.erase_send_sync_owner() }} } pub struct LockCell<T>(Lock<T>); impl<T> LockCell<T> { #[inline(always)] pub fn new(inner: T) -> Self { LockCell(Lock::new(inner)) } #[inline(always)] pub fn into_inner(self) -> T { self.0.into_inner() } #[inline(always)] pub fn set(&self, new_inner: T) { *self.0.lock() = new_inner; } #[inline(always)] pub fn get(&self) -> T where T: Copy { *self.0.lock() } #[inline(always)] pub fn set_mut(&mut self, new_inner: T) { *self.0.get_mut() = new_inner; } #[inline(always)] pub fn get_mut(&mut self) -> T where T: Copy { *self.0.get_mut() } } impl<T> LockCell<Option<T>> { #[inline(always)] pub fn take(&self) -> Option<T> { self.0.lock().take() } } } } pub fn assert_sync<T: ?Sized + Sync>() {} pub fn assert_send_val<T: ?Sized + Send>(_t: &T) {} pub fn assert_send_sync_val<T: ?Sized + Sync + Send>(_t: &T) {} pub trait HashMapExt<K, V> { /// Same as HashMap::insert, but it may panic if there's already an /// entry for `key` with a value not equal to `value` fn 
insert_same(&mut self, key: K, value: V); } impl<K: Eq + Hash, V: Eq, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> { fn insert_same(&mut self, key: K, value: V) { self.entry(key).and_modify(|old| assert!(*old == value)).or_insert(value); } } /// A type whose inner value can be written once and then will stay read-only // This contains a PhantomData<T> since this type conceptually owns a T outside the Mutex once // initialized. This ensures that Once<T> is Sync only if T is. If we did not have PhantomData<T> // we could send a &Once<Cell<bool>> to multiple threads and call `get` on it to get access // to &Cell<bool> on those threads. pub struct Once<T>(Lock<Option<T>>, PhantomData<T>); impl<T> Once<T> { /// Creates an Once value which is uninitialized #[inline(always)] pub fn new() -> Self { Once(Lock::new(None), PhantomData) } /// Consumes the value and returns Some(T) if it was initialized #[inline(always)] pub fn into_inner(self) -> Option<T> { self.0.into_inner() } /// Tries to initialize the inner value to `value`. /// Returns `None` if the inner value was uninitialized and `value` was consumed setting it /// otherwise if the inner value was already set it returns `value` back to the caller #[inline] pub fn try_set(&self, value: T) -> Option<T> { let mut lock = self.0.lock(); if lock.is_some() { return Some(value); } *lock = Some(value); None } /// Tries to initialize the inner value to `value`. 
/// Returns `None` if the inner value was uninitialized and `value` was consumed setting it /// otherwise if the inner value was already set it asserts that `value` is equal to the inner /// value and then returns `value` back to the caller #[inline] pub fn try_set_same(&self, value: T) -> Option<T> where T: Eq { let mut lock = self.0.lock(); if let Some(ref inner) = *lock { assert!(*inner == value); return Some(value); } *lock = Some(value); None } /// Tries to initialize the inner value to `value` and panics if it was already initialized #[inline] pub fn set(&self, value: T) { assert!(self.try_set(value).is_none()); } /// Tries to initialize the inner value by calling the closure while ensuring that no-one else /// can access the value in the mean time by holding a lock for the duration of the closure. /// If the value was already initialized the closure is not called and `false` is returned, /// otherwise if the value from the closure initializes the inner value, `true` is returned #[inline] pub fn init_locking<F: FnOnce() -> T>(&self, f: F) -> bool { let mut lock = self.0.lock(); if lock.is_some() { return false; } *lock = Some(f()); true } /// Tries to initialize the inner value by calling the closure without ensuring that no-one /// else can access it. This mean when this is called from multiple threads, multiple /// closures may concurrently be computing a value which the inner value should take. /// Only one of these closures are used to actually initialize the value. /// If some other closure already set the value, /// we return the value our closure computed wrapped in a `Option`. /// If our closure set the value, `None` is returned. /// If the value is already initialized, the closure is not called and `None` is returned. 
#[inline] pub fn init_nonlocking<F: FnOnce() -> T>(&self, f: F) -> Option<T> { if self.0.lock().is_some() { None } else { self.try_set(f()) } } /// Tries to initialize the inner value by calling the closure without ensuring that no-one /// else can access it. This mean when this is called from multiple threads, multiple /// closures may concurrently be computing a value which the inner value should take. /// Only one of these closures are used to actually initialize the value. /// If some other closure already set the value, we assert that it our closure computed /// a value equal to the value already set and then /// we return the value our closure computed wrapped in a `Option`. /// If our closure set the value, `None` is returned. /// If the value is already initialized, the closure is not called and `None` is returned. #[inline] pub fn init_nonlocking_same<F: FnOnce() -> T>(&self, f: F) -> Option<T> where T: Eq { if self.0.lock().is_some() { None } else { self.try_set_same(f()) } } /// Tries to get a reference to the inner value, returns `None` if it is not yet initialized #[inline(always)] pub fn try_get(&self) -> Option<&T> { let lock = &*self.0.lock(); if let Some(ref inner) = *lock { // This is safe since we won't mutate the inner value unsafe { Some(&*(inner as *const T)) } } else { None } } /// Gets reference to the inner value, panics if it is not yet initialized #[inline(always)] pub fn get(&self) -> &T { self.try_get().expect("value was not set") } /// Gets reference to the inner value, panics if it is not yet initialized #[inline(always)] pub fn borrow(&self) -> &T { self.get() } } impl<T: Copy + Debug> Debug for LockCell<T> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { f.debug_struct("LockCell") .field("value", &self.get()) .finish() } } impl<T:Default> Default for LockCell<T> { /// Creates a `LockCell<T>`, with the `Default` value for T. 
#[inline] fn default() -> LockCell<T> { LockCell::new(Default::default()) } } impl<T:PartialEq + Copy> PartialEq for LockCell<T> { #[inline] fn eq(&self, other: &LockCell<T>) -> bool { self.get() == other.get() } } impl<T:Eq + Copy> Eq for LockCell<T> {} impl<T:PartialOrd + Copy> PartialOrd for LockCell<T> { #[inline] fn partial_cmp(&self, other: &LockCell<T>) -> Option<Ordering> { self.get().partial_cmp(&other.get()) } #[inline] fn lt(&self, other: &LockCell<T>) -> bool { self.get() < other.get() } #[inline] fn le(&self, other: &LockCell<T>) -> bool { self.get() <= other.get() } #[inline] fn gt(&self, other: &LockCell<T>) -> bool { self.get() > other.get() } #[inline] fn ge(&self, other: &LockCell<T>) -> bool { self.get() >= other.get() } } impl<T:Ord + Copy> Ord for LockCell<T> { #[inline] fn cmp(&self, other: &LockCell<T>) -> Ordering { self.get().cmp(&other.get()) } } #[derive(Debug)] pub struct Lock<T>(InnerLock<T>); impl<T> Lock<T> { #[inline(always)] pub fn new(inner: T) -> Self { Lock(InnerLock::new(inner)) } #[inline(always)] pub fn into_inner(self) -> T { self.0.into_inner() } #[inline(always)] pub fn get_mut(&mut self) -> &mut T { self.0.get_mut() } #[cfg(parallel_queries)] #[inline(always)] pub fn try_lock(&self) -> Option<LockGuard<T>> { self.0.try_lock() } #[cfg(not(parallel_queries))] #[inline(always)] pub fn try_lock(&self) -> Option<LockGuard<T>> { self.0.try_borrow_mut().ok() } #[cfg(parallel_queries)] #[inline(always)] pub fn lock(&self) -> LockGuard<T> { if ERROR_CHECKING { self.0.try_lock().expect("lock was already held") } else { self.0.lock() } } #[cfg(not(parallel_queries))] #[inline(always)] pub fn lock(&self) -> LockGuard<T> { self.0.borrow_mut() } #[inline(always)] pub fn with_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R { f(&mut *self.lock()) } #[inline(always)] pub fn borrow(&self) -> LockGuard<T> { self.lock() } #[inline(always)] pub fn borrow_mut(&self) -> LockGuard<T> { self.lock() } } impl<T: Default> Default for Lock<T> { 
#[inline] fn default() -> Self { Lock::new(T::default()) } } // FIXME: Probably a bad idea impl<T: Clone> Clone for Lock<T> { #[inline] fn clone(&self) -> Self { Lock::new(self.borrow().clone()) } } #[derive(Debug)] pub struct RwLock<T>(InnerRwLock<T>); impl<T> RwLock<T> { #[inline(always)] pub fn new(inner: T) -> Self { RwLock(InnerRwLock::new(inner)) } #[inline(always)] pub fn into_inner(self) -> T { self.0.into_inner() } #[inline(always)] pub fn get_mut(&mut self) -> &mut T { self.0.get_mut() } #[cfg(not(parallel_queries))] #[inline(always)] pub fn read(&self) -> ReadGuard<T> { self.0.borrow() } #[cfg(parallel_queries)] #[inline(always)] pub fn read(&self) -> ReadGuard<T> { if ERROR_CHECKING { self.0.try_read().expect("lock was already held") } else { self.0.read() } } #[inline(always)] pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R { f(&*self.read()) } #[cfg(not(parallel_queries))] #[inline(always)] pub fn try_write(&self) -> Result<WriteGuard<T>, ()> { self.0.try_borrow_mut().map_err(|_| ()) } #[cfg(parallel_queries)] #[inline(always)] pub fn try_write(&self) -> Result<WriteGuard<T>, ()> { self.0.try_write().ok_or(()) } #[cfg(not(parallel_queries))] #[inline(always)] pub fn write(&self) -> WriteGuard<T> { self.0.borrow_mut() } #[cfg(parallel_queries)] #[inline(always)] pub fn write(&self) -> WriteGuard<T> { if ERROR_CHECKING { self.0.try_write().expect("lock was already held") } else { self.0.write() } } #[inline(always)] pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R { f(&mut *self.write()) } #[inline(always)] pub fn borrow(&self) -> ReadGuard<T> { self.read() } #[inline(always)] pub fn borrow_mut(&self) -> WriteGuard<T> { self.write() } } // FIXME: Probably a bad idea impl<T: Clone> Clone for RwLock<T> { #[inline] fn clone(&self) -> Self { RwLock::new(self.borrow().clone()) } } /// A type which only allows its inner value to be used in one thread. /// It will panic if it is used on multiple threads. 
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)] pub struct OneThread<T> { #[cfg(parallel_queries)] thread: thread::ThreadId, inner: T, } #[cfg(parallel_queries)] unsafe impl<T> std::marker::Sync for OneThread<T> {} #[cfg(parallel_queries)] unsafe impl<T> std::marker::Send for OneThread<T> {} impl<T> OneThread<T> { #[inline(always)] fn check(&self) { #[cfg(parallel_queries)] assert_eq!(thread::current().id(), self.thread); } #[inline(always)] pub fn new(inner: T) -> Self { OneThread { #[cfg(parallel_queries)] thread: thread::current().id(), inner, } } #[inline(always)] pub fn into_inner(value: Self) -> T { value.check(); value.inner } } impl<T> Deref for OneThread<T> { type Target = T; fn deref(&self) -> &T { self.check(); &self.inner } } impl<T> DerefMut for OneThread<T> { fn deref_mut(&mut self) -> &mut T { self.check(); &mut self.inner } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/tiny_list.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A singly-linked list. //! //! Using this data structure only makes sense under very specific //! circumstances: //! //! - If you have a list that rarely stores more than one element, then this //! data-structure can store the element without allocating and only uses as //! much space as a `Option<(T, usize)>`. If T can double as the `Option` //! discriminant, it will even only be as large as `T, usize`. //! //! If you expect to store more than 1 element in the common case, steer clear //! and use a `Vec<T>`, `Box<[T]>`, or a `SmallVec<T>`. 
use std::mem; #[derive(Clone, Hash, Debug, PartialEq)] pub struct TinyList<T: PartialEq> { head: Option<Element<T>> } impl<T: PartialEq> TinyList<T> { #[inline] pub fn new() -> TinyList<T> { TinyList { head: None } } #[inline] pub fn new_single(data: T) -> TinyList<T> { TinyList { head: Some(Element { data, next: None, }) } } #[inline] pub fn insert(&mut self, data: T) { self.head = Some(Element { data, next: mem::replace(&mut self.head, None).map(Box::new), }); } #[inline] pub fn remove(&mut self, data: &T) -> bool { self.head = match self.head { Some(ref mut head) if head.data == *data => { mem::replace(&mut head.next, None).map(|x| *x) } Some(ref mut head) => return head.remove_next(data), None => return false, }; true } #[inline] pub fn contains(&self, data: &T) -> bool { if let Some(ref head) = self.head { head.contains(data) } else { false } } #[inline] pub fn len(&self) -> usize { if let Some(ref head) = self.head { head.len() } else { 0 } } } #[derive(Clone, Hash, Debug, PartialEq)] struct Element<T: PartialEq> { data: T, next: Option<Box<Element<T>>>, } impl<T: PartialEq> Element<T> { fn remove_next(&mut self, data: &T) -> bool { let new_next = if let Some(ref mut next) = self.next { if next.data != *data { return next.remove_next(data) } else { mem::replace(&mut next.next, None) } } else { return false }; self.next = new_next; true } fn len(&self) -> usize { if let Some(ref next) = self.next { 1 + next.len() } else { 1 } } fn contains(&self, data: &T) -> bool { if self.data == *data { return true } if let Some(ref next) = self.next { next.contains(data) } else { false } } } #[cfg(test)] mod test { use super::*; extern crate test; use self::test::Bencher; #[test] fn test_contains_and_insert() { fn do_insert(i : u32) -> bool { i % 2 == 0 } let mut list = TinyList::new(); for i in 0 .. 10 { for j in 0 .. 
i { if do_insert(j) { assert!(list.contains(&j)); } else { assert!(!list.contains(&j)); } } assert!(!list.contains(&i)); if do_insert(i) { list.insert(i); assert!(list.contains(&i)); } } } #[test] fn test_remove_first() { let mut list = TinyList::new(); list.insert(1); list.insert(2); list.insert(3); list.insert(4); assert_eq!(list.len(), 4); assert!(list.remove(&4)); assert!(!list.contains(&4)); assert_eq!(list.len(), 3); assert!(list.contains(&1)); assert!(list.contains(&2)); assert!(list.contains(&3)); } #[test] fn test_remove_last() { let mut list = TinyList::new(); list.insert(1); list.insert(2); list.insert(3); list.insert(4); assert_eq!(list.len(), 4); assert!(list.remove(&1)); assert!(!list.contains(&1)); assert_eq!(list.len(), 3); assert!(list.contains(&2)); assert!(list.contains(&3)); assert!(list.contains(&4)); } #[test] fn test_remove_middle() { let mut list = TinyList::new(); list.insert(1); list.insert(2); list.insert(3); list.insert(4); assert_eq!(list.len(), 4); assert!(list.remove(&2)); assert!(!list.contains(&2)); assert_eq!(list.len(), 3); assert!(list.contains(&1)); assert!(list.contains(&3)); assert!(list.contains(&4)); } #[test] fn test_remove_single() { let mut list = TinyList::new(); list.insert(1); assert_eq!(list.len(), 1); assert!(list.remove(&1)); assert!(!list.contains(&1)); assert_eq!(list.len(), 0); } #[bench] fn bench_insert_empty(b: &mut Bencher) { b.iter(|| { let mut list = TinyList::new(); list.insert(1); }) } #[bench] fn bench_insert_one(b: &mut Bencher) { b.iter(|| { let mut list = TinyList::new_single(0); list.insert(1); }) } #[bench] fn bench_remove_empty(b: &mut Bencher) { b.iter(|| { TinyList::new().remove(&1) }); } #[bench] fn bench_remove_unknown(b: &mut Bencher) { b.iter(|| { TinyList::new_single(0).remove(&1) }); } #[bench] fn bench_remove_one(b: &mut Bencher) { b.iter(|| { TinyList::new_single(1).remove(&1) }); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/ptr_key.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::{hash, ptr};
use std::ops::Deref;

/// A wrapper around a reference that compares and hashes by the *address*
/// of the referent rather than by its value.
///
/// This makes it usable as a key in sets/maps indexed by pointer identity
/// without resorting to `unsafe` pointer casts at the call sites.
#[derive(Debug)]
pub struct PtrKey<'a, T: 'a>(pub &'a T);

impl<'a, T> Clone for PtrKey<'a, T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<'a, T> Copy for PtrKey<'a, T> {}

impl<'a, T> PartialEq for PtrKey<'a, T> {
    // Two keys are equal exactly when they wrap the same address.
    fn eq(&self, other: &Self) -> bool {
        ptr::eq(self.0, other.0)
    }
}

impl<'a, T> Eq for PtrKey<'a, T> {}

impl<'a, T> hash::Hash for PtrKey<'a, T> {
    // Hash the address, not the pointee, so hashing stays consistent
    // with the pointer-identity `eq` above.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        (self.0 as *const T).hash(state)
    }
}

impl<'a, T> Deref for PtrKey<'a, T> {
    type Target = T;

    fn deref(&self) -> &T {
        self.0
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/small_c_str.rs
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::ffi;
use std::ops::Deref;

// Strings whose length (including the nul terminator) fits in this many
// bytes are stored inline; longer ones fall back to a heap `CString`.
const SIZE: usize = 38;

/// Like SmallVec but for C strings.
#[derive(Clone)]
pub enum SmallCStr {
    // Inline storage: `data[.. len_with_nul]` is a valid nul-terminated
    // C string (validated once at construction in `new`).
    OnStack {
        data: [u8; SIZE],
        len_with_nul: u8,
    },
    // Heap fallback for strings of length >= SIZE.
    OnHeap {
        data: ffi::CString,
    }
}

impl SmallCStr {
    /// Builds a `SmallCStr` from `s`, storing it inline when it fits.
    ///
    /// Panics if `s` contains an interior nul byte (in the inline case via
    /// the explicit validity check, in the heap case via `CString::new`'s
    /// `unwrap`).
    #[inline]
    pub fn new(s: &str) -> SmallCStr {
        // Strict `<` leaves one byte of room for the nul terminator.
        if s.len() < SIZE {
            let mut data = [0; SIZE];
            data[.. s.len()].copy_from_slice(s.as_bytes());
            let len_with_nul = s.len() + 1;

            // Make sure once that this is a valid CStr
            if let Err(e) = ffi::CStr::from_bytes_with_nul(&data[.. len_with_nul]) {
                panic!("The string \"{}\" cannot be converted into a CStr: {}", s, e);
            }

            SmallCStr::OnStack {
                data,
                len_with_nul: len_with_nul as u8,
            }
        } else {
            SmallCStr::OnHeap {
                data: ffi::CString::new(s).unwrap()
            }
        }
    }

    /// Returns a view of the stored string as a `&CStr`.
    #[inline]
    pub fn as_c_str(&self) -> &ffi::CStr {
        match *self {
            SmallCStr::OnStack { ref data, len_with_nul } => {
                unsafe {
                    // SAFETY: `new` already validated this prefix of `data`
                    // with `CStr::from_bytes_with_nul`, so it is nul-terminated
                    // and free of interior nuls.
                    let slice = &data[.. len_with_nul as usize];
                    ffi::CStr::from_bytes_with_nul_unchecked(slice)
                }
            }
            SmallCStr::OnHeap { ref data } => {
                data.as_c_str()
            }
        }
    }

    /// Length of the string in bytes, *including* the nul terminator.
    #[inline]
    pub fn len_with_nul(&self) -> usize {
        match *self {
            SmallCStr::OnStack { len_with_nul, .. } => {
                len_with_nul as usize
            }
            SmallCStr::OnHeap { ref data } => {
                data.as_bytes_with_nul().len()
            }
        }
    }
}

impl Deref for SmallCStr {
    type Target = ffi::CStr;

    fn deref(&self) -> &ffi::CStr {
        self.as_c_str()
    }
}

#[test]
fn short() {
    const TEXT: &str = "abcd";
    let reference = ffi::CString::new(TEXT.to_string()).unwrap();

    let scs = SmallCStr::new(TEXT);

    assert_eq!(scs.len_with_nul(), TEXT.len() + 1);
    assert_eq!(scs.as_c_str(), reference.as_c_str());
    assert!(if let SmallCStr::OnStack { .. } = scs { true } else { false });
}

#[test]
fn empty() {
    const TEXT: &str = "";
    let reference = ffi::CString::new(TEXT.to_string()).unwrap();

    let scs = SmallCStr::new(TEXT);

    assert_eq!(scs.len_with_nul(), TEXT.len() + 1);
    assert_eq!(scs.as_c_str(), reference.as_c_str());
    assert!(if let SmallCStr::OnStack { .. } = scs { true } else { false });
}

#[test]
fn long() {
    // 150 characters: too long for inline storage, exercises the heap path.
    const TEXT: &str = "01234567890123456789012345678901234567890123456789\
                        01234567890123456789012345678901234567890123456789\
                        01234567890123456789012345678901234567890123456789";
    let reference = ffi::CString::new(TEXT.to_string()).unwrap();

    let scs = SmallCStr::new(TEXT);

    assert_eq!(scs.len_with_nul(), TEXT.len() + 1);
    assert_eq!(scs.as_c_str(), reference.as_c_str());
    assert!(if let SmallCStr::OnHeap { .. } = scs { true } else { false });
}

#[test]
#[should_panic]
fn internal_nul() {
    let _ = SmallCStr::new("abcd\0def");
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/work_queue.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use indexed_set::IdxSet;
use indexed_vec::Idx;
use std::collections::VecDeque;

/// A work queue is a handy data structure for tracking work left to
/// do. (For example, basic blocks left to process.) It is basically a
/// de-duplicating queue; so attempting to insert X if X is already
/// enqueued has no effect. This implementation assumes that the
/// elements are dense indices, so it can allocate the queue to size
/// and also use a bit set to track occupancy.
pub struct WorkQueue<T: Idx> {
    // FIFO order of the pending elements.
    deque: VecDeque<T>,
    // Mirrors the contents of `deque`; used for O(1) de-duplication.
    set: IdxSet<T>,
}

impl<T: Idx> WorkQueue<T> {
    /// Create a new work queue with all the elements from (0..len).
    #[inline]
    pub fn with_all(len: usize) -> Self {
        WorkQueue {
            deque: (0..len).map(T::new).collect(),
            set: IdxSet::new_filled(len),
        }
    }

    /// Create a new work queue that starts empty, where elements range from (0..len).
    #[inline]
    pub fn with_none(len: usize) -> Self {
        WorkQueue {
            deque: VecDeque::with_capacity(len),
            set: IdxSet::new_empty(len),
        }
    }

    /// Attempt to enqueue `element` in the work queue. Returns false if it was already present.
    #[inline]
    pub fn insert(&mut self, element: T) -> bool {
        // `add` returns true only if the bit was not already set.
        if self.set.add(&element) {
            self.deque.push_back(element);
            true
        } else {
            false
        }
    }

    // NOTE(review): the original doc comment here was a copy-paste of
    // `insert`'s and described enqueueing; corrected to match the code.
    /// Attempt to dequeue an element from the front of the work queue.
    /// Returns `None` if the queue is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if let Some(element) = self.deque.pop_front() {
            self.set.remove(&element);
            Some(element)
        } else {
            None
        }
    }

    /// True if nothing is enqueued.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.deque.is_empty()
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/svh.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Calculation and management of a Strict Version Hash for crates //! //! The SVH is used for incremental compilation to track when HIR //! nodes have changed between compilations, and also to detect //! mismatches where we have two versions of the same crate that were //! compiled from distinct sources. use std::fmt; use std::hash::{Hash, Hasher}; use serialize::{Encodable, Decodable, Encoder, Decoder}; use stable_hasher; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct Svh { hash: u64, } impl Svh { /// Create a new `Svh` given the hash. If you actually want to /// compute the SVH from some HIR, you want the `calculate_svh` /// function found in `librustc_incremental`. 
pub fn new(hash: u64) -> Svh { Svh { hash: hash } } pub fn as_u64(&self) -> u64 { self.hash } pub fn to_string(&self) -> String { format!("{:016x}", self.hash) } } impl Hash for Svh { fn hash<H>(&self, state: &mut H) where H: Hasher { self.hash.to_le().hash(state); } } impl fmt::Display for Svh { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad(&self.to_string()) } } impl Encodable for Svh { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u64(self.as_u64().to_le()) } } impl Decodable for Svh { fn decode<D: Decoder>(d: &mut D) -> Result<Svh, D::Error> { d.read_u64() .map(u64::from_le) .map(Svh::new) } } impl<T> stable_hasher::HashStable<T> for Svh { #[inline] fn hash_stable<W: stable_hasher::StableHasherResult>( &self, ctx: &mut T, hasher: &mut stable_hasher::StableHasher<W> ) { let Svh { hash } = *self; hash.hash_stable(ctx, hasher); } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/bitslice.rs
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // FIXME: merge with `bitvec` use std::mem; pub type Word = usize; /// `BitSlice` provides helper methods for treating a `[Word]` /// as a bitvector. pub trait BitSlice { fn clear_bit(&mut self, idx: usize) -> bool; fn set_bit(&mut self, idx: usize) -> bool; fn get_bit(&self, idx: usize) -> bool; } impl BitSlice for [Word] { /// Clears bit at `idx` to 0; returns true iff this changed `self.` #[inline] fn clear_bit(&mut self, idx: usize) -> bool { let words = self; debug!("clear_bit: words={} idx={}", bits_to_string(words, words.len() * mem::size_of::<Word>() * 8), idx); let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx); debug!("word={} bit_in_word={} bit_mask=0x{:x}", word, bit_in_word, bit_mask); let oldv = words[word]; let newv = oldv & !bit_mask; words[word] = newv; oldv != newv } /// Sets bit at `idx` to 1; returns true iff this changed `self.` #[inline] fn set_bit(&mut self, idx: usize) -> bool { let words = self; debug!("set_bit: words={} idx={}", bits_to_string(words, words.len() * mem::size_of::<Word>() * 8), idx); let BitLookup { word, bit_in_word, bit_mask } = bit_lookup(idx); debug!("word={} bit_in_word={} bit_mask={}", word, bit_in_word, bit_mask); let oldv = words[word]; let newv = oldv | bit_mask; words[word] = newv; oldv != newv } /// Extracts value of bit at `idx` in `self`. #[inline] fn get_bit(&self, idx: usize) -> bool { let words = self; let BitLookup { word, bit_mask, .. 
} = bit_lookup(idx); (words[word] & bit_mask) != 0 } } struct BitLookup { /// An index of the word holding the bit in original `[Word]` of query. word: usize, /// Index of the particular bit within the word holding the bit. bit_in_word: usize, /// Word with single 1-bit set corresponding to where the bit is located. bit_mask: Word, } #[inline] fn bit_lookup(bit: usize) -> BitLookup { let word_bits = mem::size_of::<Word>() * 8; let word = bit / word_bits; let bit_in_word = bit % word_bits; let bit_mask = 1 << bit_in_word; BitLookup { word, bit_in_word, bit_mask } } pub fn bits_to_string(words: &[Word], bits: usize) -> String { let mut result = String::new(); let mut sep = '['; // Note: this is a little endian printout of bytes. // i tracks how many bits we have printed so far. let mut i = 0; for &word in words.iter() { let mut v = word; for _ in 0..mem::size_of::<Word>() { // for each byte in `v`: let remain = bits - i; // If less than a byte remains, then mask just that many bits. let mask = if remain <= 8 { (1 << remain) - 1 } else { 0xFF }; assert!(mask <= 0xFF); let byte = v & mask; result.push_str(&format!("{}{:02x}", sep, byte)); if remain <= 8 { break; } v >>= 8; i += 8; sep = '-'; } sep = '|'; } result.push(']'); result } #[inline] pub fn bitwise<Op:BitwiseOperator>(out_vec: &mut [Word], in_vec: &[Word], op: &Op) -> bool { assert_eq!(out_vec.len(), in_vec.len()); let mut changed = false; for (out_elt, in_elt) in out_vec.iter_mut().zip(in_vec) { let old_val = *out_elt; let new_val = op.join(old_val, *in_elt); *out_elt = new_val; changed |= old_val != new_val; } changed } pub trait BitwiseOperator { /// Applies some bit-operation pointwise to each of the bits in the two inputs. 
fn join(&self, pred1: Word, pred2: Word) -> Word; } pub struct Intersect; impl BitwiseOperator for Intersect { #[inline] fn join(&self, a: Word, b: Word) -> Word { a & b } } pub struct Union; impl BitwiseOperator for Union { #[inline] fn join(&self, a: Word, b: Word) -> Word { a | b } } pub struct Subtract; impl BitwiseOperator for Subtract { #[inline] fn join(&self, a: Word, b: Word) -> Word { a & !b } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/tuple_slice.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::slice;

/// Allows to view uniform tuples as slices
pub trait TupleSlice<T> {
    fn as_slice(&self) -> &[T];
    fn as_mut_slice(&mut self) -> &mut [T];
}

// Implements `TupleSlice` for a homogeneous tuple type of the given arity.
// NOTE: this relies on rustc laying out `(T, T, ...)` with its fields
// contiguous and in order, so field 0 can serve as the base of a `[T; N]`.
macro_rules! impl_tuple_slice {
    ($tuple_type:ty, $size:expr) => {
        impl<T> TupleSlice<T> for $tuple_type {
            fn as_slice(&self) -> &[T] {
                let base = &self.0 as *const T;
                unsafe { slice::from_raw_parts(base, $size) }
            }

            fn as_mut_slice(&mut self) -> &mut [T] {
                let base = &mut self.0 as *mut T;
                unsafe { slice::from_raw_parts_mut(base, $size) }
            }
        }
    }
}

impl_tuple_slice!((T, T), 2);
impl_tuple_slice!((T, T, T), 3);
impl_tuple_slice!((T, T, T, T), 4);
impl_tuple_slice!((T, T, T, T, T), 5);
impl_tuple_slice!((T, T, T, T, T, T), 6);
impl_tuple_slice!((T, T, T, T, T, T, T), 7);
impl_tuple_slice!((T, T, T, T, T, T, T, T), 8);

#[test]
fn test_sliced_tuples() {
    assert_eq!((100, 101).as_slice(), &[100, 101]);
    assert_eq!((102, 103, 104).as_slice(), &[102, 103, 104]);
    assert_eq!((105, 106, 107, 108).as_slice(), &[105, 106, 107, 108]);
    assert_eq!((109, 110, 111, 112, 113).as_slice(), &[109, 110, 111, 112, 113]);
    assert_eq!((114, 115, 116, 117, 118, 119).as_slice(),
               &[114, 115, 116, 117, 118, 119]);
    assert_eq!((120, 121, 122, 123, 124, 125, 126).as_slice(),
               &[120, 121, 122, 123, 124, 125, 126]);
    assert_eq!((127, 128, 129, 130, 131, 132, 133, 134).as_slice(),
               &[127, 128, 129, 130, 131, 132, 133, 134]);
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/fx.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::collections::{HashMap, HashSet}; use std::default::Default; use std::hash::Hash; pub use rustc_hash::FxHashMap; pub use rustc_hash::FxHashSet; pub use rustc_hash::FxHasher; #[allow(non_snake_case)] pub fn FxHashMap<K: Hash + Eq, V>() -> FxHashMap<K, V> { HashMap::default() } #[allow(non_snake_case)] pub fn FxHashSet<V: Hash + Eq>() -> FxHashSet<V> { HashSet::default() }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/indexed_set.rs
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use array_vec::ArrayVec;
use std::fmt;
use std::iter;
use std::marker::PhantomData;
use std::mem;
use std::slice;
use bitslice::{BitSlice, Word};
use bitslice::{bitwise, Union, Subtract, Intersect};
use indexed_vec::Idx;
use rustc_serialize;

/// Represents a set of some element type E, where each E is identified by some
/// unique index type `T`.
///
/// In other words, `T` is the type used to index into the bitvector
/// this type uses to represent the set of object it holds.
///
/// The representation is dense, using one bit per possible element.
#[derive(Eq, PartialEq)]
pub struct IdxSet<T: Idx> {
    // Marker tying the set to its index type `T` without storing a `T`.
    _pd: PhantomData<fn(&T)>,
    // The bits, one per potential element.
    bits: Vec<Word>,
}

impl<T: Idx> Clone for IdxSet<T> {
    fn clone(&self) -> Self {
        IdxSet { _pd: PhantomData, bits: self.bits.clone() }
    }
}

impl<T: Idx> rustc_serialize::Encodable for IdxSet<T> {
    // Encodes just the word vector; the index type is phantom.
    fn encode<E: rustc_serialize::Encoder>(&self,
                                           encoder: &mut E)
                                           -> Result<(), E::Error> {
        self.bits.encode(encoder)
    }
}

impl<T: Idx> rustc_serialize::Decodable for IdxSet<T> {
    fn decode<D: rustc_serialize::Decoder>(d: &mut D) -> Result<IdxSet<T>, D::Error> {
        let words: Vec<Word> = rustc_serialize::Decodable::decode(d)?;

        Ok(IdxSet {
            _pd: PhantomData,
            bits: words,
        })
    }
}

const BITS_PER_WORD: usize = mem::size_of::<Word>() * 8;

impl<T: Idx> fmt::Debug for IdxSet<T> {
    fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
        w.debug_list()
         .entries(self.iter())
         .finish()
    }
}

impl<T: Idx> IdxSet<T> {
    // Creates a set sized to hold `universe_size` elements (rounded up
    // to whole words), with every word initialized to `init`.
    fn new(init: Word, universe_size: usize) -> Self {
        let num_words = (universe_size + (BITS_PER_WORD - 1)) / BITS_PER_WORD;
        IdxSet {
            _pd: Default::default(),
            bits: vec![init; num_words],
        }
    }

    /// Creates set holding every element whose index falls in range 0..universe_size.
    pub fn new_filled(universe_size: usize) -> Self {
        let mut result = Self::new(!0, universe_size);
        result.trim_to(universe_size);
        result
    }

    /// Creates set holding no elements.
    pub fn new_empty(universe_size: usize) -> Self {
        Self::new(0, universe_size)
    }

    /// Duplicates as a hybrid set.
    pub fn to_hybrid(&self) -> HybridIdxSet<T> {
        // This universe_size may be slightly larger than the one specified
        // upon creation, due to rounding up to a whole word. That's ok.
        let universe_size = self.bits.len() * BITS_PER_WORD;

        // Note: we currently don't bother trying to make a Sparse set.
        HybridIdxSet::Dense(self.to_owned(), universe_size)
    }

    /// Removes all elements
    pub fn clear(&mut self) {
        for b in &mut self.bits {
            *b = 0;
        }
    }

    /// Sets all elements up to `universe_size`
    pub fn set_up_to(&mut self, universe_size: usize) {
        for b in &mut self.bits {
            *b = !0;
        }
        self.trim_to(universe_size);
    }

    /// Clear all elements above `universe_size`.
    fn trim_to(&mut self, universe_size: usize) {
        // `trim_block` is the first block where some bits have
        // to be cleared.
        let trim_block = universe_size / BITS_PER_WORD;

        // all the blocks above it have to be completely cleared.
        if trim_block < self.bits.len() {
            for b in &mut self.bits[trim_block+1..] {
                *b = 0;
            }

            // at that block, the `universe_size % BITS_PER_WORD` lsbs
            // should remain.
            let remaining_bits = universe_size % BITS_PER_WORD;
            let mask = (1<<remaining_bits)-1;
            self.bits[trim_block] &= mask;
        }
    }

    /// Removes `elem` from the set `self`; returns true iff this changed `self`.
    pub fn remove(&mut self, elem: &T) -> bool {
        self.bits.clear_bit(elem.index())
    }

    /// Adds `elem` to the set `self`; returns true iff this changed `self`.
    pub fn add(&mut self, elem: &T) -> bool {
        self.bits.set_bit(elem.index())
    }

    /// Returns true iff set `self` contains `elem`.
    pub fn contains(&self, elem: &T) -> bool {
        self.bits.get_bit(elem.index())
    }

    /// Raw read-only view of the underlying words.
    pub fn words(&self) -> &[Word] {
        &self.bits
    }

    /// Raw mutable view of the underlying words.
    pub fn words_mut(&mut self) -> &mut [Word] {
        &mut self.bits
    }

    /// Efficiently overwrite `self` with `other`. Panics if `self` and `other`
    /// don't have the same length.
    pub fn overwrite(&mut self, other: &IdxSet<T>) {
        self.words_mut().clone_from_slice(other.words());
    }

    /// Set `self = self | other` and return true if `self` changed
    /// (i.e., if new bits were added).
    pub fn union(&mut self, other: &IdxSet<T>) -> bool {
        bitwise(self.words_mut(), other.words(), &Union)
    }

    /// Like `union()`, but takes a `SparseIdxSet` argument.
    fn union_sparse(&mut self, other: &SparseIdxSet<T>) -> bool {
        let mut changed = false;
        for elem in other.iter() {
            changed |= self.add(&elem);
        }
        changed
    }

    /// Like `union()`, but takes a `HybridIdxSet` argument.
    pub fn union_hybrid(&mut self, other: &HybridIdxSet<T>) -> bool {
        match other {
            HybridIdxSet::Sparse(sparse, _) => self.union_sparse(sparse),
            HybridIdxSet::Dense(dense, _) => self.union(dense),
        }
    }

    /// Set `self = self - other` and return true if `self` changed.
    /// (i.e., if any bits were removed).
    pub fn subtract(&mut self, other: &IdxSet<T>) -> bool {
        bitwise(self.words_mut(), other.words(), &Subtract)
    }

    /// Like `subtract()`, but takes a `SparseIdxSet` argument.
    fn subtract_sparse(&mut self, other: &SparseIdxSet<T>) -> bool {
        let mut changed = false;
        for elem in other.iter() {
            changed |= self.remove(&elem);
        }
        changed
    }

    /// Like `subtract()`, but takes a `HybridIdxSet` argument.
    pub fn subtract_hybrid(&mut self, other: &HybridIdxSet<T>) -> bool {
        match other {
            HybridIdxSet::Sparse(sparse, _) => self.subtract_sparse(sparse),
            HybridIdxSet::Dense(dense, _) => self.subtract(dense),
        }
    }

    /// Set `self = self & other` and return true if `self` changed.
    /// (i.e., if any bits were removed).
    pub fn intersect(&mut self, other: &IdxSet<T>) -> bool {
        bitwise(self.words_mut(), other.words(), &Intersect)
    }

    /// Iterates over the indices of set bits, in ascending order.
    pub fn iter(&self) -> Iter<T> {
        Iter {
            cur: None,
            iter: self.words().iter().enumerate(),
            _pd: PhantomData,
        }
    }
}

/// Iterator over the elements of an `IdxSet`.
pub struct Iter<'a, T: Idx> {
    // The word currently being scanned (with already-yielded bits cleared
    // as we go), paired with its bit offset within the whole set.
    cur: Option<(Word, usize)>,
    iter: iter::Enumerate<slice::Iter<'a, Word>>,
    _pd: PhantomData<fn(&T)>,
}

impl<'a, T: Idx> Iterator for Iter<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        loop {
            if let Some((ref mut word, offset)) = self.cur {
                let bit_pos = word.trailing_zeros() as usize;
                if bit_pos != BITS_PER_WORD {
                    let bit = 1 << bit_pos;
                    // Clear the bit in our scratch copy so the next call
                    // advances past it.
                    *word ^= bit;
                    return Some(T::new(bit_pos + offset))
                }
            }

            // Current word exhausted (or first call); advance to the next word.
            let (i, word) = self.iter.next()?;
            self.cur = Some((*word, BITS_PER_WORD * i));
        }
    }
}

const SPARSE_MAX: usize = 8;

/// A sparse index set with a maximum of SPARSE_MAX elements. Used by
/// HybridIdxSet; do not use directly.
///
/// The elements are stored as an unsorted vector with no duplicates.
#[derive(Clone, Debug)]
pub struct SparseIdxSet<T: Idx>(ArrayVec<[T; SPARSE_MAX]>);

impl<T: Idx> SparseIdxSet<T> {
    fn new() -> Self {
        SparseIdxSet(ArrayVec::new())
    }

    fn len(&self) -> usize {
        self.0.len()
    }

    fn contains(&self, elem: &T) -> bool {
        self.0.contains(elem)
    }

    // Returns true iff `elem` was newly added. Caller must guarantee
    // there is room (len < SPARSE_MAX) before pushing.
    fn add(&mut self, elem: &T) -> bool {
        // Ensure there are no duplicates.
        if self.0.contains(elem) {
            false
        } else {
            self.0.push(*elem);
            true
        }
    }

    // Returns true iff `elem` was present; order is not preserved.
    fn remove(&mut self, elem: &T) -> bool {
        if let Some(i) = self.0.iter().position(|e| e == elem) {
            // Swap the found element to the end, then pop it.
            let len = self.0.len();
            self.0.swap(i, len - 1);
            self.0.pop();
            true
        } else {
            false
        }
    }

    // Copies the elements into a freshly-allocated dense set.
    fn to_dense(&self, universe_size: usize) -> IdxSet<T> {
        let mut dense = IdxSet::new_empty(universe_size);
        for elem in self.0.iter() {
            dense.add(elem);
        }
        dense
    }

    fn iter(&self) -> SparseIter<T> {
        SparseIter {
            iter: self.0.iter(),
        }
    }
}

/// Iterator over a `SparseIdxSet`; yields elements in insertion order.
pub struct SparseIter<'a, T: Idx> {
    iter: slice::Iter<'a, T>,
}

impl<'a, T: Idx> Iterator for SparseIter<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        self.iter.next().map(|e| *e)
    }
}

/// Like IdxSet, but with a hybrid representation: sparse when there are few
/// elements in the set, but dense when there are many. It's especially
/// efficient for sets that typically have a small number of elements, but a
/// large `universe_size`, and are cleared frequently.
#[derive(Clone, Debug)]
pub enum HybridIdxSet<T: Idx> {
    // In both variants the second field is the universe size.
    Sparse(SparseIdxSet<T>, usize),
    Dense(IdxSet<T>, usize),
}

impl<T: Idx> HybridIdxSet<T> {
    /// Creates an empty set; starts in the cheap sparse representation.
    pub fn new_empty(universe_size: usize) -> Self {
        HybridIdxSet::Sparse(SparseIdxSet::new(), universe_size)
    }

    fn universe_size(&mut self) -> usize {
        match *self {
            HybridIdxSet::Sparse(_, size) => size,
            HybridIdxSet::Dense(_, size) => size,
        }
    }

    /// Removes all elements; resets back to the sparse representation.
    pub fn clear(&mut self) {
        let universe_size = self.universe_size();
        *self = HybridIdxSet::new_empty(universe_size);
    }

    /// Returns true iff set `self` contains `elem`.
    pub fn contains(&self, elem: &T) -> bool {
        match self {
            HybridIdxSet::Sparse(sparse, _) => sparse.contains(elem),
            HybridIdxSet::Dense(dense, _) => dense.contains(elem),
        }
    }

    /// Adds `elem` to the set `self`. Returns true iff this changed the set.
    /// Promotes from sparse to dense when the sparse capacity is exceeded.
    pub fn add(&mut self, elem: &T) -> bool {
        match self {
            HybridIdxSet::Sparse(sparse, _) if sparse.len() < SPARSE_MAX => {
                // The set is sparse and has space for `elem`.
                sparse.add(elem)
            }
            HybridIdxSet::Sparse(sparse, _) if sparse.contains(elem) => {
                // The set is sparse and does not have space for `elem`, but
                // that doesn't matter because `elem` is already present.
                false
            }
            HybridIdxSet::Sparse(_, _) => {
                // The set is sparse and full. Convert to a dense set.
                //
                // FIXME: This code is awful, but I can't work out how else to
                // appease the borrow checker.
                let dummy = HybridIdxSet::Sparse(SparseIdxSet::new(), 0);
                match mem::replace(self, dummy) {
                    HybridIdxSet::Sparse(sparse, universe_size) => {
                        let mut dense = sparse.to_dense(universe_size);
                        let changed = dense.add(elem);
                        assert!(changed);
                        mem::replace(self, HybridIdxSet::Dense(dense, universe_size));
                        changed
                    }
                    _ => panic!("impossible"),
                }
            }

            HybridIdxSet::Dense(dense, _) => dense.add(elem),
        }
    }

    /// Removes `elem` from the set `self`.
    pub fn remove(&mut self, elem: &T) -> bool {
        // Note: we currently don't bother going from Dense back to Sparse.
        match self {
            HybridIdxSet::Sparse(sparse, _) => sparse.remove(elem),
            HybridIdxSet::Dense(dense, _) => dense.remove(elem),
        }
    }

    /// Converts to a dense set, consuming itself in the process.
    pub fn to_dense(self) -> IdxSet<T> {
        match self {
            HybridIdxSet::Sparse(sparse, universe_size) => sparse.to_dense(universe_size),
            HybridIdxSet::Dense(dense, _) => dense,
        }
    }

    /// Iteration order is unspecified.
    pub fn iter(&self) -> HybridIter<T> {
        match self {
            HybridIdxSet::Sparse(sparse, _) => HybridIter::Sparse(sparse.iter()),
            HybridIdxSet::Dense(dense, _) => HybridIter::Dense(dense.iter()),
        }
    }
}

/// Iterator over a `HybridIdxSet`; dispatches to the active representation.
pub enum HybridIter<'a, T: Idx> {
    Sparse(SparseIter<'a, T>),
    Dense(Iter<'a, T>),
}

impl<'a, T: Idx> Iterator for HybridIter<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        match self {
            HybridIter::Sparse(sparse) => sparse.next(),
            HybridIter::Dense(dense) => dense.next(),
        }
    }
}

#[test]
fn test_trim_to() {
    use std::cmp;

    for i in 0..256 {
        let mut idx_buf: IdxSet<usize> = IdxSet::new_filled(128);
        idx_buf.trim_to(i);

        let elems: Vec<usize> = idx_buf.iter().collect();
        let expected: Vec<usize> = (0..cmp::min(i, 128)).collect();
        assert_eq!(elems, expected);
    }
}

#[test]
fn test_set_up_to() {
    for i in 0..128 {
        for mut idx_buf in
            vec![IdxSet::new_empty(128), IdxSet::new_filled(128)]
            .into_iter()
        {
            idx_buf.set_up_to(i);

            let elems: Vec<usize> = idx_buf.iter().collect();
            let expected: Vec<usize> = (0..i).collect();
            assert_eq!(elems, expected);
        }
    }
}

#[test]
fn test_new_filled() {
    for i in 0..128 {
        let idx_buf = IdxSet::new_filled(i);
        let elems: Vec<usize> = idx_buf.iter().collect();
        let expected: Vec<usize> = (0..i).collect();
        assert_eq!(elems, expected);
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/thin_vec.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// A vector type optimized for cases where this size is usually 0 (c.f. `SmallVector`). /// The `Option<Box<..>>` wrapping allows us to represent a zero sized vector with `None`, /// which uses only a single (null) pointer. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ThinVec<T>(Option<Box<Vec<T>>>); impl<T> ThinVec<T> { pub fn new() -> Self { ThinVec(None) } } impl<T> From<Vec<T>> for ThinVec<T> { fn from(vec: Vec<T>) -> Self { if vec.is_empty() { ThinVec(None) } else { ThinVec(Some(Box::new(vec))) } } } impl<T> Into<Vec<T>> for ThinVec<T> { fn into(self) -> Vec<T> { match self { ThinVec(None) => Vec::new(), ThinVec(Some(vec)) => *vec, } } } impl<T> ::std::ops::Deref for ThinVec<T> { type Target = [T]; fn deref(&self) -> &[T] { match *self { ThinVec(None) => &[], ThinVec(Some(ref vec)) => vec, } } } impl<T> Extend<T> for ThinVec<T> { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { match *self { ThinVec(Some(ref mut vec)) => vec.extend(iter), ThinVec(None) => *self = iter.into_iter().collect::<Vec<_>>().into(), } } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/flock.rs
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Simple file-locking apis for each OS.
//!
//! This is not meant to be in the standard library, it does nothing with
//! green/native threading. This is just a bare-bones enough solution for
//! librustdoc, it is not production quality at all.

#![allow(non_camel_case_types)]

use std::io;
use std::path::Path;

cfg_if! {
    if #[cfg(unix)] {
        use std::ffi::{CString, OsStr};
        use std::os::unix::prelude::*;
        use std::path::Path;
        use libc;

        // Each `mod os` below mirrors that platform's `struct flock`
        // layout and fcntl constants; only one is compiled in.
        #[cfg(any(target_os = "linux", target_os = "android"))]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_pid: libc::pid_t,

                // not actually here, but brings in line with freebsd
                pub l_sysid: libc::c_int,
            }

            pub const F_RDLCK: libc::c_short = 0;
            pub const F_WRLCK: libc::c_short = 1;
            pub const F_UNLCK: libc::c_short = 2;
            pub const F_SETLK: libc::c_int = 6;
            pub const F_SETLKW: libc::c_int = 7;
        }

        #[cfg(target_os = "freebsd")]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_pid: libc::pid_t,
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,
                pub l_sysid: libc::c_int,
            }

            pub const F_RDLCK: libc::c_short = 1;
            pub const F_UNLCK: libc::c_short = 2;
            pub const F_WRLCK: libc::c_short = 3;
            pub const F_SETLK: libc::c_int = 12;
            pub const F_SETLKW: libc::c_int = 13;
        }

        #[cfg(any(target_os = "dragonfly",
                  target_os = "bitrig",
                  target_os = "netbsd",
                  target_os = "openbsd"))]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_pid: libc::pid_t,
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,

                // not actually here, but brings in line with freebsd
                pub l_sysid: libc::c_int,
            }

            pub const F_RDLCK: libc::c_short = 1;
            pub const F_UNLCK: libc::c_short = 2;
            pub const F_WRLCK: libc::c_short = 3;
            pub const F_SETLK: libc::c_int = 8;
            pub const F_SETLKW: libc::c_int = 9;
        }

        #[cfg(target_os = "haiku")]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_pid: libc::pid_t,

                // not actually here, but brings in line with freebsd
                pub l_sysid: libc::c_int,
            }

            pub const F_RDLCK: libc::c_short = 0x0040;
            pub const F_UNLCK: libc::c_short = 0x0200;
            pub const F_WRLCK: libc::c_short = 0x0400;
            pub const F_SETLK: libc::c_int = 0x0080;
            pub const F_SETLKW: libc::c_int = 0x0100;
        }

        #[cfg(any(target_os = "macos", target_os = "ios"))]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_pid: libc::pid_t,
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,

                // not actually here, but brings in line with freebsd
                pub l_sysid: libc::c_int,
            }

            pub const F_RDLCK: libc::c_short = 1;
            pub const F_UNLCK: libc::c_short = 2;
            pub const F_WRLCK: libc::c_short = 3;
            pub const F_SETLK: libc::c_int = 8;
            pub const F_SETLKW: libc::c_int = 9;
        }

        #[cfg(target_os = "solaris")]
        mod os {
            use libc;

            #[repr(C)]
            pub struct flock {
                pub l_type: libc::c_short,
                pub l_whence: libc::c_short,
                pub l_start: libc::off_t,
                pub l_len: libc::off_t,
                pub l_sysid: libc::c_int,
                pub l_pid: libc::pid_t,
            }

            pub const F_RDLCK: libc::c_short = 1;
            pub const F_WRLCK: libc::c_short = 2;
            pub const F_UNLCK: libc::c_short = 3;
            pub const F_SETLK: libc::c_int = 6;
            pub const F_SETLKW: libc::c_int = 7;
        }

        /// An advisory file lock, released (and the fd closed) on drop.
        #[derive(Debug)]
        pub struct Lock {
            fd: libc::c_int,
        }

        impl Lock {
            /// Opens (optionally creating) `p` and takes an advisory `fcntl`
            /// record lock on it.
            ///
            /// * `wait` — block until the lock is available (`F_SETLKW`)
            ///   instead of failing immediately (`F_SETLK`).
            /// * `create` — open with `O_CREAT` so a missing file is created.
            /// * `exclusive` — take a write lock (`F_WRLCK`) instead of a
            ///   read lock (`F_RDLCK`).
            pub fn new(p: &Path,
                       wait: bool,
                       create: bool,
                       exclusive: bool)
                       -> io::Result<Lock> {
                let os: &OsStr = p.as_ref();
                let buf = CString::new(os.as_bytes()).unwrap();
                let open_flags = if create {
                    libc::O_RDWR | libc::O_CREAT
                } else {
                    libc::O_RDWR
                };

                let fd = unsafe {
                    libc::open(buf.as_ptr(), open_flags,
                               libc::S_IRWXU as libc::c_int)
                };

                if fd < 0 {
                    return Err(io::Error::last_os_error());
                }

                let lock_type = if exclusive {
                    os::F_WRLCK
                } else {
                    os::F_RDLCK
                };

                // l_start = 0, l_len = 0: per POSIX fcntl semantics this
                // locks from the start of the file through EOF.
                let flock = os::flock {
                    l_start: 0,
                    l_len: 0,
                    l_pid: 0,
                    l_whence: libc::SEEK_SET as libc::c_short,
                    l_type: lock_type,
                    l_sysid: 0,
                };
                let cmd = if wait { os::F_SETLKW } else { os::F_SETLK };
                let ret = unsafe {
                    libc::fcntl(fd, cmd, &flock)
                };
                if ret == -1 {
                    // Locking failed: capture errno, then close so the
                    // descriptor is not leaked.
                    let err = io::Error::last_os_error();
                    unsafe { libc::close(fd); }
                    Err(err)
                } else {
                    Ok(Lock { fd: fd })
                }
            }
        }

        impl Drop for Lock {
            fn drop(&mut self) {
                // Explicitly unlock before closing; any errors here are
                // deliberately ignored (nothing useful to do in drop).
                let flock = os::flock {
                    l_start: 0,
                    l_len: 0,
                    l_pid: 0,
                    l_whence: libc::SEEK_SET as libc::c_short,
                    l_type: os::F_UNLCK,
                    l_sysid: 0,
                };
                unsafe {
                    libc::fcntl(self.fd, os::F_SETLK, &flock);
                    libc::close(self.fd);
                }
            }
        }
    } else if #[cfg(windows)] {
        use std::mem;
        use std::os::windows::prelude::*;
        use std::os::windows::raw::HANDLE;
        use std::path::Path;
        use std::fs::{File, OpenOptions};
        use std::os::raw::{c_ulong, c_int};

        // Minimal hand-rolled Win32 declarations so we don't depend on
        // the winapi crate.
        type DWORD = c_ulong;
        type BOOL = c_int;
        type ULONG_PTR = usize;

        type LPOVERLAPPED = *mut OVERLAPPED;
        const LOCKFILE_EXCLUSIVE_LOCK: DWORD = 0x0000_0002;
        const LOCKFILE_FAIL_IMMEDIATELY: DWORD = 0x0000_0001;

        const FILE_SHARE_DELETE: DWORD = 0x4;
        const FILE_SHARE_READ: DWORD = 0x1;
        const FILE_SHARE_WRITE: DWORD = 0x2;

        #[repr(C)]
        struct OVERLAPPED {
            Internal: ULONG_PTR,
            InternalHigh: ULONG_PTR,
            Offset: DWORD,
            OffsetHigh: DWORD,
            hEvent: HANDLE,
        }

        extern "system" {
            fn LockFileEx(hFile: HANDLE,
                          dwFlags: DWORD,
                          dwReserved: DWORD,
                          nNumberOfBytesToLockLow: DWORD,
                          nNumberOfBytesToLockHigh: DWORD,
                          lpOverlapped: LPOVERLAPPED) -> BOOL;
        }

        /// An advisory file lock; the `LockFileEx` lock is released when
        /// the wrapped `File` handle is closed (i.e. on drop).
        #[derive(Debug)]
        pub struct Lock {
            _file: File,
        }

        impl Lock {
            /// Opens (optionally creating) `p` and locks it via
            /// `LockFileEx`; shared or exclusive per `exclusive`, blocking
            /// or fail-fast per `wait`.
            pub fn new(p: &Path,
                       wait: bool,
                       create: bool,
                       exclusive: bool)
                       -> io::Result<Lock> {
                assert!(p.parent().unwrap().exists(),
                    "Parent directory of lock-file must exist: {}",
                    p.display());

                let share_mode = FILE_SHARE_DELETE | FILE_SHARE_READ | FILE_SHARE_WRITE;

                let mut open_options = OpenOptions::new();
                open_options.read(true)
                            .share_mode(share_mode);

                if create {
                    open_options.create(true)
                                .write(true);
                }

                debug!("Attempting to open lock file `{}`", p.display());
                let file = match open_options.open(p) {
                    Ok(file) => {
                        debug!("Lock file opened successfully");
                        file
                    }
                    Err(err) => {
                        debug!("Error opening lock file: {}", err);
                        return Err(err)
                    }
                };

                let ret = unsafe {
                    let mut overlapped: OVERLAPPED = mem::zeroed();

                    let mut dwFlags = 0;
                    if !wait {
                        dwFlags |= LOCKFILE_FAIL_IMMEDIATELY;
                    }

                    if exclusive {
                        dwFlags |= LOCKFILE_EXCLUSIVE_LOCK;
                    }

                    debug!("Attempting to acquire lock on lock file `{}`",
                           p.display());
                    // 0xFFFF_FFFF low/high byte counts: lock the maximal
                    // byte range, i.e. effectively the whole file.
                    LockFileEx(file.as_raw_handle(),
                               dwFlags,
                               0,
                               0xFFFF_FFFF,
                               0xFFFF_FFFF,
                               &mut overlapped)
                };
                if ret == 0 {
                    let err = io::Error::last_os_error();
                    debug!("Failed acquiring file lock: {}", err);
                    Err(err)
                } else {
                    debug!("Successfully acquired lock.");
                    Ok(Lock { _file: file })
                }
            }
        }

        // Note that we don't need a Drop impl on the Windows: The file is unlocked
        // automatically when it's closed.
    } else {
        /// Fallback for platforms with no file-locking support: `new`
        /// always returns an error.
        #[derive(Debug)]
        pub struct Lock(());

        impl Lock {
            pub fn new(_p: &Path, _wait: bool, _create: bool, _exclusive: bool)
                -> io::Result<Lock>
            {
                let msg = "file locks not supported on this platform";
                Err(io::Error::new(io::ErrorKind::Other, msg))
            }
        }
    }
}

impl Lock {
    /// Like `new`, but panics with a descriptive message instead of
    /// returning an error.
    pub fn panicking_new(p: &Path,
                         wait: bool,
                         create: bool,
                         exclusive: bool)
                         -> Lock {
        Lock::new(p, wait, create, exclusive).unwrap_or_else(|err| {
            panic!("could not lock `{}`: {}", p.display(), err);
        })
    }
}
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/array_vec.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A stack-allocated vector, allowing storage of N elements on the stack. use std::marker::Unsize; use std::iter::Extend; use std::ptr::{self, drop_in_place, NonNull}; use std::ops::{Deref, DerefMut, Range}; use std::hash::{Hash, Hasher}; use std::slice; use std::fmt; use std::mem; use std::mem::ManuallyDrop; use std::ops::Bound::{Excluded, Included, Unbounded}; use std::ops::RangeBounds; pub unsafe trait Array { type Element; type PartialStorage: Unsize<[ManuallyDrop<Self::Element>]>; const LEN: usize; } unsafe impl<T> Array for [T; 1] { type Element = T; type PartialStorage = [ManuallyDrop<T>; 1]; const LEN: usize = 1; } unsafe impl<T> Array for [T; 8] { type Element = T; type PartialStorage = [ManuallyDrop<T>; 8]; const LEN: usize = 8; } unsafe impl<T> Array for [T; 32] { type Element = T; type PartialStorage = [ManuallyDrop<T>; 32]; const LEN: usize = 32; } pub struct ArrayVec<A: Array> { count: usize, values: A::PartialStorage } impl<A> Hash for ArrayVec<A> where A: Array, A::Element: Hash { fn hash<H>(&self, state: &mut H) where H: Hasher { (&self[..]).hash(state); } } impl<A> Clone for ArrayVec<A> where A: Array, A::Element: Clone { fn clone(&self) -> Self { let mut v = ArrayVec::new(); v.extend(self.iter().cloned()); v } } impl<A: Array> ArrayVec<A> { pub fn new() -> Self { ArrayVec { count: 0, values: unsafe { ::std::mem::uninitialized() }, } } pub fn len(&self) -> usize { self.count } pub unsafe fn set_len(&mut self, len: usize) { self.count = len; } /// Panics when the stack vector is full. 
pub fn push(&mut self, el: A::Element) { let arr = &mut self.values as &mut [ManuallyDrop<_>]; arr[self.count] = ManuallyDrop::new(el); self.count += 1; } pub fn pop(&mut self) -> Option<A::Element> { if self.count > 0 { let arr = &mut self.values as &mut [ManuallyDrop<_>]; self.count -= 1; unsafe { let value = ptr::read(&*arr[self.count]); Some(value) } } else { None } } pub fn drain<R>(&mut self, range: R) -> Drain<A> where R: RangeBounds<usize> { // Memory safety // // When the Drain is first created, it shortens the length of // the source vector to make sure no uninitialized or moved-from elements // are accessible at all if the Drain's destructor never gets to run. // // Drain will ptr::read out the values to remove. // When finished, remaining tail of the vec is copied back to cover // the hole, and the vector length is restored to the new length. // let len = self.len(); let start = match range.start_bound() { Included(&n) => n, Excluded(&n) => n + 1, Unbounded => 0, }; let end = match range.end_bound() { Included(&n) => n + 1, Excluded(&n) => n, Unbounded => len, }; assert!(start <= end); assert!(end <= len); unsafe { // set self.vec length's to start, to be safe in case Drain is leaked self.set_len(start); // Use the borrow in the IterMut to indicate borrowing behavior of the // whole Drain iterator (like &mut T). 
let range_slice = { let arr = &mut self.values as &mut [ManuallyDrop<<A as Array>::Element>]; slice::from_raw_parts_mut(arr.as_mut_ptr().add(start), end - start) }; Drain { tail_start: end, tail_len: len - end, iter: range_slice.iter(), array_vec: NonNull::from(self), } } } } impl<A> Default for ArrayVec<A> where A: Array { fn default() -> Self { ArrayVec::new() } } impl<A> fmt::Debug for ArrayVec<A> where A: Array, A::Element: fmt::Debug { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self[..].fmt(f) } } impl<A: Array> Deref for ArrayVec<A> { type Target = [A::Element]; fn deref(&self) -> &Self::Target { unsafe { slice::from_raw_parts(&self.values as *const _ as *const A::Element, self.count) } } } impl<A: Array> DerefMut for ArrayVec<A> { fn deref_mut(&mut self) -> &mut [A::Element] { unsafe { slice::from_raw_parts_mut(&mut self.values as *mut _ as *mut A::Element, self.count) } } } impl<A: Array> Drop for ArrayVec<A> { fn drop(&mut self) { unsafe { drop_in_place(&mut self[..]) } } } impl<A: Array> Extend<A::Element> for ArrayVec<A> { fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=A::Element> { for el in iter { self.push(el); } } } pub struct Iter<A: Array> { indices: Range<usize>, store: A::PartialStorage, } impl<A: Array> Drop for Iter<A> { fn drop(&mut self) { self.for_each(drop); } } impl<A: Array> Iterator for Iter<A> { type Item = A::Element; fn next(&mut self) -> Option<A::Element> { let arr = &self.store as &[ManuallyDrop<_>]; unsafe { self.indices.next().map(|i| ptr::read(&*arr[i])) } } fn size_hint(&self) -> (usize, Option<usize>) { self.indices.size_hint() } } pub struct Drain<'a, A: Array> where A::Element: 'a { tail_start: usize, tail_len: usize, iter: slice::Iter<'a, ManuallyDrop<A::Element>>, array_vec: NonNull<ArrayVec<A>>, } impl<'a, A: Array> Iterator for Drain<'a, A> { type Item = A::Element; #[inline] fn next(&mut self) -> Option<A::Element> { self.iter.next().map(|elt| unsafe { ptr::read(&**elt) }) } fn size_hint(&self) 
-> (usize, Option<usize>) { self.iter.size_hint() } } impl<'a, A: Array> Drop for Drain<'a, A> { fn drop(&mut self) { // exhaust self first self.for_each(drop); if self.tail_len > 0 { unsafe { let source_array_vec: &mut ArrayVec<A> = self.array_vec.as_mut(); // memmove back untouched tail, update to new length let start = source_array_vec.len(); let tail = self.tail_start; { let arr = &mut source_array_vec.values as &mut [ManuallyDrop<<A as Array>::Element>]; let src = arr.as_ptr().add(tail); let dst = arr.as_mut_ptr().add(start); ptr::copy(src, dst, self.tail_len); }; source_array_vec.set_len(start + self.tail_len); } } } } impl<A: Array> IntoIterator for ArrayVec<A> { type Item = A::Element; type IntoIter = Iter<A>; fn into_iter(self) -> Self::IntoIter { let store = unsafe { ptr::read(&self.values) }; let indices = 0..self.count; mem::forget(self); Iter { indices, store, } } } impl<'a, A: Array> IntoIterator for &'a ArrayVec<A> { type Item = &'a A::Element; type IntoIter = slice::Iter<'a, A::Element>; fn into_iter(self) -> Self::IntoIter { self.iter() } } impl<'a, A: Array> IntoIterator for &'a mut ArrayVec<A> { type Item = &'a mut A::Element; type IntoIter = slice::IterMut<'a, A::Element>; fn into_iter(self) -> Self::IntoIter { self.iter_mut() } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/accumulate_vec.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A vector type intended to be used for collecting from iterators onto the stack. //! //! Space for up to N elements is provided on the stack. If more elements are collected, Vec is //! used to store the values on the heap. //! //! The N above is determined by Array's implementor, by way of an associated constant. use std::ops::{Deref, DerefMut, RangeBounds}; use std::iter::{self, IntoIterator, FromIterator}; use std::slice; use std::vec; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use array_vec::{self, Array, ArrayVec}; #[derive(Hash, Debug)] pub enum AccumulateVec<A: Array> { Array(ArrayVec<A>), Heap(Vec<A::Element>) } impl<A> Clone for AccumulateVec<A> where A: Array, A::Element: Clone { fn clone(&self) -> Self { match *self { AccumulateVec::Array(ref arr) => AccumulateVec::Array(arr.clone()), AccumulateVec::Heap(ref vec) => AccumulateVec::Heap(vec.clone()), } } } impl<A: Array> AccumulateVec<A> { pub fn new() -> AccumulateVec<A> { AccumulateVec::Array(ArrayVec::new()) } pub fn is_array(&self) -> bool { match self { AccumulateVec::Array(..) => true, AccumulateVec::Heap(..) 
=> false, } } pub fn one(el: A::Element) -> Self { iter::once(el).collect() } pub fn many<I: IntoIterator<Item=A::Element>>(iter: I) -> Self { iter.into_iter().collect() } pub fn len(&self) -> usize { match *self { AccumulateVec::Array(ref arr) => arr.len(), AccumulateVec::Heap(ref vec) => vec.len(), } } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn pop(&mut self) -> Option<A::Element> { match *self { AccumulateVec::Array(ref mut arr) => arr.pop(), AccumulateVec::Heap(ref mut vec) => vec.pop(), } } pub fn drain<R>(&mut self, range: R) -> Drain<A> where R: RangeBounds<usize> { match *self { AccumulateVec::Array(ref mut v) => { Drain::Array(v.drain(range)) }, AccumulateVec::Heap(ref mut v) => { Drain::Heap(v.drain(range)) }, } } } impl<A: Array> Deref for AccumulateVec<A> { type Target = [A::Element]; fn deref(&self) -> &Self::Target { match *self { AccumulateVec::Array(ref v) => v, AccumulateVec::Heap(ref v) => v, } } } impl<A: Array> DerefMut for AccumulateVec<A> { fn deref_mut(&mut self) -> &mut [A::Element] { match *self { AccumulateVec::Array(ref mut v) => v, AccumulateVec::Heap(ref mut v) => v, } } } impl<A: Array> FromIterator<A::Element> for AccumulateVec<A> { fn from_iter<I>(iter: I) -> AccumulateVec<A> where I: IntoIterator<Item=A::Element> { let iter = iter.into_iter(); if iter.size_hint().1.map_or(false, |n| n <= A::LEN) { let mut v = ArrayVec::new(); v.extend(iter); AccumulateVec::Array(v) } else { AccumulateVec::Heap(iter.collect()) } } } pub struct IntoIter<A: Array> { repr: IntoIterRepr<A>, } enum IntoIterRepr<A: Array> { Array(array_vec::Iter<A>), Heap(vec::IntoIter<A::Element>), } impl<A: Array> Iterator for IntoIter<A> { type Item = A::Element; fn next(&mut self) -> Option<A::Element> { match self.repr { IntoIterRepr::Array(ref mut arr) => arr.next(), IntoIterRepr::Heap(ref mut iter) => iter.next(), } } fn size_hint(&self) -> (usize, Option<usize>) { match self.repr { IntoIterRepr::Array(ref iter) => iter.size_hint(), 
IntoIterRepr::Heap(ref iter) => iter.size_hint(), } } } pub enum Drain<'a, A: Array> where A::Element: 'a { Array(array_vec::Drain<'a, A>), Heap(vec::Drain<'a, A::Element>), } impl<'a, A: Array> Iterator for Drain<'a, A> { type Item = A::Element; fn next(&mut self) -> Option<A::Element> { match *self { Drain::Array(ref mut drain) => drain.next(), Drain::Heap(ref mut drain) => drain.next(), } } fn size_hint(&self) -> (usize, Option<usize>) { match *self { Drain::Array(ref drain) => drain.size_hint(), Drain::Heap(ref drain) => drain.size_hint(), } } } impl<A: Array> IntoIterator for AccumulateVec<A> { type Item = A::Element; type IntoIter = IntoIter<A>; fn into_iter(self) -> Self::IntoIter { IntoIter { repr: match self { AccumulateVec::Array(arr) => IntoIterRepr::Array(arr.into_iter()), AccumulateVec::Heap(vec) => IntoIterRepr::Heap(vec.into_iter()), } } } } impl<'a, A: Array> IntoIterator for &'a AccumulateVec<A> { type Item = &'a A::Element; type IntoIter = slice::Iter<'a, A::Element>; fn into_iter(self) -> Self::IntoIter { self.iter() } } impl<'a, A: Array> IntoIterator for &'a mut AccumulateVec<A> { type Item = &'a mut A::Element; type IntoIter = slice::IterMut<'a, A::Element>; fn into_iter(self) -> Self::IntoIter { self.iter_mut() } } impl<A: Array> From<Vec<A::Element>> for AccumulateVec<A> { fn from(v: Vec<A::Element>) -> AccumulateVec<A> { AccumulateVec::many(v) } } impl<A: Array> Default for AccumulateVec<A> { fn default() -> AccumulateVec<A> { AccumulateVec::new() } } impl<A> Encodable for AccumulateVec<A> where A: Array, A::Element: Encodable { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_seq(self.len(), |s| { for (i, e) in self.iter().enumerate() { s.emit_seq_elt(i, |s| e.encode(s))?; } Ok(()) }) } } impl<A> Decodable for AccumulateVec<A> where A: Array, A::Element: Decodable { fn decode<D: Decoder>(d: &mut D) -> Result<AccumulateVec<A>, D::Error> { d.read_seq(|d, len| { (0..len).map(|i| d.read_seq_elt(i, |d| 
Decodable::decode(d))).collect() }) } }
0
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps
solana_public_repos/solana-playground/solana-playground/wasm/rustfmt/deps/librustc_data_structures/sip128.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This is a copy of `core::hash::sip` adapted to providing 128 bit hashes. use std::cmp; use std::hash::Hasher; use std::slice; use std::ptr; use std::mem; #[derive(Debug, Clone)] pub struct SipHasher128 { k0: u64, k1: u64, length: usize, // how many bytes we've processed state: State, // hash State tail: u64, // unprocessed bytes le ntail: usize, // how many bytes in tail are valid } #[derive(Debug, Clone, Copy)] #[repr(C)] struct State { // v0, v2 and v1, v3 show up in pairs in the algorithm, // and simd implementations of SipHash will use vectors // of v02 and v13. By placing them in this order in the struct, // the compiler can pick up on just a few simd optimizations by itself. v0: u64, v2: u64, v1: u64, v3: u64, } macro_rules! compress { ($state:expr) => ({ compress!($state.v0, $state.v1, $state.v2, $state.v3) }); ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => ({ $v0 = $v0.wrapping_add($v1); $v1 = $v1.rotate_left(13); $v1 ^= $v0; $v0 = $v0.rotate_left(32); $v2 = $v2.wrapping_add($v3); $v3 = $v3.rotate_left(16); $v3 ^= $v2; $v0 = $v0.wrapping_add($v3); $v3 = $v3.rotate_left(21); $v3 ^= $v0; $v2 = $v2.wrapping_add($v1); $v1 = $v1.rotate_left(17); $v1 ^= $v2; $v2 = $v2.rotate_left(32); }); } /// Load an integer of the desired type from a byte stream, in LE order. Uses /// `copy_nonoverlapping` to let the compiler generate the most efficient way /// to load it from a possibly unaligned address. /// /// Unsafe because: unchecked indexing at i..i+size_of(int_ty) macro_rules! 
load_int_le { ($buf:expr, $i:expr, $int_ty:ident) => ({ debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); let mut data = 0 as $int_ty; ptr::copy_nonoverlapping($buf.get_unchecked($i), &mut data as *mut _ as *mut u8, mem::size_of::<$int_ty>()); data.to_le() }); } /// Load an u64 using up to 7 bytes of a byte slice. /// /// Unsafe because: unchecked indexing at start..start+len #[inline] unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { debug_assert!(len < 8); let mut i = 0; // current byte index (from LSB) in the output u64 let mut out = 0; if i + 3 < len { out = load_int_le!(buf, start + i, u32) as u64; i += 4; } if i + 1 < len { out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); i += 2 } if i < len { out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); i += 1; } debug_assert_eq!(i, len); out } impl SipHasher128 { #[inline] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher128 { let mut state = SipHasher128 { k0: key0, k1: key1, length: 0, state: State { v0: 0, v1: 0, v2: 0, v3: 0, }, tail: 0, ntail: 0, }; state.reset(); state } #[inline] fn reset(&mut self) { self.length = 0; self.state.v0 = self.k0 ^ 0x736f6d6570736575; self.state.v1 = self.k1 ^ 0x646f72616e646f6d; self.state.v2 = self.k0 ^ 0x6c7967656e657261; self.state.v3 = self.k1 ^ 0x7465646279746573; self.ntail = 0; // This is only done in the 128 bit version: self.state.v1 ^= 0xee; } // Specialized write function that is only valid for buffers with len <= 8. // It's used to force inlining of write_u8 and write_usize, those would normally be inlined // except for composite types (that includes slices and str hashing because of delimiter). // Without this extra push the compiler is very reluctant to inline delimiter writes, // degrading performance substantially for the most common use cases. 
#[inline] fn short_write(&mut self, msg: &[u8]) { debug_assert!(msg.len() <= 8); let length = msg.len(); self.length += length; let needed = 8 - self.ntail; let fill = cmp::min(length, needed); if fill == 8 { self.tail = unsafe { load_int_le!(msg, 0, u64) }; } else { self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail); if length < needed { self.ntail += length; return; } } self.state.v3 ^= self.tail; Sip24Rounds::c_rounds(&mut self.state); self.state.v0 ^= self.tail; // Buffered tail is now flushed, process new input. self.ntail = length - needed; self.tail = unsafe { u8to64_le(msg, needed, self.ntail) }; } #[inline(always)] fn short_write_gen<T>(&mut self, x: T) { let bytes = unsafe { slice::from_raw_parts(&x as *const T as *const u8, mem::size_of::<T>()) }; self.short_write(bytes); } #[inline] pub fn finish128(mut self) -> (u64, u64) { let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail; self.state.v3 ^= b; Sip24Rounds::c_rounds(&mut self.state); self.state.v0 ^= b; self.state.v2 ^= 0xee; Sip24Rounds::d_rounds(&mut self.state); let _0 = self.state.v0 ^ self.state.v1 ^ self.state.v2 ^ self.state.v3; self.state.v1 ^= 0xdd; Sip24Rounds::d_rounds(&mut self.state); let _1 = self.state.v0 ^ self.state.v1 ^ self.state.v2 ^ self.state.v3; (_0, _1) } } impl Hasher for SipHasher128 { #[inline] fn write_u8(&mut self, i: u8) { self.short_write_gen(i); } #[inline] fn write_u16(&mut self, i: u16) { self.short_write_gen(i); } #[inline] fn write_u32(&mut self, i: u32) { self.short_write_gen(i); } #[inline] fn write_u64(&mut self, i: u64) { self.short_write_gen(i); } #[inline] fn write_usize(&mut self, i: usize) { self.short_write_gen(i); } #[inline] fn write_i8(&mut self, i: i8) { self.short_write_gen(i); } #[inline] fn write_i16(&mut self, i: i16) { self.short_write_gen(i); } #[inline] fn write_i32(&mut self, i: i32) { self.short_write_gen(i); } #[inline] fn write_i64(&mut self, i: i64) { self.short_write_gen(i); } #[inline] fn write_isize(&mut self, i: 
isize) { self.short_write_gen(i); } #[inline] fn write(&mut self, msg: &[u8]) { let length = msg.len(); self.length += length; let mut needed = 0; if self.ntail != 0 { needed = 8 - self.ntail; self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; if length < needed { self.ntail += length; return } else { self.state.v3 ^= self.tail; Sip24Rounds::c_rounds(&mut self.state); self.state.v0 ^= self.tail; self.ntail = 0; } } // Buffered tail is now flushed, process new input. let len = length - needed; let left = len & 0x7; let mut i = needed; while i < len - left { let mi = unsafe { load_int_le!(msg, i, u64) }; self.state.v3 ^= mi; Sip24Rounds::c_rounds(&mut self.state); self.state.v0 ^= mi; i += 8; } self.tail = unsafe { u8to64_le(msg, i, left) }; self.ntail = left; } fn finish(&self) -> u64 { panic!("SipHasher128 cannot provide valid 64 bit hashes") } } #[derive(Debug, Clone, Default)] struct Sip24Rounds; impl Sip24Rounds { #[inline] fn c_rounds(state: &mut State) { compress!(state); compress!(state); } #[inline] fn d_rounds(state: &mut State) { compress!(state); compress!(state); compress!(state); compress!(state); } } #[cfg(test)] mod test { use std::hash::{Hash, Hasher}; use std::{slice, mem}; use super::SipHasher128; // Hash just the bytes of the slice, without length prefix struct Bytes<'a>(&'a [u8]); impl<'a> Hash for Bytes<'a> { #[allow(unused_must_use)] fn hash<H: Hasher>(&self, state: &mut H) { for byte in self.0 { state.write_u8(*byte); } } } fn hash_with<T: Hash>(mut st: SipHasher128, x: &T) -> (u64, u64) { x.hash(&mut st); st.finish128() } fn hash<T: Hash>(x: &T) -> (u64, u64) { hash_with(SipHasher128::new_with_keys(0, 0), x) } const TEST_VECTOR : [[u8; 16]; 64] = [ [0xa3,0x81,0x7f,0x04,0xba,0x25,0xa8,0xe6,0x6d,0xf6,0x72,0x14,0xc7,0x55,0x02,0x93], [0xda,0x87,0xc1,0xd8,0x6b,0x99,0xaf,0x44,0x34,0x76,0x59,0x11,0x9b,0x22,0xfc,0x45], [0x81,0x77,0x22,0x8d,0xa4,0xa4,0x5d,0xc7,0xfc,0xa3,0x8b,0xde,0xf6,0x0a,0xff,0xe4], 
[0x9c,0x70,0xb6,0x0c,0x52,0x67,0xa9,0x4e,0x5f,0x33,0xb6,0xb0,0x29,0x85,0xed,0x51], [0xf8,0x81,0x64,0xc1,0x2d,0x9c,0x8f,0xaf,0x7d,0x0f,0x6e,0x7c,0x7b,0xcd,0x55,0x79], [0x13,0x68,0x87,0x59,0x80,0x77,0x6f,0x88,0x54,0x52,0x7a,0x07,0x69,0x0e,0x96,0x27], [0x14,0xee,0xca,0x33,0x8b,0x20,0x86,0x13,0x48,0x5e,0xa0,0x30,0x8f,0xd7,0xa1,0x5e], [0xa1,0xf1,0xeb,0xbe,0xd8,0xdb,0xc1,0x53,0xc0,0xb8,0x4a,0xa6,0x1f,0xf0,0x82,0x39], [0x3b,0x62,0xa9,0xba,0x62,0x58,0xf5,0x61,0x0f,0x83,0xe2,0x64,0xf3,0x14,0x97,0xb4], [0x26,0x44,0x99,0x06,0x0a,0xd9,0xba,0xab,0xc4,0x7f,0x8b,0x02,0xbb,0x6d,0x71,0xed], [0x00,0x11,0x0d,0xc3,0x78,0x14,0x69,0x56,0xc9,0x54,0x47,0xd3,0xf3,0xd0,0xfb,0xba], [0x01,0x51,0xc5,0x68,0x38,0x6b,0x66,0x77,0xa2,0xb4,0xdc,0x6f,0x81,0xe5,0xdc,0x18], [0xd6,0x26,0xb2,0x66,0x90,0x5e,0xf3,0x58,0x82,0x63,0x4d,0xf6,0x85,0x32,0xc1,0x25], [0x98,0x69,0xe2,0x47,0xe9,0xc0,0x8b,0x10,0xd0,0x29,0x93,0x4f,0xc4,0xb9,0x52,0xf7], [0x31,0xfc,0xef,0xac,0x66,0xd7,0xde,0x9c,0x7e,0xc7,0x48,0x5f,0xe4,0x49,0x49,0x02], [0x54,0x93,0xe9,0x99,0x33,0xb0,0xa8,0x11,0x7e,0x08,0xec,0x0f,0x97,0xcf,0xc3,0xd9], [0x6e,0xe2,0xa4,0xca,0x67,0xb0,0x54,0xbb,0xfd,0x33,0x15,0xbf,0x85,0x23,0x05,0x77], [0x47,0x3d,0x06,0xe8,0x73,0x8d,0xb8,0x98,0x54,0xc0,0x66,0xc4,0x7a,0xe4,0x77,0x40], [0xa4,0x26,0xe5,0xe4,0x23,0xbf,0x48,0x85,0x29,0x4d,0xa4,0x81,0xfe,0xae,0xf7,0x23], [0x78,0x01,0x77,0x31,0xcf,0x65,0xfa,0xb0,0x74,0xd5,0x20,0x89,0x52,0x51,0x2e,0xb1], [0x9e,0x25,0xfc,0x83,0x3f,0x22,0x90,0x73,0x3e,0x93,0x44,0xa5,0xe8,0x38,0x39,0xeb], [0x56,0x8e,0x49,0x5a,0xbe,0x52,0x5a,0x21,0x8a,0x22,0x14,0xcd,0x3e,0x07,0x1d,0x12], [0x4a,0x29,0xb5,0x45,0x52,0xd1,0x6b,0x9a,0x46,0x9c,0x10,0x52,0x8e,0xff,0x0a,0xae], [0xc9,0xd1,0x84,0xdd,0xd5,0xa9,0xf5,0xe0,0xcf,0x8c,0xe2,0x9a,0x9a,0xbf,0x69,0x1c], [0x2d,0xb4,0x79,0xae,0x78,0xbd,0x50,0xd8,0x88,0x2a,0x8a,0x17,0x8a,0x61,0x32,0xad], [0x8e,0xce,0x5f,0x04,0x2d,0x5e,0x44,0x7b,0x50,0x51,0xb9,0xea,0xcb,0x8d,0x8f,0x6f], [0x9c,0x0b,0x53,0xb4,0xb3,0xc3,0x07,0xe8,0x7e,0xae,0xe0,0x86,0x78,0x14,0x1f,0x66], 
[0xab,0xf2,0x48,0xaf,0x69,0xa6,0xea,0xe4,0xbf,0xd3,0xeb,0x2f,0x12,0x9e,0xeb,0x94], [0x06,0x64,0xda,0x16,0x68,0x57,0x4b,0x88,0xb9,0x35,0xf3,0x02,0x73,0x58,0xae,0xf4], [0xaa,0x4b,0x9d,0xc4,0xbf,0x33,0x7d,0xe9,0x0c,0xd4,0xfd,0x3c,0x46,0x7c,0x6a,0xb7], [0xea,0x5c,0x7f,0x47,0x1f,0xaf,0x6b,0xde,0x2b,0x1a,0xd7,0xd4,0x68,0x6d,0x22,0x87], [0x29,0x39,0xb0,0x18,0x32,0x23,0xfa,0xfc,0x17,0x23,0xde,0x4f,0x52,0xc4,0x3d,0x35], [0x7c,0x39,0x56,0xca,0x5e,0xea,0xfc,0x3e,0x36,0x3e,0x9d,0x55,0x65,0x46,0xeb,0x68], [0x77,0xc6,0x07,0x71,0x46,0xf0,0x1c,0x32,0xb6,0xb6,0x9d,0x5f,0x4e,0xa9,0xff,0xcf], [0x37,0xa6,0x98,0x6c,0xb8,0x84,0x7e,0xdf,0x09,0x25,0xf0,0xf1,0x30,0x9b,0x54,0xde], [0xa7,0x05,0xf0,0xe6,0x9d,0xa9,0xa8,0xf9,0x07,0x24,0x1a,0x2e,0x92,0x3c,0x8c,0xc8], [0x3d,0xc4,0x7d,0x1f,0x29,0xc4,0x48,0x46,0x1e,0x9e,0x76,0xed,0x90,0x4f,0x67,0x11], [0x0d,0x62,0xbf,0x01,0xe6,0xfc,0x0e,0x1a,0x0d,0x3c,0x47,0x51,0xc5,0xd3,0x69,0x2b], [0x8c,0x03,0x46,0x8b,0xca,0x7c,0x66,0x9e,0xe4,0xfd,0x5e,0x08,0x4b,0xbe,0xe7,0xb5], [0x52,0x8a,0x5b,0xb9,0x3b,0xaf,0x2c,0x9c,0x44,0x73,0xcc,0xe5,0xd0,0xd2,0x2b,0xd9], [0xdf,0x6a,0x30,0x1e,0x95,0xc9,0x5d,0xad,0x97,0xae,0x0c,0xc8,0xc6,0x91,0x3b,0xd8], [0x80,0x11,0x89,0x90,0x2c,0x85,0x7f,0x39,0xe7,0x35,0x91,0x28,0x5e,0x70,0xb6,0xdb], [0xe6,0x17,0x34,0x6a,0xc9,0xc2,0x31,0xbb,0x36,0x50,0xae,0x34,0xcc,0xca,0x0c,0x5b], [0x27,0xd9,0x34,0x37,0xef,0xb7,0x21,0xaa,0x40,0x18,0x21,0xdc,0xec,0x5a,0xdf,0x89], [0x89,0x23,0x7d,0x9d,0xed,0x9c,0x5e,0x78,0xd8,0xb1,0xc9,0xb1,0x66,0xcc,0x73,0x42], [0x4a,0x6d,0x80,0x91,0xbf,0x5e,0x7d,0x65,0x11,0x89,0xfa,0x94,0xa2,0x50,0xb1,0x4c], [0x0e,0x33,0xf9,0x60,0x55,0xe7,0xae,0x89,0x3f,0xfc,0x0e,0x3d,0xcf,0x49,0x29,0x02], [0xe6,0x1c,0x43,0x2b,0x72,0x0b,0x19,0xd1,0x8e,0xc8,0xd8,0x4b,0xdc,0x63,0x15,0x1b], [0xf7,0xe5,0xae,0xf5,0x49,0xf7,0x82,0xcf,0x37,0x90,0x55,0xa6,0x08,0x26,0x9b,0x16], [0x43,0x8d,0x03,0x0f,0xd0,0xb7,0xa5,0x4f,0xa8,0x37,0xf2,0xad,0x20,0x1a,0x64,0x03], [0xa5,0x90,0xd3,0xee,0x4f,0xbf,0x04,0xe3,0x24,0x7e,0x0d,0x27,0xf2,0x86,0x42,0x3f], 
[0x5f,0xe2,0xc1,0xa1,0x72,0xfe,0x93,0xc4,0xb1,0x5c,0xd3,0x7c,0xae,0xf9,0xf5,0x38], [0x2c,0x97,0x32,0x5c,0xbd,0x06,0xb3,0x6e,0xb2,0x13,0x3d,0xd0,0x8b,0x3a,0x01,0x7c], [0x92,0xc8,0x14,0x22,0x7a,0x6b,0xca,0x94,0x9f,0xf0,0x65,0x9f,0x00,0x2a,0xd3,0x9e], [0xdc,0xe8,0x50,0x11,0x0b,0xd8,0x32,0x8c,0xfb,0xd5,0x08,0x41,0xd6,0x91,0x1d,0x87], [0x67,0xf1,0x49,0x84,0xc7,0xda,0x79,0x12,0x48,0xe3,0x2b,0xb5,0x92,0x25,0x83,0xda], [0x19,0x38,0xf2,0xcf,0x72,0xd5,0x4e,0xe9,0x7e,0x94,0x16,0x6f,0xa9,0x1d,0x2a,0x36], [0x74,0x48,0x1e,0x96,0x46,0xed,0x49,0xfe,0x0f,0x62,0x24,0x30,0x16,0x04,0x69,0x8e], [0x57,0xfc,0xa5,0xde,0x98,0xa9,0xd6,0xd8,0x00,0x64,0x38,0xd0,0x58,0x3d,0x8a,0x1d], [0x9f,0xec,0xde,0x1c,0xef,0xdc,0x1c,0xbe,0xd4,0x76,0x36,0x74,0xd9,0x57,0x53,0x59], [0xe3,0x04,0x0c,0x00,0xeb,0x28,0xf1,0x53,0x66,0xca,0x73,0xcb,0xd8,0x72,0xe7,0x40], [0x76,0x97,0x00,0x9a,0x6a,0x83,0x1d,0xfe,0xcc,0xa9,0x1c,0x59,0x93,0x67,0x0f,0x7a], [0x58,0x53,0x54,0x23,0x21,0xf5,0x67,0xa0,0x05,0xd5,0x47,0xa4,0xf0,0x47,0x59,0xbd], [0x51,0x50,0xd1,0x77,0x2f,0x50,0x83,0x4a,0x50,0x3e,0x06,0x9a,0x97,0x3f,0xbd,0x7c], ]; // Test vector from reference implementation #[test] fn test_siphash_2_4_test_vector() { let k0 = 0x_07_06_05_04_03_02_01_00; let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08; let mut input: Vec<u8> = Vec::new(); for i in 0 .. 
64 { let out = hash_with(SipHasher128::new_with_keys(k0, k1), &Bytes(&input[..])); let expected = ( ((TEST_VECTOR[i][0] as u64) << 0) | ((TEST_VECTOR[i][1] as u64) << 8) | ((TEST_VECTOR[i][2] as u64) << 16) | ((TEST_VECTOR[i][3] as u64) << 24) | ((TEST_VECTOR[i][4] as u64) << 32) | ((TEST_VECTOR[i][5] as u64) << 40) | ((TEST_VECTOR[i][6] as u64) << 48) | ((TEST_VECTOR[i][7] as u64) << 56), ((TEST_VECTOR[i][8] as u64) << 0) | ((TEST_VECTOR[i][9] as u64) << 8) | ((TEST_VECTOR[i][10] as u64) << 16) | ((TEST_VECTOR[i][11] as u64) << 24) | ((TEST_VECTOR[i][12] as u64) << 32) | ((TEST_VECTOR[i][13] as u64) << 40) | ((TEST_VECTOR[i][14] as u64) << 48) | ((TEST_VECTOR[i][15] as u64) << 56), ); assert_eq!(out, expected); input.push(i as u8); } } #[test] #[cfg(target_arch = "arm")] fn test_hash_usize() { let val = 0xdeadbeef_deadbeef_u64; assert!(hash(&(val as u64)) != hash(&(val as usize))); assert_eq!(hash(&(val as u32)), hash(&(val as usize))); } #[test] #[cfg(target_arch = "x86_64")] fn test_hash_usize() { let val = 0xdeadbeef_deadbeef_u64; assert_eq!(hash(&(val as u64)), hash(&(val as usize))); assert!(hash(&(val as u32)) != hash(&(val as usize))); } #[test] #[cfg(target_arch = "x86")] fn test_hash_usize() { let val = 0xdeadbeef_deadbeef_u64; assert!(hash(&(val as u64)) != hash(&(val as usize))); assert_eq!(hash(&(val as u32)), hash(&(val as usize))); } #[test] fn test_hash_idempotent() { let val64 = 0xdeadbeef_deadbeef_u64; assert_eq!(hash(&val64), hash(&val64)); let val32 = 0xdeadbeef_u32; assert_eq!(hash(&val32), hash(&val32)); } #[test] fn test_hash_no_bytes_dropped_64() { let val = 0xdeadbeef_deadbeef_u64; assert!(hash(&val) != hash(&zero_byte(val, 0))); assert!(hash(&val) != hash(&zero_byte(val, 1))); assert!(hash(&val) != hash(&zero_byte(val, 2))); assert!(hash(&val) != hash(&zero_byte(val, 3))); assert!(hash(&val) != hash(&zero_byte(val, 4))); assert!(hash(&val) != hash(&zero_byte(val, 5))); assert!(hash(&val) != hash(&zero_byte(val, 6))); assert!(hash(&val) != 
hash(&zero_byte(val, 7))); fn zero_byte(val: u64, byte: usize) -> u64 { assert!(byte < 8); val & !(0xff << (byte * 8)) } } #[test] fn test_hash_no_bytes_dropped_32() { let val = 0xdeadbeef_u32; assert!(hash(&val) != hash(&zero_byte(val, 0))); assert!(hash(&val) != hash(&zero_byte(val, 1))); assert!(hash(&val) != hash(&zero_byte(val, 2))); assert!(hash(&val) != hash(&zero_byte(val, 3))); fn zero_byte(val: u32, byte: usize) -> u32 { assert!(byte < 4); val & !(0xff << (byte * 8)) } } #[test] fn test_hash_no_concat_alias() { let s = ("aa", "bb"); let t = ("aabb", ""); let u = ("a", "abb"); assert!(s != t && t != u); assert!(hash(&s) != hash(&t) && hash(&s) != hash(&u)); let u = [1, 0, 0, 0]; let v = (&u[..1], &u[1..3], &u[3..]); let w = (&u[..], &u[4..4], &u[4..4]); assert!(v != w); assert!(hash(&v) != hash(&w)); } #[test] fn test_write_short_works() { let test_usize = 0xd0c0b0a0usize; let mut h1 = SipHasher128::new_with_keys(0, 0); h1.write_usize(test_usize); h1.write(b"bytes"); h1.write(b"string"); h1.write_u8(0xFFu8); h1.write_u8(0x01u8); let mut h2 = SipHasher128::new_with_keys(0, 0); h2.write(unsafe { slice::from_raw_parts(&test_usize as *const _ as *const u8, mem::size_of::<usize>()) }); h2.write(b"bytes"); h2.write(b"string"); h2.write(&[0xFFu8, 0x01u8]); assert_eq!(h1.finish128(), h2.finish128()); } }
0