repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/h1/timer.rs | actix-http/src/h1/timer.rs | use std::{fmt, future::Future, pin::Pin, task::Context};
use actix_rt::time::{Instant, Sleep};
use tracing::trace;
/// State of a connection-level timeout (e.g. keep-alive / shutdown) used by the
/// HTTP/1 dispatcher.
#[derive(Debug)]
pub(super) enum TimerState {
    /// Timeout feature is switched off; the timer is never armed.
    Disabled,
    /// Timeout feature is on, but no deadline is currently armed.
    Inactive,
    /// Timer is armed with a deadline held in the boxed `Sleep`.
    Active { timer: Pin<Box<Sleep>> },
}
impl TimerState {
    /// Creates a timer state: `Inactive` when the feature is enabled,
    /// `Disabled` otherwise.
    pub(super) fn new(enabled: bool) -> Self {
        match enabled {
            true => Self::Inactive,
            false => Self::Disabled,
        }
    }

    /// Returns true for every state except `Disabled`.
    pub(super) fn is_enabled(&self) -> bool {
        !matches!(self, Self::Disabled)
    }

    /// Arms the timer with `timer`; `line` is the caller's source line, used
    /// only for trace logging.
    pub(super) fn set(&mut self, timer: Sleep, line: u32) {
        if let Self::Disabled = self {
            trace!("setting disabled timer from line {}", line);
        }

        *self = Self::Active {
            timer: Box::pin(timer),
        };
    }

    /// Arms the timer and immediately polls it once so its waker is registered.
    pub(super) fn set_and_init(&mut self, cx: &mut Context<'_>, timer: Sleep, line: u32) {
        self.set(timer, line);
        self.init(cx);
    }

    /// Disarms the timer, logging if it was not actually armed.
    pub(super) fn clear(&mut self, line: u32) {
        match self {
            Self::Disabled => trace!("trying to clear a disabled timer from line {}", line),
            Self::Inactive => trace!("trying to clear an inactive timer from line {}", line),
            Self::Active { .. } => {}
        }

        // NOTE(review): resetting to `Inactive` even from `Disabled` flips
        // `is_enabled()` from false to true; this mirrors the original behavior.
        *self = Self::Inactive;
    }

    /// Polls an armed timer once to register the current task's waker.
    pub(super) fn init(&mut self, cx: &mut Context<'_>) {
        if let Self::Active { timer } = self {
            let _ = timer.as_mut().poll(cx);
        }
    }
}
impl fmt::Display for TimerState {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TimerState::Disabled => f.write_str("timer is disabled"),
TimerState::Inactive => f.write_str("timer is inactive"),
TimerState::Active { timer } => {
let deadline = timer.deadline();
let now = Instant::now();
if deadline < now {
f.write_str("timer is active and has reached deadline")
} else {
write!(
f,
"timer is active and due to expire in {} milliseconds",
((deadline - now).as_secs_f32() * 1000.0)
)
}
}
}
}
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/h1/expect.rs | actix-http/src/h1/expect.rs | use actix_service::{Service, ServiceFactory};
use actix_utils::future::{ready, Ready};
use crate::{Error, Request};
/// Default handler for `Expect: 100-continue` requests that accepts every request.
pub struct ExpectHandler;

impl ServiceFactory<Request> for ExpectHandler {
    type Response = Request;
    type Error = Error;
    type Config = ();
    type Service = ExpectHandler;
    type InitError = Error;
    type Future = Ready<Result<Self::Service, Self::InitError>>;

    /// Constructs the stateless expect service; construction never fails.
    fn new_service(&self, _: Self::Config) -> Self::Future {
        ready(Ok(ExpectHandler))
    }
}
impl Service<Request> for ExpectHandler {
    type Response = Request;
    type Error = Error;
    type Future = Ready<Result<Self::Response, Self::Error>>;

    // Accepting a request needs no resources, so the service is always ready.
    actix_service::always_ready!();

    /// Passes the request through unchanged, i.e. the expectation is accepted.
    fn call(&self, req: Request) -> Self::Future {
        ready(Ok(req))
        // TODO: add some way to trigger error
        // Err(error::ErrorExpectationFailed("test"))
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/h1/utils.rs | actix-http/src/h1/utils.rs | use std::{
future::Future,
pin::Pin,
task::{Context, Poll},
};
use actix_codec::{AsyncRead, AsyncWrite, Framed};
use pin_project_lite::pin_project;
use crate::{
body::{BodySize, MessageBody},
h1::{Codec, Message},
Error, Response,
};
pin_project! {
    /// Future that writes an HTTP/1 response (head and body) through a `Framed`
    /// codec and resolves to the framed I/O object once fully written.
    pub struct SendResponse<T, B> {
        // Response head plus body size; taken (`None`) once queued for writing.
        res: Option<Message<(Response<()>, BodySize)>>,
        // Response body stream; set to `None` after the EOF chunk is queued.
        #[pin]
        body: Option<B>,
        // Framed I/O object; taken and returned to the caller on completion.
        #[pin]
        framed: Option<Framed<T, Codec>>,
    }
}
impl<T, B> SendResponse<T, B>
where
B: MessageBody,
B::Error: Into<Error>,
{
pub fn new(framed: Framed<T, Codec>, response: Response<B>) -> Self {
let (res, body) = response.into_parts();
SendResponse {
res: Some((res, body.size()).into()),
body: Some(body),
framed: Some(framed),
}
}
}
impl<T, B> Future for SendResponse<T, B>
where
    T: AsyncRead + AsyncWrite + Unpin,
    B: MessageBody,
    B::Error: Into<Error>,
{
    type Output = Result<Framed<T, Codec>, Error>;

    // TODO: rethink if we need loops in polls
    /// Drives the send to completion: queues the response head, streams body
    /// chunks into the codec's write buffer, flushes, and finally yields the
    /// framed I/O object back to the caller.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let mut this = self.as_mut().project();

        // `body` is `None` once the final (EOF) chunk has been queued.
        let mut body_done = this.body.is_none();
        loop {
            // Whether the body stream produced data during this pass (cleared
            // when it returns `Pending`).
            let mut body_ready = !body_done;

            // send body
            // Only after the head was queued (`res` taken), while the write
            // buffer still has room.
            if this.res.is_none() && body_ready {
                while body_ready
                    && !body_done
                    && !this
                        .framed
                        .as_ref()
                        .as_pin_ref()
                        .unwrap()
                        .is_write_buf_full()
                {
                    let next = match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
                        Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
                        Poll::Ready(Some(Err(err))) => return Poll::Ready(Err(err.into())),
                        Poll::Ready(None) => Poll::Ready(None),
                        Poll::Pending => Poll::Pending,
                    };

                    match next {
                        Poll::Ready(item) => {
                            // body is done when item is None
                            body_done = item.is_none();
                            if body_done {
                                this.body.set(None);
                            }
                            let framed = this.framed.as_mut().as_pin_mut().unwrap();
                            // `Chunk(None)` encodes the end-of-body marker.
                            framed
                                .write(Message::Chunk(item))
                                .map_err(|err| Error::new_send_response().with_cause(err))?;
                        }
                        Poll::Pending => body_ready = false,
                    }
                }
            }

            let framed = this.framed.as_mut().as_pin_mut().unwrap();

            // flush write buffer
            if !framed.is_write_buf_empty() {
                match framed
                    .flush(cx)
                    .map_err(|err| Error::new_send_response().with_cause(err))?
                {
                    Poll::Ready(_) => {
                        if body_ready {
                            // More body data may be available; loop again.
                            continue;
                        } else {
                            // Flushed, but body stream is pending; park.
                            return Poll::Pending;
                        }
                    }
                    Poll::Pending => return Poll::Pending,
                }
            }

            // send response
            // Queue the head; looping back then starts streaming the body.
            if let Some(res) = this.res.take() {
                framed
                    .write(res)
                    .map_err(|err| Error::new_send_response().with_cause(err))?;
                continue;
            }

            if !body_done {
                if body_ready {
                    continue;
                } else {
                    return Poll::Pending;
                }
            } else {
                break;
            }
        }

        // Everything written and flushed; hand the I/O object back.
        let framed = this.framed.take().unwrap();

        Poll::Ready(Ok(framed))
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/h1/mod.rs | actix-http/src/h1/mod.rs | //! HTTP/1 protocol implementation.
use bytes::{Bytes, BytesMut};
mod chunked;
mod client;
mod codec;
mod decoder;
mod dispatcher;
#[cfg(test)]
mod dispatcher_tests;
mod encoder;
mod expect;
mod payload;
mod service;
mod timer;
mod upgrade;
mod utils;
pub use self::{
client::{ClientCodec, ClientPayloadCodec},
codec::Codec,
dispatcher::Dispatcher,
expect::ExpectHandler,
payload::Payload,
service::{H1Service, H1ServiceHandler},
upgrade::UpgradeHandler,
utils::SendResponse,
};
#[derive(Debug)]
/// Codec message
///
/// Unit produced/consumed by the HTTP/1 codecs: either a complete message head
/// or one chunk of its payload.
pub enum Message<T> {
    /// HTTP message.
    Item(T),
    /// Payload chunk; `None` marks the end of the payload stream.
    Chunk(Option<Bytes>),
}

impl<T> From<T> for Message<T> {
    /// Wraps a message head as `Message::Item`.
    fn from(item: T) -> Self {
        Message::Item(item)
    }
}

/// Incoming request type
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum MessageType {
    /// Message carries no payload.
    None,
    /// Message carries a payload (presumably of known length — confirm against decoder).
    Payload,
    /// Message payload is streamed (presumably chunked/unsized — confirm against decoder).
    Stream,
}
const LW: usize = 2 * 1024;
const HW: usize = 32 * 1024;
/// Tops the read buffer's capacity back up towards the high watermark (`HW`)
/// once it has fallen below the low watermark (`LW`).
pub(crate) fn reserve_readbuf(src: &mut BytesMut) {
    let capacity = src.capacity();
    if capacity < LW {
        src.reserve(HW - capacity);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Request;

    /// Test-only convenience accessors for unwrapping `Message<Request>` variants.
    impl Message<Request> {
        /// Returns the request head; panics if this is not an `Item`.
        pub fn message(self) -> Request {
            match self {
                Message::Item(req) => req,
                _ => panic!("error"),
            }
        }

        /// Returns the chunk bytes; panics unless this is a non-EOF chunk.
        pub fn chunk(self) -> Bytes {
            match self {
                Message::Chunk(Some(data)) => data,
                _ => panic!("error"),
            }
        }

        /// Returns whether this chunk is the EOF marker; panics on `Item`.
        pub fn eof(self) -> bool {
            match self {
                Message::Chunk(None) => true,
                Message::Chunk(Some(_)) => false,
                _ => panic!("error"),
            }
        }
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/h1/payload.rs | actix-http/src/h1/payload.rs | //! Payload stream
use std::{
cell::RefCell,
collections::VecDeque,
pin::Pin,
rc::{Rc, Weak},
task::{Context, Poll, Waker},
};
use bytes::Bytes;
use futures_core::Stream;
use crate::error::PayloadError;
/// max buffer size 32k
pub(crate) const MAX_BUFFER_SIZE: usize = 32_768;
/// Instruction for the I/O side about whether more payload data should be read.
#[derive(Debug, PartialEq, Eq)]
pub enum PayloadStatus {
    /// Buffer has room; keep reading.
    Read,
    /// Buffer is at capacity; pause reading until the receiver drains it.
    Pause,
    /// Receiver (`Payload`) was dropped; incoming data can be discarded.
    Dropped,
}
/// Buffered stream of bytes chunks
///
/// Payload stores chunks in a vector. First chunk can be received with `poll_next`. Payload does
/// not notify current task when new data is available.
///
/// Payload can be used as `Response` body stream.
#[derive(Debug)]
pub struct Payload {
    // State shared (single-threaded) with the `PayloadSender` side, which holds a `Weak`.
    inner: Rc<RefCell<Inner>>,
}
impl Payload {
    /// Creates a payload stream.
    ///
    /// Returns both halves of the stream:
    /// - `PayloadSender`: feeds bytes, errors, and EOF in
    /// - `Payload`: yields the buffered bytes out
    pub fn create(eof: bool) -> (PayloadSender, Payload) {
        let inner = Rc::new(RefCell::new(Inner::new(eof)));
        let sender = PayloadSender::new(Rc::downgrade(&inner));

        (sender, Payload { inner })
    }

    /// Creates an empty payload that is already at end-of-stream.
    pub(crate) fn empty() -> Payload {
        let inner = Rc::new(RefCell::new(Inner::new(true)));
        Payload { inner }
    }

    /// Number of bytes currently buffered in this payload.
    #[cfg(test)]
    pub fn len(&self) -> usize {
        self.inner.borrow().len()
    }

    /// Whether no bytes are currently buffered.
    #[cfg(test)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Pushes unconsumed bytes back to the front of the payload.
    #[inline]
    pub fn unread_data(&mut self, data: Bytes) {
        self.inner.borrow_mut().unread_data(data);
    }
}
impl Stream for Payload {
    type Item = Result<Bytes, PayloadError>;

    /// Delegates to the shared `Inner` buffer: yields the next buffered chunk,
    /// a stored error, or `None` once EOF was fed and the buffer is drained.
    fn poll_next(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Bytes, PayloadError>>> {
        Pin::new(&mut *self.inner.borrow_mut()).poll_next(cx)
    }
}
/// Sender half of the payload stream: feeds chunks, errors, and EOF into the
/// shared buffer that the `Payload` receiver reads from.
pub struct PayloadSender {
    inner: Weak<RefCell<Inner>>,
}

impl PayloadSender {
    /// Wraps a weak handle to the shared state.
    fn new(inner: Weak<RefCell<Inner>>) -> Self {
        Self { inner }
    }

    /// Forwards an error to the receiver, if it is still alive.
    #[inline]
    pub fn set_error(&mut self, err: PayloadError) {
        if let Some(shared) = self.inner.upgrade() {
            shared.borrow_mut().set_error(err);
        }
    }

    /// Signals end-of-stream to the receiver, if it is still alive.
    #[inline]
    pub fn feed_eof(&mut self) {
        if let Some(shared) = self.inner.upgrade() {
            shared.borrow_mut().feed_eof();
        }
    }

    /// Buffers a data chunk for the receiver, if it is still alive.
    #[inline]
    pub fn feed_data(&mut self, data: Bytes) {
        if let Some(shared) = self.inner.upgrade() {
            shared.borrow_mut().feed_data(data);
        }
    }

    /// Reports whether the I/O side should keep reading payload bytes,
    /// registering the I/O task's waker when backpressure applies.
    #[allow(clippy::needless_pass_by_ref_mut)]
    #[inline]
    pub fn need_read(&self, cx: &mut Context<'_>) -> PayloadStatus {
        // we check need_read only if Payload (other side) is alive,
        // otherwise always return true (consume payload)
        match self.inner.upgrade() {
            Some(shared) if shared.borrow().need_read => PayloadStatus::Read,
            Some(shared) => {
                shared.borrow_mut().register_io(cx);
                PayloadStatus::Pause
            }
            None => PayloadStatus::Dropped,
        }
    }
}
/// State shared between `Payload` (receiver) and `PayloadSender` (feeder).
#[derive(Debug)]
struct Inner {
    // Total number of bytes currently buffered across `items`.
    len: usize,
    // True once EOF has been fed; no more data will arrive.
    eof: bool,
    // Pending error, delivered to the receiver on its next poll.
    err: Option<PayloadError>,
    // Backpressure flag: true while `len` is below `MAX_BUFFER_SIZE`.
    need_read: bool,
    // Buffered chunks in arrival order.
    items: VecDeque<Bytes>,
    // Waker of the task awaiting payload data (`Payload::poll_next`).
    task: Option<Waker>,
    // Waker of the I/O task feeding data (registered via `register_io`).
    io_task: Option<Waker>,
}
impl Inner {
    /// Creates empty state; `eof = true` means the payload is already complete.
    fn new(eof: bool) -> Self {
        Inner {
            eof,
            len: 0,
            err: None,
            items: VecDeque::new(),
            need_read: true,
            task: None,
            io_task: None,
        }
    }

    /// Wake up future waiting for payload data to be available.
    fn wake(&mut self) {
        if let Some(waker) = self.task.take() {
            waker.wake();
        }
    }

    /// Wake up future feeding data to Payload.
    fn wake_io(&mut self) {
        if let Some(waker) = self.io_task.take() {
            waker.wake();
        }
    }

    /// Register future waiting data from payload.
    /// Waker would be used in `Inner::wake`
    fn register(&mut self, cx: &Context<'_>) {
        // Only replace the stored waker when it would not wake the current
        // task, avoiding a clone on every poll.
        if self.task.as_ref().is_none_or(|w| !cx.waker().will_wake(w)) {
            self.task = Some(cx.waker().clone());
        }
    }

    // Register future feeding data to payload.
    /// Waker would be used in `Inner::wake_io`
    fn register_io(&mut self, cx: &Context<'_>) {
        if self
            .io_task
            .as_ref()
            .is_none_or(|w| !cx.waker().will_wake(w))
        {
            self.io_task = Some(cx.waker().clone());
        }
    }

    /// Stores an error and wakes the receiver so it can observe it.
    #[inline]
    fn set_error(&mut self, err: PayloadError) {
        self.err = Some(err);
        self.wake();
    }

    /// Marks the stream complete and wakes the receiver.
    #[inline]
    fn feed_eof(&mut self) {
        self.eof = true;
        self.wake();
    }

    /// Buffers a chunk, updates the backpressure flag, and wakes the receiver.
    #[inline]
    fn feed_data(&mut self, data: Bytes) {
        self.len += data.len();
        self.items.push_back(data);
        // Request more data only while under the buffer cap.
        self.need_read = self.len < MAX_BUFFER_SIZE;
        self.wake();
    }

    // Number of buffered bytes (test-only accessor).
    #[cfg(test)]
    fn len(&self) -> usize {
        self.len
    }

    /// Yields the next chunk, or reports error / EOF / pending — in that
    /// priority order once the buffer is empty.
    fn poll_next(
        mut self: Pin<&mut Self>,
        cx: &Context<'_>,
    ) -> Poll<Option<Result<Bytes, PayloadError>>> {
        if let Some(data) = self.items.pop_front() {
            self.len -= data.len();
            self.need_read = self.len < MAX_BUFFER_SIZE;

            // Re-register interest only if more data can still arrive.
            if self.need_read && !self.eof {
                self.register(cx);
            }
            // Buffer space was freed; let the feeding side run again.
            self.wake_io();
            Poll::Ready(Some(Ok(data)))
        } else if let Some(err) = self.err.take() {
            Poll::Ready(Some(Err(err)))
        } else if self.eof {
            Poll::Ready(None)
        } else {
            // Buffer empty but stream unfinished: ask for more data and park.
            self.need_read = true;
            self.register(cx);
            self.wake_io();
            Poll::Pending
        }
    }

    /// Pushes a chunk back to the front (used by `Payload::unread_data`).
    // NOTE(review): does not refresh `need_read` even though `len` grows;
    // presumably fine because the bytes were already accounted for when first
    // fed — confirm against callers.
    fn unread_data(&mut self, data: Bytes) {
        self.len += data.len();
        self.items.push_front(data);
    }
}
#[cfg(test)]
mod tests {
    use std::{task::Poll, time::Duration};

    use actix_rt::time::timeout;
    use actix_utils::future::poll_fn;
    use futures_util::{FutureExt, StreamExt};
    use static_assertions::{assert_impl_all, assert_not_impl_any};
    use tokio::sync::oneshot;

    use super::*;

    // Compile-time checks of auto-trait expectations for the payload types.
    assert_impl_all!(Payload: Unpin);
    assert_not_impl_any!(Payload: Send, Sync);
    assert_impl_all!(Inner: Unpin, Send, Sync);

    // Upper bound on how long a test waits to be woken before failing.
    const WAKE_TIMEOUT: Duration = Duration::from_secs(2);

    /// Spawns a task that polls `payload` once (registering its waker), signals
    /// readiness over the returned channel, parks itself, and — once woken by
    /// the sender side — asserts the next item matches `expected`
    /// (`Some(Ok)` / `Some(Err)` / `None` for EOF).
    fn prepare_waking_test(
        mut payload: Payload,
        expected: Option<Result<(), ()>>,
    ) -> (oneshot::Receiver<()>, actix_rt::task::JoinHandle<()>) {
        let (tx, rx) = oneshot::channel();
        let handle = actix_rt::spawn(async move {
            // Make sure to poll once to set the waker
            poll_fn(|cx| {
                assert!(payload.poll_next_unpin(cx).is_pending());
                Poll::Ready(())
            })
            .await;
            tx.send(()).unwrap();
            // actix-rt is single-threaded, so this won't race with `rx.await`
            let mut pend_once = false;
            poll_fn(|_| {
                if pend_once {
                    Poll::Ready(())
                } else {
                    // Return pending without storing wakers, we already did on the previous
                    // `poll_fn`, now this task will only continue if the `sender` wakes us
                    pend_once = true;
                    Poll::Pending
                }
            })
            .await;
            // If the sender's wake reached us, the item must be immediately ready.
            let got = payload.next().now_or_never().unwrap();
            match expected {
                Some(Ok(_)) => assert!(got.unwrap().is_ok()),
                Some(Err(_)) => assert!(got.unwrap().is_err()),
                None => assert!(got.is_none()),
            }
        });
        (rx, handle)
    }

    /// Setting an error on the sender must wake a parked receiver task.
    #[actix_rt::test]
    async fn wake_on_error() {
        let (mut sender, payload) = Payload::create(false);
        let (rx, handle) = prepare_waking_test(payload, Some(Err(())));
        rx.await.unwrap();
        sender.set_error(PayloadError::Incomplete(None));
        timeout(WAKE_TIMEOUT, handle).await.unwrap().unwrap();
    }

    /// Feeding EOF must wake a parked receiver task.
    #[actix_rt::test]
    async fn wake_on_eof() {
        let (mut sender, payload) = Payload::create(false);
        let (rx, handle) = prepare_waking_test(payload, None);
        rx.await.unwrap();
        sender.feed_eof();
        timeout(WAKE_TIMEOUT, handle).await.unwrap().unwrap();
    }

    /// Bytes pushed back with `unread_data` are yielded again by the stream.
    #[actix_rt::test]
    async fn test_unread_data() {
        let (_, mut payload) = Payload::create(false);
        payload.unread_data(Bytes::from("data"));
        assert!(!payload.is_empty());
        assert_eq!(payload.len(), 4);
        assert_eq!(
            Bytes::from("data"),
            poll_fn(|cx| Pin::new(&mut payload).poll_next(cx))
                .await
                .unwrap()
                .unwrap()
        );
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/as_name.rs | actix-http/src/header/as_name.rs | //! Sealed [`AsHeaderName`] trait and implementations.
use std::{borrow::Cow, str::FromStr as _};
use http::header::{HeaderName, InvalidHeaderName};
/// Sealed trait implemented for types that can be effectively borrowed as a [`HeaderValue`].
///
/// [`HeaderValue`]: super::HeaderValue
pub trait AsHeaderName: Sealed {}

// Token passed to prove calls come through this module's API; sealing relies on
// module privacy (presumably not re-exported — confirm at crate level).
pub struct Seal;

pub trait Sealed {
    /// Converts `self` to a `HeaderName`, borrowing when it already is one and
    /// parsing (fallibly) otherwise.
    fn try_as_name(&self, seal: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName>;
}
impl Sealed for HeaderName {
    #[inline]
    fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {
        // Already a valid name: borrow, no allocation or validation needed.
        Ok(Cow::Borrowed(self))
    }
}
impl AsHeaderName for HeaderName {}

impl Sealed for &HeaderName {
    #[inline]
    fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {
        Ok(Cow::Borrowed(*self))
    }
}
impl AsHeaderName for &HeaderName {}
impl Sealed for &str {
    #[inline]
    fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {
        // String inputs must be validated; a successful parse yields an owned name.
        HeaderName::from_str(self).map(Cow::Owned)
    }
}
impl AsHeaderName for &str {}

impl Sealed for String {
    #[inline]
    fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {
        self.as_str().parse::<HeaderName>().map(Cow::Owned)
    }
}
impl AsHeaderName for String {}

impl Sealed for &String {
    #[inline]
    fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {
        self.as_str().parse::<HeaderName>().map(Cow::Owned)
    }
}
impl AsHeaderName for &String {}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/into_value.rs | actix-http/src/header/into_value.rs | //! [`TryIntoHeaderValue`] trait and implementations.
use bytes::Bytes;
use http::{header::InvalidHeaderValue, Error as HttpError, HeaderValue};
use mime::Mime;
/// An interface for types that can be converted into a [`HeaderValue`].
pub trait TryIntoHeaderValue: Sized {
    /// The type returned in the event of a conversion error.
    type Error: Into<HttpError>;

    /// Try to convert value to a HeaderValue.
    ///
    /// Returns `Self::Error` (convertible to [`HttpError`]) when `self` does
    /// not form a valid header value.
    fn try_into_value(self) -> Result<HeaderValue, Self::Error>;
}
// Identity conversion; always succeeds.
impl TryIntoHeaderValue for HeaderValue {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        Ok(self)
    }
}

// Clones the referenced value; always succeeds.
impl TryIntoHeaderValue for &HeaderValue {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        Ok(self.clone())
    }
}

// Validates the string's bytes as a header value.
impl TryIntoHeaderValue for &str {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.parse()
    }
}

impl TryIntoHeaderValue for &[u8] {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        HeaderValue::from_bytes(self)
    }
}

// May share the underlying `Bytes` buffer rather than copying it.
impl TryIntoHeaderValue for Bytes {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        HeaderValue::from_maybe_shared(self)
    }
}

impl TryIntoHeaderValue for Vec<u8> {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        HeaderValue::try_from(self)
    }
}

impl TryIntoHeaderValue for String {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        HeaderValue::try_from(self)
    }
}
// Integer conversions stringify the value and defer to the `String` impl.
impl TryIntoHeaderValue for usize {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.to_string().try_into_value()
    }
}

impl TryIntoHeaderValue for i64 {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.to_string().try_into_value()
    }
}

impl TryIntoHeaderValue for u64 {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.to_string().try_into_value()
    }
}

impl TryIntoHeaderValue for i32 {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.to_string().try_into_value()
    }
}

impl TryIntoHeaderValue for u32 {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        self.to_string().try_into_value()
    }
}

// A `Mime` renders to a valid ASCII string, validated again by the parser.
impl TryIntoHeaderValue for Mime {
    type Error = InvalidHeaderValue;

    #[inline]
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        HeaderValue::from_str(self.as_ref())
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/map.rs | actix-http/src/header/map.rs | //! A multi-value [`HeaderMap`] and its iterators.
use std::{borrow::Cow, collections::hash_map, iter, ops};
use foldhash::{HashMap as FoldHashMap, HashMapExt as _};
use http::header::{HeaderName, HeaderValue};
use smallvec::{smallvec, SmallVec};
use super::AsHeaderName;
/// A multi-map of HTTP headers.
///
/// `HeaderMap` is a "multi-map" of [`HeaderName`] to one or more [`HeaderValue`]s.
///
/// # Examples
///
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
///
/// let mut map = HeaderMap::new();
///
/// map.insert(header::CONTENT_TYPE, HeaderValue::from_static("text/plain"));
/// map.insert(header::ORIGIN, HeaderValue::from_static("example.com"));
///
/// assert!(map.contains_key(header::CONTENT_TYPE));
/// assert!(map.contains_key(header::ORIGIN));
///
/// let mut removed = map.remove(header::ORIGIN);
/// assert_eq!(removed.next().unwrap(), "example.com");
///
/// assert!(!map.contains_key(header::ORIGIN));
/// ```
///
/// Construct a header map using the [`FromIterator`] implementation. Note that it uses the append
/// strategy, so duplicate header names are preserved.
///
/// ```
/// use actix_http::header::{self, HeaderMap, HeaderValue};
///
/// let headers = HeaderMap::from_iter([
/// (header::CONTENT_TYPE, HeaderValue::from_static("text/plain")),
/// (header::COOKIE, HeaderValue::from_static("foo=1")),
/// (header::COOKIE, HeaderValue::from_static("bar=1")),
/// ]);
///
/// assert_eq!(headers.len(), 3);
/// ```
#[derive(Debug, Clone, Default)]
pub struct HeaderMap {
    // Map from header name to its non-empty list of values; multiple values
    // for the same name share a single key entry.
    pub(crate) inner: FoldHashMap<HeaderName, Value>,
}
/// A bespoke non-empty list for HeaderMap values.
#[derive(Debug, Clone)]
pub(crate) struct Value {
    inner: SmallVec<[HeaderValue; 4]>,
}

impl Value {
    /// Creates a list containing exactly one value.
    fn one(val: HeaderValue) -> Self {
        Self {
            inner: smallvec![val],
        }
    }

    /// First (oldest-inserted) value; indexing is safe because the list is
    /// constructed non-empty and only ever appended to.
    fn first(&self) -> &HeaderValue {
        &self.inner[0]
    }

    /// Mutable access to the first value.
    fn first_mut(&mut self) -> &mut HeaderValue {
        &mut self.inner[0]
    }

    /// Appends a value, preserving insertion order.
    fn append(&mut self, new_val: HeaderValue) {
        self.inner.push(new_val)
    }
}

// Read-only list operations (`len`, `iter`, indexing) come via deref.
impl ops::Deref for Value {
    type Target = SmallVec<[HeaderValue; 4]>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl HeaderMap {
/// Create an empty `HeaderMap`.
///
/// The map will be created without any capacity; this function will not allocate.
///
/// # Examples
/// ```
/// # use actix_http::header::HeaderMap;
/// let map = HeaderMap::new();
///
/// assert!(map.is_empty());
/// assert_eq!(0, map.capacity());
/// ```
pub fn new() -> Self {
HeaderMap::default()
}
/// Create an empty `HeaderMap` with the specified capacity.
///
/// The map will be able to hold at least `capacity` elements without needing to reallocate.
/// If `capacity` is 0, the map will be created without allocating.
///
/// # Examples
/// ```
/// # use actix_http::header::HeaderMap;
/// let map = HeaderMap::with_capacity(16);
///
/// assert!(map.is_empty());
/// assert!(map.capacity() >= 16);
/// ```
pub fn with_capacity(capacity: usize) -> Self {
HeaderMap {
inner: FoldHashMap::with_capacity(capacity),
}
}
/// Create new `HeaderMap` from a `http::HeaderMap`-like drain.
pub(crate) fn from_drain<I>(mut drain: I) -> Self
where
    I: Iterator<Item = (Option<HeaderName>, HeaderValue)>,
{
    // `http::HeaderMap` drains yield the name only on the first of a run of
    // same-named values; subsequent items carry `None` and reuse it.
    let (name, value) = match drain.next() {
        None => return HeaderMap::new(),
        Some((name, value)) => (name.expect("drained first item had no name"), value),
    };

    let (lower, upper) = drain.size_hint();
    let mut map = HeaderMap::with_capacity(upper.unwrap_or(lower));

    let mut prev_name = name.clone();
    map.append(name, value);

    for (name, value) in drain {
        let name = name.unwrap_or(prev_name);
        map.append(name.clone(), value);
        prev_name = name;
    }

    map
}
/// Returns the number of values stored in the map.
///
/// See also: [`len_keys`](Self::len_keys).
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
/// assert_eq!(map.len(), 0);
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
/// assert_eq!(map.len(), 2);
///
/// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
/// assert_eq!(map.len(), 3);
/// ```
pub fn len(&self) -> usize {
    // Sum the per-name value lists; each key holds at least one value.
    self.inner
        .values()
        .fold(0, |acc, values| acc + values.len())
}
/// Returns the number of _keys_ stored in the map.
///
/// The number of values stored will be at least this number. See also: [`Self::len`].
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
/// assert_eq!(map.len_keys(), 0);
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
/// assert_eq!(map.len_keys(), 2);
///
/// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
/// assert_eq!(map.len_keys(), 2);
/// ```
pub fn len_keys(&self) -> usize {
    // One map entry per unique header name, regardless of value count.
    self.inner.len()
}
/// Returns true if the map contains no elements.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
/// assert!(map.is_empty());
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// assert!(!map.is_empty());
/// ```
pub fn is_empty(&self) -> bool {
    // Idiomatic emptiness check (clippy::len_zero); equivalent to `len() == 0`
    // since every stored key holds at least one value.
    self.inner.is_empty()
}
/// Clears the map, removing all name-value pairs.
///
/// Keeps the allocated memory for reuse.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
/// assert_eq!(map.len(), 2);
///
/// map.clear();
/// assert!(map.is_empty());
/// ```
pub fn clear(&mut self) {
    // Drops all entries but retains the map's allocation for reuse.
    self.inner.clear();
}
fn get_value(&self, key: impl AsHeaderName) -> Option<&Value> {
match key.try_as_name(super::as_name::Seal).ok()? {
Cow::Borrowed(name) => self.inner.get(name),
Cow::Owned(name) => self.inner.get(&name),
}
}
/// Returns a reference to the _first_ value associated with a header name.
///
/// Returns `None` if there is no value associated with the key.
///
/// Even when multiple values are associated with the key, the "first" one is returned but is
/// not guaranteed to be chosen with any particular order; though, the returned item will be
/// consistent for each call to `get` if the map has not changed.
///
/// See also: [`get_all`](Self::get_all).
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
///
/// let cookie = map.get(header::SET_COOKIE).unwrap();
/// assert_eq!(cookie, "one=1");
///
/// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
/// assert_eq!(map.get(header::SET_COOKIE).unwrap(), "one=1");
///
/// assert_eq!(map.get(header::SET_COOKIE), map.get("set-cookie"));
/// assert_eq!(map.get(header::SET_COOKIE), map.get("Set-Cookie"));
///
/// assert!(map.get(header::HOST).is_none());
/// assert!(map.get("INVALID HEADER NAME").is_none());
/// ```
pub fn get(&self, key: impl AsHeaderName) -> Option<&HeaderValue> {
    self.get_value(key).map(|value| value.first())
}
/// Returns a mutable reference to the _first_ value associated a header name.
///
/// Returns `None` if there is no value associated with the key.
///
/// Even when multiple values are associated with the key, the "first" one is returned but is
/// not guaranteed to be chosen with any particular order; though, the returned item will be
/// consistent for each call to `get_mut` if the map has not changed.
///
/// See also: [`get_all`](Self::get_all).
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
///
/// let mut cookie = map.get_mut(header::SET_COOKIE).unwrap();
/// assert_eq!(cookie, "one=1");
///
/// *cookie = HeaderValue::from_static("three=3");
/// assert_eq!(map.get(header::SET_COOKIE).unwrap(), "three=3");
///
/// assert!(map.get(header::HOST).is_none());
/// assert!(map.get("INVALID HEADER NAME").is_none());
/// ```
pub fn get_mut(&mut self, key: impl AsHeaderName) -> Option<&mut HeaderValue> {
match key.try_as_name(super::as_name::Seal).ok()? {
Cow::Borrowed(name) => self.inner.get_mut(name).map(Value::first_mut),
Cow::Owned(name) => self.inner.get_mut(&name).map(Value::first_mut),
}
}
/// Returns an iterator over all values associated with a header name.
///
/// The returned iterator does not incur any allocations and will yield no items if there are no
/// values associated with the key. Iteration order is guaranteed to be the same as
/// insertion order.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// let mut none_iter = map.get_all(header::ORIGIN);
/// assert!(none_iter.next().is_none());
///
/// map.insert(header::SET_COOKIE, HeaderValue::from_static("one=1"));
/// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
///
/// let mut set_cookies_iter = map.get_all(header::SET_COOKIE);
/// assert_eq!(set_cookies_iter.next().unwrap(), "one=1");
/// assert_eq!(set_cookies_iter.next().unwrap(), "two=2");
/// assert!(set_cookies_iter.next().is_none());
/// ```
pub fn get_all(&self, key: impl AsHeaderName) -> std::slice::Iter<'_, HeaderValue> {
    // An empty-array iterator stands in when the key has no values.
    self.get_value(key)
        .map_or_else(|| [].iter(), |value| value.iter())
}
// TODO: get_all_mut ?
/// Returns `true` if the map contains a value for the specified key.
///
/// Invalid header names will simply return false.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
/// assert!(!map.contains_key(header::ACCEPT));
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// assert!(map.contains_key(header::ACCEPT));
/// ```
pub fn contains_key(&self, key: impl AsHeaderName) -> bool {
match key.try_as_name(super::as_name::Seal) {
Ok(Cow::Borrowed(name)) => self.inner.contains_key(name),
Ok(Cow::Owned(name)) => self.inner.contains_key(&name),
Err(_) => false,
}
}
/// Inserts (overrides) a name-value pair in the map.
///
/// If the map already contained this key, the new value is associated with the key and all
/// previous values are removed and returned as a `Removed` iterator. The key is not updated;
/// this matters for types that can be `==` without being identical.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// assert!(map.contains_key(header::ACCEPT));
/// assert_eq!(map.len(), 1);
///
/// let mut removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/csv"));
/// assert_eq!(removed.next().unwrap(), "text/plain");
/// assert!(removed.next().is_none());
///
/// assert_eq!(map.len(), 1);
/// ```
///
/// A convenience method is provided on the returned iterator to check if the insertion replaced
/// any values.
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/plain"));
/// assert!(removed.is_empty());
///
/// let removed = map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
/// assert!(!removed.is_empty());
/// ```
pub fn insert(&mut self, name: HeaderName, val: HeaderValue) -> Removed {
    // Any previously stored value list for this name is handed back to the caller.
    let replaced = self.inner.insert(name, Value::one(val));
    Removed::new(replaced)
}
/// Appends a name-value pair to the map.
///
/// If the map already contained this key, the new value is added to the list of values
/// currently associated with the key. The key is not updated; this matters for types that can
/// be `==` without being identical.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.append(header::HOST, HeaderValue::from_static("example.com"));
/// assert_eq!(map.len(), 1);
///
/// map.append(header::ACCEPT, HeaderValue::from_static("text/csv"));
/// assert_eq!(map.len(), 2);
///
/// map.append(header::ACCEPT, HeaderValue::from_static("text/html"));
/// assert_eq!(map.len(), 3);
/// ```
pub fn append(&mut self, key: HeaderName, value: HeaderValue) {
match self.inner.entry(key) {
hash_map::Entry::Occupied(mut entry) => {
entry.get_mut().append(value);
}
hash_map::Entry::Vacant(entry) => {
entry.insert(Value::one(value));
}
};
}
/// Removes all headers for a particular header name from the map.
///
/// Providing an invalid header names (as a string argument) will have no effect and return
/// without error.
///
/// # Examples
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
/// map.append(header::SET_COOKIE, HeaderValue::from_static("one=2"));
///
/// assert_eq!(map.len(), 2);
///
/// let mut removed = map.remove(header::SET_COOKIE);
/// assert_eq!(removed.next().unwrap(), "one=1");
/// assert_eq!(removed.next().unwrap(), "one=2");
/// assert!(removed.next().is_none());
///
/// assert!(map.is_empty());
/// ```
///
/// A convenience method is provided on the returned iterator to check if the `remove` call
/// actually removed any values.
/// ```
/// # use actix_http::header::{self, HeaderMap, HeaderValue};
/// let mut map = HeaderMap::new();
///
/// let removed = map.remove("accept");
/// assert!(removed.is_empty());
///
/// map.insert(header::ACCEPT, HeaderValue::from_static("text/html"));
/// let removed = map.remove("accept");
/// assert!(!removed.is_empty());
/// ```
pub fn remove(&mut self, key: impl AsHeaderName) -> Removed {
let value = match key.try_as_name(super::as_name::Seal) {
Ok(Cow::Borrowed(name)) => self.inner.remove(name),
Ok(Cow::Owned(name)) => self.inner.remove(&name),
Err(_) => None,
};
Removed::new(value)
}
    /// Returns the number of single-value headers the map can hold without needing to reallocate.
    ///
    /// Since this is a multi-value map, the actual capacity is much larger when considering
    /// each header name can be associated with an arbitrary number of values. The effect is that
    /// the size of `len` may be greater than `capacity` since it counts all the values.
    /// Conversely, [`len_keys`](Self::len_keys) will never be larger than capacity.
    ///
    /// # Examples
    /// ```
    /// # use actix_http::header::HeaderMap;
    /// let map = HeaderMap::with_capacity(16);
    ///
    /// assert!(map.is_empty());
    /// assert!(map.capacity() >= 16);
    /// ```
    pub fn capacity(&self) -> usize {
        // Delegates to the underlying hash map; counts key slots, not values.
        self.inner.capacity()
    }
    /// Reserves capacity for at least `additional` more headers to be inserted in the map.
    ///
    /// The header map may reserve more space to avoid frequent reallocations. Additional capacity
    /// only considers single-value headers.
    ///
    /// # Panics
    /// Panics if the new allocation size overflows usize.
    ///
    /// # Examples
    /// ```
    /// # use actix_http::header::HeaderMap;
    /// let mut map = HeaderMap::with_capacity(2);
    /// assert!(map.capacity() >= 2);
    ///
    /// map.reserve(100);
    /// assert!(map.capacity() >= 102);
    ///
    /// assert!(map.is_empty());
    /// ```
    pub fn reserve(&mut self, additional: usize) {
        // Delegates to the underlying hash map's reserve.
        self.inner.reserve(additional)
    }
    /// An iterator over all name-value pairs.
    ///
    /// Names will be yielded for each associated value. So, if a key has 3 associated values, it
    /// will be yielded 3 times. The iteration order should be considered arbitrary.
    ///
    /// # Examples
    /// ```
    /// # use actix_http::header::{self, HeaderMap, HeaderValue};
    /// let mut map = HeaderMap::new();
    ///
    /// let mut iter = map.iter();
    /// assert!(iter.next().is_none());
    ///
    /// map.append(header::HOST, HeaderValue::from_static("duck.com"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
    ///
    /// let mut iter = map.iter();
    /// assert!(iter.next().is_some());
    /// assert!(iter.next().is_some());
    /// assert!(iter.next().is_some());
    /// assert!(iter.next().is_none());
    ///
    /// let pairs = map.iter().collect::<Vec<_>>();
    /// assert!(pairs.contains(&(&header::HOST, &HeaderValue::from_static("duck.com"))));
    /// assert!(pairs.contains(&(&header::SET_COOKIE, &HeaderValue::from_static("one=1"))));
    /// assert!(pairs.contains(&(&header::SET_COOKIE, &HeaderValue::from_static("two=2"))));
    /// ```
    pub fn iter(&self) -> Iter<'_> {
        // `Iter` lazily flattens each name's value list into (name, value) pairs.
        Iter::new(self.inner.iter())
    }
    /// An iterator over all contained header names.
    ///
    /// Each name will only be yielded once even if it has multiple associated values. The iteration
    /// order should be considered arbitrary.
    ///
    /// # Examples
    /// ```
    /// # use actix_http::header::{self, HeaderMap, HeaderValue};
    /// let mut map = HeaderMap::new();
    ///
    /// let mut iter = map.keys();
    /// assert!(iter.next().is_none());
    ///
    /// map.append(header::HOST, HeaderValue::from_static("duck.com"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
    ///
    /// let keys = map.keys().cloned().collect::<Vec<_>>();
    /// assert_eq!(keys.len(), 2);
    /// assert!(keys.contains(&header::HOST));
    /// assert!(keys.contains(&header::SET_COOKIE));
    /// ```
    pub fn keys(&self) -> Keys<'_> {
        // Thin wrapper over the underlying hash map's key iterator.
        Keys(self.inner.keys())
    }
    /// Retains only the headers specified by the predicate.
    ///
    /// In other words, removes all headers `(name, val)` for which `retain_fn(&name, &mut val)`
    /// returns false.
    ///
    /// The order in which headers are visited should be considered arbitrary.
    ///
    /// # Examples
    /// ```
    /// # use actix_http::header::{self, HeaderMap, HeaderValue};
    /// let mut map = HeaderMap::new();
    ///
    /// map.append(header::HOST, HeaderValue::from_static("duck.com"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
    ///
    /// map.retain(|name, val| val.as_bytes().starts_with(b"one"));
    ///
    /// assert_eq!(map.len(), 1);
    /// assert!(map.contains_key(&header::SET_COOKIE));
    /// ```
    pub fn retain<F>(&mut self, mut retain_fn: F)
    where
        F: FnMut(&HeaderName, &mut HeaderValue) -> bool,
    {
        // Filter each name's value list in place, then drop names left with no values.
        self.inner.retain(|name, vals| {
            vals.inner.retain(|val| retain_fn(name, val));
            // invariant: make sure newly empty value lists are removed
            !vals.is_empty()
        })
    }
    /// Clears the map, returning all name-value sets as an iterator.
    ///
    /// Header names will only be yielded for the first value in each set. All items that are
    /// yielded without a name and after an item with a name are associated with that same name.
    /// The first item will always contain a name.
    ///
    /// Keeps the allocated memory for reuse.
    /// # Examples
    /// ```
    /// # use actix_http::header::{self, HeaderMap, HeaderValue};
    /// let mut map = HeaderMap::new();
    ///
    /// let mut iter = map.drain();
    /// assert!(iter.next().is_none());
    /// drop(iter);
    ///
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("one=1"));
    /// map.append(header::SET_COOKIE, HeaderValue::from_static("two=2"));
    ///
    /// let mut iter = map.drain();
    /// assert_eq!(iter.next().unwrap(), (Some(header::SET_COOKIE), HeaderValue::from_static("one=1")));
    /// assert_eq!(iter.next().unwrap(), (None, HeaderValue::from_static("two=2")));
    /// drop(iter);
    ///
    /// assert!(map.is_empty());
    /// ```
    pub fn drain(&mut self) -> Drain<'_> {
        // `Drain` borrows the map mutably, so allocated capacity is kept after it drops.
        Drain::new(self.inner.drain())
    }
}
/// Note that this implementation will clone a [HeaderName] for each value. Consider using
/// [`drain`](Self::drain) to control header name cloning.
impl IntoIterator for HeaderMap {
    type Item = (HeaderName, HeaderValue);
    type IntoIter = IntoIter;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.inner.into_iter())
    }
}
impl<'a> IntoIterator for &'a HeaderMap {
    type Item = (&'a HeaderName, &'a HeaderValue);
    type IntoIter = Iter<'a>;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Borrowing iteration; equivalent to calling `HeaderMap::iter`.
        Iter::new(self.inner.iter())
    }
}
impl FromIterator<(HeaderName, HeaderValue)> for HeaderMap {
    fn from_iter<T: IntoIterator<Item = (HeaderName, HeaderValue)>>(iter: T) -> Self {
        let mut map = Self::new();
        for (name, value) in iter {
            // `append` preserves duplicate names as multi-value entries.
            map.append(name, value);
        }
        map
    }
}
/// Convert a `http::HeaderMap` to our `HeaderMap`.
impl From<http::HeaderMap> for HeaderMap {
    fn from(mut map: http::HeaderMap) -> Self {
        // Delegates to `from_drain` (defined earlier in this file).
        Self::from_drain(map.drain())
    }
}
/// Convert our `HeaderMap` to a `http::HeaderMap`.
impl From<HeaderMap> for http::HeaderMap {
    fn from(map: HeaderMap) -> Self {
        // Built via the owning iterator, which clones names for multi-value entries.
        Self::from_iter(map)
    }
}
/// Convert our `&HeaderMap` to a `http::HeaderMap`.
impl From<&HeaderMap> for http::HeaderMap {
    fn from(map: &HeaderMap) -> Self {
        // Clones the whole map first, then reuses the owned conversion above.
        map.to_owned().into()
    }
}
/// Iterator over removed, owned values with the same associated name.
///
/// Returned from methods that remove or replace items. See [`HeaderMap::insert`]
/// and [`HeaderMap::remove`].
#[derive(Debug)]
pub struct Removed {
    // `None` means nothing was removed; `Some` holds the removed value list.
    inner: Option<smallvec::IntoIter<[HeaderValue; 4]>>,
}
impl Removed {
fn new(value: Option<Value>) -> Self {
let inner = value.map(|value| value.inner.into_iter());
Self { inner }
}
/// Returns true if iterator contains no elements, without consuming it.
///
/// If called immediately after [`HeaderMap::insert`] or [`HeaderMap::remove`], it will indicate
/// whether any items were actually replaced or removed, respectively.
pub fn is_empty(&self) -> bool {
match self.inner {
// size hint lower bound of smallvec is the correct length
Some(ref iter) => iter.size_hint().0 == 0,
None => true,
}
}
}
impl Iterator for Removed {
    type Item = HeaderValue;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // `None` inner state behaves as an already-exhausted iterator.
        match self.inner {
            Some(ref mut iter) => iter.next(),
            None => None,
        }
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner
            .as_ref()
            .map_or((0, None), |iter| iter.size_hint())
    }
}
impl ExactSizeIterator for Removed {}
impl iter::FusedIterator for Removed {}
/// Iterator over all names in the map.
#[derive(Debug)]
pub struct Keys<'a>(hash_map::Keys<'a, HeaderName, Value>);
impl<'a> Iterator for Keys<'a> {
    type Item = &'a HeaderName;
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Pure delegation to the hash map's key iterator.
        self.0.next()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
impl ExactSizeIterator for Keys<'_> {}
impl iter::FusedIterator for Keys<'_> {}
/// Iterator over borrowed name-value pairs.
#[derive(Debug)]
pub struct Iter<'a> {
    inner: hash_map::Iter<'a, HeaderName, Value>,
    // in-progress multi-value entry: the name and its full value list
    multi_inner: Option<(&'a HeaderName, &'a SmallVec<[HeaderValue; 4]>)>,
    // position within the current multi-value list
    multi_idx: usize,
}
impl<'a> Iter<'a> {
    fn new(iter: hash_map::Iter<'a, HeaderName, Value>) -> Self {
        Self {
            inner: iter,
            multi_idx: 0,
            multi_inner: None,
        }
    }
}
impl<'a> Iterator for Iter<'a> {
    type Item = (&'a HeaderName, &'a HeaderValue);
    fn next(&mut self) -> Option<Self::Item> {
        // handle in-progress multi value lists first
        if let Some((name, ref mut vals)) = self.multi_inner {
            match vals.get(self.multi_idx) {
                Some(val) => {
                    self.multi_idx += 1;
                    return Some((name, val));
                }
                None => {
                    // no more items in value list; reset state
                    self.multi_idx = 0;
                    self.multi_inner = None;
                }
            }
        }
        // advance to the next map entry; `None` here ends iteration
        let (name, value) = self.inner.next()?;
        // set up new inner iter and recurse into it
        // (recursion depth is at most 1 since the fresh list is handled above)
        self.multi_inner = Some((name, &value.inner));
        self.next()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // take inner lower bound
        // make no attempt at an upper bound
        (self.inner.size_hint().0, None)
    }
}
// NOTE(review): `ExactSizeIterator::len` asserts that size_hint's upper bound equals
// its lower bound, but `size_hint` above returns `None` as the upper bound — calling
// `len()` on this iterator would panic. Confirm whether this impl is intended.
impl ExactSizeIterator for Iter<'_> {}
impl iter::FusedIterator for Iter<'_> {}
/// Iterator over drained name-value pairs.
///
/// Iterator items are `(Option<HeaderName>, HeaderValue)` to avoid cloning.
#[derive(Debug)]
pub struct Drain<'a> {
    inner: hash_map::Drain<'a, HeaderName, Value>,
    // in-progress multi-value entry; the name is `take`n on the first yield of each set
    multi_inner: Option<(Option<HeaderName>, SmallVec<[HeaderValue; 4]>)>,
    multi_idx: usize,
}
impl<'a> Drain<'a> {
    fn new(iter: hash_map::Drain<'a, HeaderName, Value>) -> Self {
        Self {
            inner: iter,
            multi_inner: None,
            multi_idx: 0,
        }
    }
}
impl Iterator for Drain<'_> {
    type Item = (Option<HeaderName>, HeaderValue);
    fn next(&mut self) -> Option<Self::Item> {
        // handle in-progress multi value iterators first
        if let Some((ref mut name, ref mut vals)) = self.multi_inner {
            if !vals.is_empty() {
                // `name.take()` yields the name only with the first value of each set
                // OPTIMIZE: array removals
                return Some((name.take(), vals.remove(0)));
            } else {
                // no more items in value iterator; reset state
                // NOTE(review): `multi_idx` is only ever reset here and never read in
                // this impl — it appears vestigial; confirm before removing the field.
                self.multi_inner = None;
                self.multi_idx = 0;
            }
        }
        let (name, value) = self.inner.next()?;
        // set up new inner iter and recurse into it
        self.multi_inner = Some((Some(name), value.inner));
        self.next()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // take inner lower bound
        // make no attempt at an upper bound
        (self.inner.size_hint().0, None)
    }
}
impl ExactSizeIterator for Drain<'_> {}
impl iter::FusedIterator for Drain<'_> {}
/// Iterator over owned name-value pairs.
///
/// Implementation necessarily clones header names for each value.
#[derive(Debug)]
pub struct IntoIter {
    inner: hash_map::IntoIter<HeaderName, Value>,
    // in-progress multi-value entry: name (cloned per value) and remaining values
    multi_inner: Option<(HeaderName, smallvec::IntoIter<[HeaderValue; 4]>)>,
}
impl IntoIter {
    fn new(inner: hash_map::IntoIter<HeaderName, Value>) -> Self {
        Self {
            inner,
            multi_inner: None,
        }
    }
}
impl Iterator for IntoIter {
    type Item = (HeaderName, HeaderValue);
    fn next(&mut self) -> Option<Self::Item> {
        // handle in-progress multi value iterators first
        if let Some((ref name, ref mut vals)) = self.multi_inner {
            match vals.next() {
                Some(val) => {
                    // each value gets its own clone of the name
                    return Some((name.clone(), val));
                }
                None => {
                    // no more items in value iterator; reset state
                    self.multi_inner = None;
                }
            }
        }
        let (name, value) = self.inner.next()?;
        // set up new inner iter and recurse into it
        self.multi_inner = Some((name, value.inner.into_iter()));
        self.next()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // take inner lower bound
        // make no attempt at an upper bound
        (self.inner.size_hint().0, None)
    }
}
impl ExactSizeIterator for IntoIter {}
impl iter::FusedIterator for IntoIter {}
#[cfg(test)]
mod tests {
use std::iter::FusedIterator;
use http::header;
use static_assertions::assert_impl_all;
use super::*;
assert_impl_all!(HeaderMap: IntoIterator);
assert_impl_all!(Keys<'_>: Iterator, ExactSizeIterator, FusedIterator);
assert_impl_all!(std::slice::Iter<'_, HeaderValue>: Iterator, ExactSizeIterator, FusedIterator);
assert_impl_all!(Removed: Iterator, ExactSizeIterator, FusedIterator);
assert_impl_all!(Iter<'_>: Iterator, ExactSizeIterator, FusedIterator);
assert_impl_all!(IntoIter: Iterator, ExactSizeIterator, FusedIterator);
assert_impl_all!(Drain<'_>: Iterator, ExactSizeIterator, FusedIterator);
#[test]
fn create() {
let map = HeaderMap::new();
assert_eq!(map.len(), 0);
assert_eq!(map.capacity(), 0);
let map = HeaderMap::with_capacity(16);
assert_eq!(map.len(), 0);
assert!(map.capacity() >= 16);
}
#[test]
fn insert() {
let mut map = HeaderMap::new();
map.insert(header::LOCATION, HeaderValue::from_static("/test"));
assert_eq!(map.len(), 1);
}
#[test]
fn contains() {
let mut map = HeaderMap::new();
assert!(!map.contains_key(header::LOCATION));
map.insert(header::LOCATION, HeaderValue::from_static("/test"));
assert!(map.contains_key(header::LOCATION));
assert!(map.contains_key("Location"));
assert!(map.contains_key("Location".to_owned()));
assert!(map.contains_key("location"));
}
#[test]
fn entries_iter() {
let mut map = HeaderMap::new();
map.append(header::HOST, HeaderValue::from_static("duck.com"));
map.append(header::COOKIE, HeaderValue::from_static("one=1"));
map.append(header::COOKIE, HeaderValue::from_static("two=2"));
let mut iter = map.iter();
assert!(iter.next().is_some());
assert!(iter.next().is_some());
assert!(iter.next().is_some());
assert!(iter.next().is_none());
let pairs = map.iter().collect::<Vec<_>>();
assert!(pairs.contains(&(&header::HOST, &HeaderValue::from_static("duck.com"))));
assert!(pairs.contains(&(&header::COOKIE, &HeaderValue::from_static("one=1"))));
assert!(pairs.contains(&(&header::COOKIE, &HeaderValue::from_static("two=2"))));
}
#[test]
fn drain_iter() {
let mut map = HeaderMap::new();
map.append(header::COOKIE, HeaderValue::from_static("one=1"));
map.append(header::COOKIE, HeaderValue::from_static("two=2"));
let mut vals = vec![];
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | true |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/into_pair.rs | actix-http/src/header/into_pair.rs | //! [`TryIntoHeaderPair`] trait and implementations.
use super::{
Header, HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, TryIntoHeaderValue,
};
use crate::error::HttpError;
/// An interface for types that can be converted into a [`HeaderName`] + [`HeaderValue`] pair for
/// insertion into a [`HeaderMap`].
///
/// [`HeaderMap`]: super::HeaderMap
pub trait TryIntoHeaderPair: Sized {
    /// Error type produced when conversion fails; must convert into [`HttpError`].
    type Error: Into<HttpError>;
    /// Attempts to convert `self` into a validated header name-value pair.
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error>;
}
/// Error indicating which part of a header pair failed validation.
#[derive(Debug)]
pub enum InvalidHeaderPart {
    /// The header name was invalid.
    Name(InvalidHeaderName),
    /// The header value was invalid.
    Value(InvalidHeaderValue),
}
impl From<InvalidHeaderPart> for HttpError {
    fn from(part_err: InvalidHeaderPart) -> Self {
        // Both underlying error types already convert into `HttpError`.
        match part_err {
            InvalidHeaderPart::Name(err) => err.into(),
            InvalidHeaderPart::Value(err) => err.into(),
        }
    }
}
impl<V> TryIntoHeaderPair for (HeaderName, V)
where
    V: TryIntoHeaderValue,
    V::Error: Into<InvalidHeaderValue>,
{
    type Error = InvalidHeaderPart;
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // The name is already validated; only the value conversion can fail.
        let (name, value) = self;
        match value.try_into_value() {
            Ok(value) => Ok((name, value)),
            Err(err) => Err(InvalidHeaderPart::Value(err.into())),
        }
    }
}
impl<V> TryIntoHeaderPair for (&HeaderName, V)
where
    V: TryIntoHeaderValue,
    V::Error: Into<InvalidHeaderValue>,
{
    type Error = InvalidHeaderPart;
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // The borrowed name is cloned only after the value converts successfully.
        let (name, value) = self;
        match value.try_into_value() {
            Ok(value) => Ok((name.clone(), value)),
            Err(err) => Err(InvalidHeaderPart::Value(err.into())),
        }
    }
}
impl<V> TryIntoHeaderPair for (&[u8], V)
where
    V: TryIntoHeaderValue,
    V::Error: Into<InvalidHeaderValue>,
{
    type Error = InvalidHeaderPart;
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // Validate the raw name bytes first, then convert the value.
        let (name, value) = self;
        let name = HeaderName::try_from(name).map_err(InvalidHeaderPart::Name)?;
        match value.try_into_value() {
            Ok(value) => Ok((name, value)),
            Err(err) => Err(InvalidHeaderPart::Value(err.into())),
        }
    }
}
impl<V> TryIntoHeaderPair for (&str, V)
where
    V: TryIntoHeaderValue,
    V::Error: Into<InvalidHeaderValue>,
{
    type Error = InvalidHeaderPart;
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // Validate the name string first, then convert the value.
        let (name, value) = self;
        let name = HeaderName::try_from(name).map_err(InvalidHeaderPart::Name)?;
        match value.try_into_value() {
            Ok(value) => Ok((name, value)),
            Err(err) => Err(InvalidHeaderPart::Value(err.into())),
        }
    }
}
impl<V> TryIntoHeaderPair for (String, V)
where
    V: TryIntoHeaderValue,
    V::Error: Into<InvalidHeaderValue>,
{
    type Error = InvalidHeaderPart;
    #[inline]
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // Delegate to the `(&str, V)` implementation to avoid duplication.
        let (name, value) = self;
        (name.as_str(), value).try_into_pair()
    }
}
impl<T: Header> TryIntoHeaderPair for T {
    type Error = <T as TryIntoHeaderValue>::Error;
    #[inline]
    fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error> {
        // Typed headers know their own name; only the value conversion can fail.
        Ok((T::name(), self.try_into_value()?))
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/utils.rs | actix-http/src/header/utils.rs | //! Header parsing utilities.
use std::{fmt, str::FromStr};
use super::HeaderValue;
use crate::{error::ParseError, header::HTTP_VALUE};
/// Reads a comma-delimited raw header into a Vec.
///
/// Each header value is split on commas; segments that are empty after trimming whitespace, or
/// that fail to parse as `T`, are silently skipped.
///
/// # Errors
/// Returns [`ParseError::Header`] if any header value cannot be interpreted as a string.
#[inline]
pub fn from_comma_delimited<'a, I, T>(all: I) -> Result<Vec<T>, ParseError>
where
    I: Iterator<Item = &'a HeaderValue> + 'a,
    T: FromStr,
{
    // Upper bound of the iterator is a reasonable initial capacity guess.
    let size_guess = all.size_hint().1.unwrap_or(2);
    let mut result = Vec::with_capacity(size_guess);
    for h in all {
        let s = h.to_str().map_err(|_| ParseError::Header)?;
        // Trim each segment once (the previous version trimmed twice) and drop
        // empty or unparseable segments.
        result.extend(
            s.split(',')
                .map(str::trim)
                .filter(|x| !x.is_empty())
                .filter_map(|x| x.parse().ok()),
        )
    }
    Ok(result)
}
/// Reads a single string when parsing a header.
#[inline]
pub fn from_one_raw_str<T: FromStr>(val: Option<&HeaderValue>) -> Result<T, ParseError> {
    // A missing value, a value that is not a valid string, an empty string, and a
    // parse failure all map to the same generic header parse error.
    let line = val.ok_or(ParseError::Header)?;
    let line = line.to_str().map_err(|_| ParseError::Header)?;
    if line.is_empty() {
        return Err(ParseError::Header);
    }
    T::from_str(line).map_err(|_| ParseError::Header)
}
/// Format an array into a comma-delimited string.
#[inline]
pub fn fmt_comma_delimited<T>(f: &mut fmt::Formatter<'_>, parts: &[T]) -> fmt::Result
where
    T: fmt::Display,
{
    // Write the ", " separator before every item except the first.
    for (idx, part) in parts.iter().enumerate() {
        if idx > 0 {
            f.write_str(", ")?;
        }
        fmt::Display::fmt(part, f)?;
    }
    Ok(())
}
/// Percent encode a sequence of bytes with a character set defined in [RFC 5987 §3.2].
///
/// [RFC 5987 §3.2]: https://datatracker.ietf.org/doc/html/rfc5987#section-3.2
#[inline]
pub fn http_percent_encode(f: &mut fmt::Formatter<'_>, bytes: &[u8]) -> fmt::Result {
    // `HTTP_VALUE` (crate-level encode set) selects which bytes get escaped.
    let encoded = percent_encoding::percent_encode(bytes, HTTP_VALUE);
    fmt::Display::fmt(&encoded, f)
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn comma_delimited_parsing() {
        // no header values at all yields an empty vec
        let headers = [];
        let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
        assert_eq!(res, vec![0; 0]);
        // values are split on commas across multiple header lines
        let headers = [
            HeaderValue::from_static("1, 2"),
            HeaderValue::from_static("3,4"),
        ];
        let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
        assert_eq!(res, vec![1, 2, 3, 4]);
        // empty and whitespace-only segments are skipped
        let headers = [
            HeaderValue::from_static(""),
            HeaderValue::from_static(","),
            HeaderValue::from_static(" "),
            HeaderValue::from_static("1 ,"),
            HeaderValue::from_static(""),
        ];
        let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();
        assert_eq!(res, vec![1]);
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/mod.rs | actix-http/src/header/mod.rs | //! Pre-defined `HeaderName`s, traits for parsing and conversion, and other header utility methods.
// declaring new header consts will yield this error
#![allow(clippy::declare_interior_mutable_const)]
// re-export from http except header map related items
pub use ::http::header::{
HeaderName, HeaderValue, InvalidHeaderName, InvalidHeaderValue, ToStrError,
};
// re-export const header names, list is explicit so that any updates to `common` module do not
// conflict with this set
pub use ::http::header::{
ACCEPT, ACCEPT_CHARSET, ACCEPT_ENCODING, ACCEPT_LANGUAGE, ACCEPT_RANGES,
ACCESS_CONTROL_ALLOW_CREDENTIALS, ACCESS_CONTROL_ALLOW_HEADERS, ACCESS_CONTROL_ALLOW_METHODS,
ACCESS_CONTROL_ALLOW_ORIGIN, ACCESS_CONTROL_EXPOSE_HEADERS, ACCESS_CONTROL_MAX_AGE,
ACCESS_CONTROL_REQUEST_HEADERS, ACCESS_CONTROL_REQUEST_METHOD, AGE, ALLOW, ALT_SVC,
AUTHORIZATION, CACHE_CONTROL, CONNECTION, CONTENT_DISPOSITION, CONTENT_ENCODING,
CONTENT_LANGUAGE, CONTENT_LENGTH, CONTENT_LOCATION, CONTENT_RANGE, CONTENT_SECURITY_POLICY,
CONTENT_SECURITY_POLICY_REPORT_ONLY, CONTENT_TYPE, COOKIE, DATE, DNT, ETAG, EXPECT, EXPIRES,
FORWARDED, FROM, HOST, IF_MATCH, IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_RANGE,
IF_UNMODIFIED_SINCE, LAST_MODIFIED, LINK, LOCATION, MAX_FORWARDS, ORIGIN, PRAGMA,
PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, PUBLIC_KEY_PINS, PUBLIC_KEY_PINS_REPORT_ONLY, RANGE,
REFERER, REFERRER_POLICY, REFRESH, RETRY_AFTER, SEC_WEBSOCKET_ACCEPT, SEC_WEBSOCKET_EXTENSIONS,
SEC_WEBSOCKET_KEY, SEC_WEBSOCKET_PROTOCOL, SEC_WEBSOCKET_VERSION, SERVER, SET_COOKIE,
STRICT_TRANSPORT_SECURITY, TE, TRAILER, TRANSFER_ENCODING, UPGRADE, UPGRADE_INSECURE_REQUESTS,
USER_AGENT, VARY, VIA, WARNING, WWW_AUTHENTICATE, X_CONTENT_TYPE_OPTIONS,
X_DNS_PREFETCH_CONTROL, X_FRAME_OPTIONS, X_XSS_PROTECTION,
};
use percent_encoding::{AsciiSet, CONTROLS};
use crate::{error::ParseError, HttpMessage};
mod as_name;
mod common;
mod into_pair;
mod into_value;
pub mod map;
mod shared;
mod utils;
pub use self::{
as_name::AsHeaderName,
// re-export list is explicit so that any updates to `http` do not conflict with this set
common::{
CACHE_STATUS, CDN_CACHE_CONTROL, CLEAR_SITE_DATA, CROSS_ORIGIN_EMBEDDER_POLICY,
CROSS_ORIGIN_OPENER_POLICY, CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY,
X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO,
},
into_pair::TryIntoHeaderPair,
into_value::TryIntoHeaderValue,
map::HeaderMap,
shared::{
parse_extended_value, q, Charset, ContentEncoding, ExtendedValue, HttpDate, LanguageTag,
Quality, QualityItem,
},
utils::{fmt_comma_delimited, from_comma_delimited, from_one_raw_str, http_percent_encode},
};
/// An interface for types that already represent a valid header.
pub trait Header: TryIntoHeaderValue {
    /// Returns the name of the header field.
    fn name() -> HeaderName;
    /// Parse the header from a HTTP message.
    ///
    /// Implementations are expected to return a [`ParseError`] when the header
    /// cannot be parsed from `msg`.
    fn parse<M: HttpMessage>(msg: &M) -> Result<Self, ParseError>;
}
/// This encode set is used for HTTP header values and is defined at
/// <https://datatracker.ietf.org/doc/html/rfc5987#section-3.2>.
///
/// Per RFC 5987, `-` is an `attr-char` and must *not* be percent-encoded, whereas `=` is
/// excluded from `attr-char` and therefore *must* be encoded. The previous set wrongly
/// encoded `-` and left `=` unescaped.
pub(crate) const HTTP_VALUE: &AsciiSet = &CONTROLS
    .add(b' ')
    .add(b'"')
    .add(b'%')
    .add(b'\'')
    .add(b'(')
    .add(b')')
    .add(b'*')
    .add(b',')
    .add(b'/')
    .add(b':')
    .add(b';')
    .add(b'<')
    .add(b'=')
    .add(b'>')
    .add(b'?')
    .add(b'[')
    .add(b'\\')
    .add(b']')
    .add(b'{')
    .add(b'}');
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/common.rs | actix-http/src/header/common.rs | //! Common header names not defined in [`http`].
//!
//! Any headers added to this file will need to be re-exported from the list at `crate::headers`.
use http::header::HeaderName;
// NOTE: `HeaderName::from_static` requires lowercase names — all constants below comply.
/// Response header field that indicates how caches have handled that response and its corresponding
/// request.
///
/// See [RFC 9211](https://www.rfc-editor.org/rfc/rfc9211) for full semantics.
// TODO(breaking): replace with http's version
pub const CACHE_STATUS: HeaderName = HeaderName::from_static("cache-status");
/// Response header field that allows origin servers to control the behavior of CDN caches
/// interposed between them and clients separately from other caches that might handle the response.
///
/// See [RFC 9213](https://www.rfc-editor.org/rfc/rfc9213) for full semantics.
// TODO(breaking): replace with http's version
pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");
/// Response header field that sends a signal to the user agent that it ought to remove all data of
/// a certain set of types.
///
/// See the [W3C Clear-Site-Data spec] for full semantics.
///
/// [W3C Clear-Site-Data spec]: https://www.w3.org/TR/clear-site-data/#header
pub const CLEAR_SITE_DATA: HeaderName = HeaderName::from_static("clear-site-data");
/// Response header that prevents a document from loading any cross-origin resources that don't
/// explicitly grant the document permission (using [CORP] or [CORS]).
///
/// [CORP]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Cross-Origin_Resource_Policy_(CORP)
/// [CORS]: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS
pub const CROSS_ORIGIN_EMBEDDER_POLICY: HeaderName =
    HeaderName::from_static("cross-origin-embedder-policy");
/// Response header that allows you to ensure a top-level document does not share a browsing context
/// group with cross-origin documents.
pub const CROSS_ORIGIN_OPENER_POLICY: HeaderName =
    HeaderName::from_static("cross-origin-opener-policy");
/// Response header that conveys a desire that the browser blocks no-cors cross-origin/cross-site
/// requests to the given resource.
pub const CROSS_ORIGIN_RESOURCE_POLICY: HeaderName =
    HeaderName::from_static("cross-origin-resource-policy");
/// Response header that provides a mechanism to allow and deny the use of browser features in a
/// document or within any `<iframe>` elements in the document.
pub const PERMISSIONS_POLICY: HeaderName = HeaderName::from_static("permissions-policy");
// De-facto standard `X-Forwarded-*` headers set by proxies and load balancers.
/// Request header (de-facto standard) for identifying the originating IP address of a client
/// connecting to a web server through a proxy server.
pub const X_FORWARDED_FOR: HeaderName = HeaderName::from_static("x-forwarded-for");
/// Request header (de-facto standard) for identifying the original host requested by the client in
/// the `Host` HTTP request header.
pub const X_FORWARDED_HOST: HeaderName = HeaderName::from_static("x-forwarded-host");
/// Request header (de-facto standard) for identifying the protocol that a client used to connect to
/// your proxy or load balancer.
pub const X_FORWARDED_PROTO: HeaderName = HeaderName::from_static("x-forwarded-proto");
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/charset.rs | actix-http/src/header/shared/charset.rs | use std::{fmt, str};
use self::Charset::*;
/// A MIME character set.
///
/// The string representation is normalized to upper case.
///
/// See <http://www.iana.org/assignments/character-sets/character-sets.xhtml>.
#[derive(Debug, Clone, PartialEq, Eq)]
#[allow(non_camel_case_types)]
pub enum Charset {
    /// US ASCII
    Us_Ascii,
    /// ISO-8859-1
    Iso_8859_1,
    /// ISO-8859-2
    Iso_8859_2,
    /// ISO-8859-3
    Iso_8859_3,
    /// ISO-8859-4
    Iso_8859_4,
    /// ISO-8859-5
    Iso_8859_5,
    /// ISO-8859-6
    Iso_8859_6,
    /// ISO-8859-7
    Iso_8859_7,
    /// ISO-8859-8
    Iso_8859_8,
    /// ISO-8859-9
    Iso_8859_9,
    /// ISO-8859-10
    Iso_8859_10,
    /// Shift_JIS
    Shift_Jis,
    /// EUC-JP
    Euc_Jp,
    /// ISO-2022-KR
    Iso_2022_Kr,
    /// EUC-KR
    Euc_Kr,
    /// ISO-2022-JP
    Iso_2022_Jp,
    /// ISO-2022-JP-2
    Iso_2022_Jp_2,
    /// ISO-8859-6-E
    Iso_8859_6_E,
    /// ISO-8859-6-I
    Iso_8859_6_I,
    /// ISO-8859-8-E
    Iso_8859_8_E,
    /// ISO-8859-8-I
    Iso_8859_8_I,
    /// GB2312
    Gb2312,
    /// Big5
    Big5,
    /// KOI8-R
    Koi8_R,
    /// An arbitrary charset specified as a string.
    ///
    /// Values produced by the `FromStr` impl are stored upper-cased.
    Ext(String),
}
impl Charset {
    /// Returns the canonical label used when displaying this charset.
    fn label(&self) -> &str {
        match *self {
            Us_Ascii => "US-ASCII",
            Iso_8859_1 => "ISO-8859-1",
            Iso_8859_2 => "ISO-8859-2",
            Iso_8859_3 => "ISO-8859-3",
            Iso_8859_4 => "ISO-8859-4",
            Iso_8859_5 => "ISO-8859-5",
            Iso_8859_6 => "ISO-8859-6",
            Iso_8859_7 => "ISO-8859-7",
            Iso_8859_8 => "ISO-8859-8",
            Iso_8859_9 => "ISO-8859-9",
            Iso_8859_10 => "ISO-8859-10",
            Shift_Jis => "Shift-JIS",
            Euc_Jp => "EUC-JP",
            Iso_2022_Kr => "ISO-2022-KR",
            Euc_Kr => "EUC-KR",
            Iso_2022_Jp => "ISO-2022-JP",
            Iso_2022_Jp_2 => "ISO-2022-JP-2",
            Iso_8859_6_E => "ISO-8859-6-E",
            Iso_8859_6_I => "ISO-8859-6-I",
            Iso_8859_8_E => "ISO-8859-8-E",
            Iso_8859_8_I => "ISO-8859-8-I",
            Gb2312 => "GB2312",
            Big5 => "Big5",
            Koi8_R => "KOI8-R",
            // `Ext` labels are displayed verbatim (upper-cased at parse time)
            Ext(ref s) => s,
        }
    }
}
impl fmt::Display for Charset {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display is just the canonical label.
        f.write_str(self.label())
    }
}
impl str::FromStr for Charset {
    type Err = crate::Error;
    // NOTE: this implementation never returns `Err`; unrecognized labels are
    // preserved (upper-cased) in the `Ext` variant.
    fn from_str(s: &str) -> Result<Charset, crate::Error> {
        Ok(match s.to_ascii_uppercase().as_ref() {
            "US-ASCII" => Us_Ascii,
            "ISO-8859-1" => Iso_8859_1,
            "ISO-8859-2" => Iso_8859_2,
            "ISO-8859-3" => Iso_8859_3,
            "ISO-8859-4" => Iso_8859_4,
            "ISO-8859-5" => Iso_8859_5,
            "ISO-8859-6" => Iso_8859_6,
            "ISO-8859-7" => Iso_8859_7,
            "ISO-8859-8" => Iso_8859_8,
            "ISO-8859-9" => Iso_8859_9,
            "ISO-8859-10" => Iso_8859_10,
            "SHIFT-JIS" => Shift_Jis,
            "EUC-JP" => Euc_Jp,
            "ISO-2022-KR" => Iso_2022_Kr,
            "EUC-KR" => Euc_Kr,
            "ISO-2022-JP" => Iso_2022_Jp,
            "ISO-2022-JP-2" => Iso_2022_Jp_2,
            "ISO-8859-6-E" => Iso_8859_6_E,
            "ISO-8859-6-I" => Iso_8859_6_I,
            "ISO-8859-8-E" => Iso_8859_8_E,
            "ISO-8859-8-I" => Iso_8859_8_I,
            "GB2312" => Gb2312,
            "BIG5" => Big5,
            "KOI8-R" => Koi8_R,
            s => Ext(s.to_owned()),
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse() {
        // parsing is case-insensitive for known labels
        assert_eq!(Us_Ascii, "us-ascii".parse().unwrap());
        assert_eq!(Us_Ascii, "US-Ascii".parse().unwrap());
        assert_eq!(Us_Ascii, "US-ASCII".parse().unwrap());
        assert_eq!(Shift_Jis, "Shift-JIS".parse().unwrap());
        // unknown labels are preserved in `Ext`, upper-cased
        assert_eq!(Ext("ABCD".to_owned()), "abcd".parse().unwrap());
    }
    #[test]
    fn test_display() {
        assert_eq!("US-ASCII", format!("{}", Us_Ascii));
        assert_eq!("ABCD", format!("{}", Ext("ABCD".to_owned())));
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/http_date.rs | actix-http/src/header/shared/http_date.rs | use std::{fmt, io::Write, str::FromStr, time::SystemTime};
use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};
use crate::{
date::DATE_VALUE_LENGTH, error::ParseError, header::TryIntoHeaderValue, helpers::MutWriter,
};
/// A timestamp with HTTP-style formatting and parsing.
///
/// Thin wrapper around [`SystemTime`]; parsing and formatting are delegated
/// to the `httpdate` crate (see the `FromStr` and `Display` impls below).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(SystemTime);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match httpdate::parse_http_date(s) {
Ok(sys_time) => Ok(HttpDate(sys_time)),
Err(_) => Err(ParseError::Header),
}
}
}
impl fmt::Display for HttpDate {
    /// Formats the timestamp via the `httpdate` crate's formatter.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let date = httpdate::HttpDate::from(self.0);
        fmt::Display::fmt(&date, f)
    }
}
impl TryIntoHeaderValue for HttpDate {
    type Error = InvalidHeaderValue;

    /// Renders the date into a pre-sized buffer and wraps it as a header value.
    fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
        let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
        let mut writer = MutWriter(&mut buf);

        // unwrap: date output is known to be well formed and of known length
        write!(writer, "{}", self).unwrap();

        HeaderValue::from_maybe_shared(buf.split().freeze())
    }
}
impl From<SystemTime> for HttpDate {
fn from(sys_time: SystemTime) -> HttpDate {
HttpDate(sys_time)
}
}
impl From<HttpDate> for SystemTime {
fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
sys_time
}
}
#[cfg(test)]
mod tests {
    use std::time::Duration;
    use super::*;
    #[test]
    fn date_header() {
        macro_rules! assert_parsed_date {
            ($case:expr, $exp:expr) => {
                assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
            };
        }
        // All three RFC-permitted date formats must parse to the same instant.
        // 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH));
        let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));
        assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
        assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
        assert_parsed_date!("Mon Nov  7 08:48:37 1994", nov_07);
        assert!("this-is-no-date".parse::<HttpDate>().is_err());
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/quality.rs | actix-http/src/header/shared/quality.rs | use std::fmt;
use derive_more::{Display, Error};
// Integer form of the maximum quality 1.0 (three implied decimal places).
const MAX_QUALITY_INT: u16 = 1000;
// Float form of the maximum quality 1.0.
const MAX_QUALITY_FLOAT: f32 = 1.0;
/// Represents a quality used in q-factor values.
///
/// The default value is equivalent to `q=1.0` (the [max](Self::MAX) value).
///
/// # Implementation notes
/// The quality value is defined as a number between 0.0 and 1.0 with three decimal places.
/// This means there are 1001 possible values. Since floating point numbers are not exact and the
/// smallest floating point data type (`f32`) consumes four bytes, we use an `u16` value to store
/// the quality internally.
///
/// [RFC 7231 §5.3.1] gives more information on quality values in HTTP header fields.
///
/// # Examples
/// ```
/// use actix_http::header::{Quality, q};
/// assert_eq!(q(1.0), Quality::MAX);
///
/// assert_eq!(q(0.42).to_string(), "0.42");
/// assert_eq!(q(1.0).to_string(), "1");
/// assert_eq!(Quality::MIN.to_string(), "0.001");
/// assert_eq!(Quality::ZERO.to_string(), "0");
/// ```
///
/// [RFC 7231 §5.3.1]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
// Invariant: the wrapped integer is within 0..=MAX_QUALITY_INT (thousandths).
pub struct Quality(pub(super) u16);
impl Quality {
    /// The maximum quality value, equivalent to `q=1.0`.
    pub const MAX: Quality = Quality(MAX_QUALITY_INT);
    /// The minimum, non-zero quality value, equivalent to `q=0.001`.
    pub const MIN: Quality = Quality(1);
    /// The zero quality value, equivalent to `q=0.0`.
    pub const ZERO: Quality = Quality(0);
    /// Converts a float in the range 0.0–1.0 to a `Quality`.
    ///
    /// Intentionally private. External uses should rely on the `TryFrom` impl.
    ///
    /// # Panics
    /// Panics in debug mode when value is not in the range 0.0 <= n <= 1.0.
    fn from_f32(value: f32) -> Self {
        // Check that `value` is within range should be done before calling this method.
        // Just in case, this debug_assert should catch if we were forgetful.
        debug_assert!(
            (0.0..=MAX_QUALITY_FLOAT).contains(&value),
            "q value must be between 0.0 and 1.0"
        );
        // Scale to thousandths; the `as` cast truncates toward zero.
        Quality((value * MAX_QUALITY_INT as f32) as u16)
    }
}
/// The default value is [`Quality::MAX`].
impl Default for Quality {
fn default() -> Quality {
Quality::MAX
}
}
impl fmt::Display for Quality {
    /// Writes the q-value with up to three decimals and trailing zeroes removed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.0 {
            0 => f.write_str("0"),
            MAX_QUALITY_INT => f.write_str("1"),
            // some number in the range 1–999
            x => {
                f.write_str("0.")?;
                // This implementation avoids string allocation for removing trailing zeroes.
                // In benchmarks it is twice as fast as approach using something like
                // `format!("{}").trim_end_matches('0')` for non-fast-path quality values.
                if x < 10 {
                    // x is in range 1–9
                    f.write_str("00")?;
                    // 0 is already handled so it's not possible to have a trailing 0 in this range
                    // we can just write the integer
                    itoa_fmt(f, x)
                } else if x < 100 {
                    // x is in range 10–99
                    f.write_str("0")?;
                    if x % 10 == 0 {
                        // trailing 0, divide by 10 and write
                        itoa_fmt(f, x / 10)
                    } else {
                        itoa_fmt(f, x)
                    }
                } else {
                    // x is in range 100–999
                    if x % 100 == 0 {
                        // two trailing 0s, divide by 100 and write
                        itoa_fmt(f, x / 100)
                    } else if x % 10 == 0 {
                        // one trailing 0, divide by 10 and write
                        itoa_fmt(f, x / 10)
                    } else {
                        itoa_fmt(f, x)
                    }
                }
            }
        }
    }
}
/// Write integer to a `fmt::Write`.
pub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Result {
    // Format into itoa's stack buffer, then emit the resulting slice.
    let mut buffer = itoa::Buffer::new();
    let formatted = buffer.format(value);
    wr.write_str(formatted)
}
/// Error returned when a q-value is outside the permitted 0.0–1.0 range.
#[derive(Debug, Clone, Display, Error)]
#[display("quality out of bounds")]
#[non_exhaustive]
pub struct QualityOutOfBounds;
impl TryFrom<f32> for Quality {
    type Error = QualityOutOfBounds;

    /// Validates the range before converting; NaN is rejected by the range check.
    #[inline]
    fn try_from(value: f32) -> Result<Self, Self::Error> {
        if !(0.0..=MAX_QUALITY_FLOAT).contains(&value) {
            return Err(QualityOutOfBounds);
        }

        Ok(Quality::from_f32(value))
    }
}
/// Convenience function to create a [`Quality`] from an `f32` (0.0–1.0).
///
/// Not recommended for use with user input. Rely on the `TryFrom` impls where possible.
///
/// # Panics
/// Panics if value is out of range.
///
/// # Examples
/// ```
/// # use actix_http::header::{q, Quality};
/// let q1 = q(1.0);
/// assert_eq!(q1, Quality::MAX);
///
/// let q2 = q(0.001);
/// assert_eq!(q2, Quality::MIN);
///
/// let q3 = q(0.0);
/// assert_eq!(q3, Quality::ZERO);
///
/// let q4 = q(0.42);
/// ```
///
/// An out-of-range `f32` quality will panic.
/// ```should_panic
/// # use actix_http::header::q;
/// let _q2 = q(1.42);
/// ```
#[inline]
pub fn q<T>(quality: T) -> Quality
where
    T: TryInto<Quality>,
    T::Error: fmt::Debug,
{
    // Panicking is intentional here; fallible callers should use `TryInto` directly.
    quality.try_into().expect("quality value was out of bounds")
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn q_helper() {
        assert_eq!(q(0.5), Quality(500));
    }
    #[test]
    fn display_output() {
        // Fast paths: zero and max render without decimals.
        assert_eq!(Quality::ZERO.to_string(), "0");
        assert_eq!(Quality::MIN.to_string(), "0.001");
        assert_eq!(Quality::MAX.to_string(), "1");
        assert_eq!(q(0.0).to_string(), "0");
        assert_eq!(q(1.0).to_string(), "1");
        assert_eq!(q(0.001).to_string(), "0.001");
        assert_eq!(q(0.5).to_string(), "0.5");
        assert_eq!(q(0.22).to_string(), "0.22");
        assert_eq!(q(0.123).to_string(), "0.123");
        assert_eq!(q(0.999).to_string(), "0.999");
        for x in 0..=1000 {
            // if trailing zeroes are handled correctly, we would not expect the serialized length
            // to ever exceed "0." + 3 decimal places = 5 in length
            assert!(q(x as f32 / 1000.0).to_string().len() <= 5);
        }
    }
    #[test]
    #[should_panic]
    fn negative_quality() {
        q(-1.0);
    }
    #[test]
    #[should_panic]
    fn quality_out_of_bounds() {
        q(2.0);
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/content_encoding.rs | actix-http/src/header/shared/content_encoding.rs | use std::str::FromStr;
use derive_more::{Display, Error};
use http::header::InvalidHeaderValue;
use crate::{
error::ParseError,
header::{self, from_one_raw_str, Header, HeaderName, HeaderValue, TryIntoHeaderValue},
HttpMessage,
};
/// Error returned when a content encoding is unknown.
///
/// Returned by [`ContentEncoding`]'s `FromStr` and `TryFrom<&str>` impls.
#[derive(Debug, Display, Error)]
#[display("unsupported content encoding")]
pub struct ContentEncodingParseError;
/// Represents a supported content encoding.
///
/// Includes a commonly-used subset of media types appropriate for use as HTTP content encodings.
/// See [IANA HTTP Content Coding Registry].
///
/// `Identity` is the default (see the `Default` impl below).
///
/// [IANA HTTP Content Coding Registry]: https://www.iana.org/assignments/http-parameters/http-parameters.xhtml
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum ContentEncoding {
    /// Indicates the no-op identity encoding.
    ///
    /// I.e., no compression or modification.
    Identity,
    /// A format using the Brotli algorithm.
    Brotli,
    /// A format using the zlib structure with deflate algorithm.
    Deflate,
    /// Gzip algorithm.
    Gzip,
    /// Zstd algorithm.
    Zstd,
}
impl ContentEncoding {
    /// Convert content encoding to string.
    #[inline]
    pub const fn as_str(self) -> &'static str {
        match self {
            ContentEncoding::Brotli => "br",
            ContentEncoding::Gzip => "gzip",
            ContentEncoding::Deflate => "deflate",
            ContentEncoding::Zstd => "zstd",
            ContentEncoding::Identity => "identity",
        }
    }

    /// Convert content encoding to header value.
    #[inline]
    pub const fn to_header_value(self) -> HeaderValue {
        // Derive from `as_str` so the token table lives in exactly one place
        // and the two methods cannot drift apart. `HeaderValue::from_static`
        // is const (the previous version already called it in const context).
        HeaderValue::from_static(self.as_str())
    }
}
impl Default for ContentEncoding {
#[inline]
fn default() -> Self {
Self::Identity
}
}
impl FromStr for ContentEncoding {
    type Err = ContentEncodingParseError;

    /// Parses a content-coding token; comparison is ASCII case-insensitive and
    /// surrounding whitespace is ignored.
    fn from_str(enc: &str) -> Result<Self, Self::Err> {
        match enc.trim().to_ascii_lowercase().as_str() {
            "br" => Ok(ContentEncoding::Brotli),
            "gzip" => Ok(ContentEncoding::Gzip),
            "deflate" => Ok(ContentEncoding::Deflate),
            "identity" => Ok(ContentEncoding::Identity),
            "zstd" => Ok(ContentEncoding::Zstd),
            _ => Err(ContentEncodingParseError),
        }
    }
}
impl TryFrom<&str> for ContentEncoding {
type Error = ContentEncodingParseError;
fn try_from(val: &str) -> Result<Self, Self::Error> {
val.parse()
}
}
impl TryIntoHeaderValue for ContentEncoding {
    type Error = InvalidHeaderValue;

    /// Static tokens are always valid header values, so this never returns `Err`.
    fn try_into_value(self) -> Result<http::HeaderValue, Self::Error> {
        let value = HeaderValue::from_static(self.as_str());
        Ok(value)
    }
}
impl Header for ContentEncoding {
    /// The header this type parses from/serializes to: `Content-Encoding`.
    fn name() -> HeaderName {
        header::CONTENT_ENCODING
    }
    /// Parses the first `Content-Encoding` header value from the message.
    fn parse<T: HttpMessage>(msg: &T) -> Result<Self, ParseError> {
        from_one_raw_str(msg.headers().get(Self::name()))
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/mod.rs | actix-http/src/header/shared/mod.rs | //! Originally taken from `hyper::header::shared`.
pub use language_tags::LanguageTag;
mod charset;
mod content_encoding;
mod extended;
mod http_date;
mod quality;
mod quality_item;
pub use self::{
charset::Charset,
content_encoding::ContentEncoding,
extended::{parse_extended_value, ExtendedValue},
http_date::HttpDate,
quality::{q, Quality},
quality_item::QualityItem,
};
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/quality_item.rs | actix-http/src/header/shared/quality_item.rs | use std::{cmp, fmt, str};
use super::Quality;
use crate::error::ParseError;
/// Represents an item with a quality value as defined
/// in [RFC 7231 §5.3.1](https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.1).
///
/// # Parsing and Formatting
/// This wrapper can be used to parse header value items that have a q-factor annotation as well as
/// serialize items with their q-factor.
///
/// # Ordering
/// Since the context of use for this type is header value items, ordering is defined for
/// `QualityItem`s but _only_ considers the item's quality. Order of appearance should be used as
/// the secondary sorting parameter; i.e., a stable sort over the quality values will produce a
/// correctly sorted sequence.
///
/// # Examples
/// ```
/// # use actix_http::header::{QualityItem, q};
/// let q_item: QualityItem<String> = "hello;q=0.3".parse().unwrap();
/// assert_eq!(&q_item.item, "hello");
/// assert_eq!(q_item.quality, q(0.3));
///
/// // note that format is normalized compared to parsed item
/// assert_eq!(q_item.to_string(), "hello; q=0.3");
///
/// // item with q=0.3 is greater than item with q=0.1
/// let q_item_fallback: QualityItem<String> = "abc;q=0.1".parse().unwrap();
/// assert!(q_item > q_item_fallback);
/// ```
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct QualityItem<T> {
    /// The wrapped contents of the field.
    pub item: T,
    /// The quality (client or server preference) for the value.
    pub quality: Quality,
}
impl<T> QualityItem<T> {
    /// Constructs a new `QualityItem` from an item and a quality value.
    ///
    /// The item can be of any type. The quality should be a value in the range [0, 1].
    pub fn new(item: T, quality: Quality) -> Self {
        Self { item, quality }
    }

    /// Constructs a new `QualityItem` from an item, using the maximum q-value.
    pub fn max(item: T) -> Self {
        Self {
            item,
            quality: Quality::MAX,
        }
    }

    /// Constructs a new `QualityItem` from an item, using the minimum, non-zero q-value.
    pub fn min(item: T) -> Self {
        Self {
            item,
            quality: Quality::MIN,
        }
    }

    /// Constructs a new `QualityItem` from an item, using a q-value of zero.
    pub fn zero(item: T) -> Self {
        Self {
            item,
            quality: Quality::ZERO,
        }
    }
}
impl<T: PartialEq> PartialOrd for QualityItem<T> {
    /// Ordering deliberately considers only the quality, never the item;
    /// see the type-level docs. `Quality` is `Ord`, so this is always `Some`.
    fn partial_cmp(&self, other: &QualityItem<T>) -> Option<cmp::Ordering> {
        Some(self.quality.cmp(&other.quality))
    }
}
impl<T: fmt::Display> fmt::Display for QualityItem<T> {
    /// Writes the item, then an optional `; q=…` suffix.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.item, f)?;

        if self.quality == Quality::MAX {
            // Max quality is implied; the q parameter is omitted entirely.
            Ok(())
        } else if self.quality == Quality::ZERO {
            // Fast path that skips the quality formatter for zero.
            f.write_str("; q=0")
        } else {
            // Quality's Display impl already uses itoa internally.
            write!(f, "; q={}", self.quality)
        }
    }
}
impl<T: str::FromStr> str::FromStr for QualityItem<T> {
    type Err = ParseError;
    /// Parses an item with an optional trailing `q` attribute.
    ///
    /// If the last `;`-separated segment does not start with `q=`/`Q=`, the
    /// ENTIRE input string is handed to `T`'s parser unchanged, with quality
    /// defaulting to max — unless that segment is too short to be any
    /// attribute, which is an error.
    fn from_str(q_item_str: &str) -> Result<Self, Self::Err> {
        // q-values and tokens are ASCII-only; reject anything else early.
        if !q_item_str.is_ascii() {
            return Err(ParseError::Header);
        }
        // set defaults used if quality-item parsing fails, i.e., item has no q attribute
        let mut raw_item = q_item_str;
        let mut quality = Quality::MAX;
        let parts = q_item_str
            .rsplit_once(';')
            .map(|(item, q_attr)| (item.trim(), q_attr.trim()));
        if let Some((val, q_attr)) = parts {
            // example for item with q-factor:
            //
            // gzip;q=0.65
            // ^^^^ val
            //      ^^^^^^ q_attr
            //      ^^ q
            //        ^^^^ q_val
            if q_attr.len() < 2 {
                // Can't possibly be an attribute since an attribute needs at least a name followed
                // by an equals sign. And bare identifiers are forbidden.
                return Err(ParseError::Header);
            }
            let q = &q_attr[0..2];
            if q == "q=" || q == "Q=" {
                let q_val = &q_attr[2..];
                if q_val.len() > 5 {
                    // longer than 5 indicates an over-precise q-factor
                    return Err(ParseError::Header);
                }
                let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
                let q_value = Quality::try_from(q_value).map_err(|_| ParseError::Header)?;
                quality = q_value;
                raw_item = val;
            }
        }
        let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
        Ok(QualityItem::new(item, quality))
    }
}
#[cfg(test)]
mod tests {
use super::*;
// copy of encoding from actix-web headers
#[allow(clippy::enum_variant_names)] // allow Encoding prefix on EncodingExt
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Encoding {
Chunked,
Brotli,
Gzip,
Deflate,
Compress,
Identity,
Trailers,
EncodingExt(String),
}
impl fmt::Display for Encoding {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use Encoding::*;
f.write_str(match *self {
Chunked => "chunked",
Brotli => "br",
Gzip => "gzip",
Deflate => "deflate",
Compress => "compress",
Identity => "identity",
Trailers => "trailers",
EncodingExt(ref s) => s.as_ref(),
})
}
}
impl str::FromStr for Encoding {
type Err = crate::error::ParseError;
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
use Encoding::*;
match s {
"chunked" => Ok(Chunked),
"br" => Ok(Brotli),
"deflate" => Ok(Deflate),
"gzip" => Ok(Gzip),
"compress" => Ok(Compress),
"identity" => Ok(Identity),
"trailers" => Ok(Trailers),
_ => Ok(EncodingExt(s.to_owned())),
}
}
}
#[test]
fn test_quality_item_fmt_q_1() {
use Encoding::*;
let x = QualityItem::max(Chunked);
assert_eq!(format!("{}", x), "chunked");
}
#[test]
fn test_quality_item_fmt_q_0001() {
use Encoding::*;
let x = QualityItem::new(Chunked, Quality(1));
assert_eq!(format!("{}", x), "chunked; q=0.001");
}
#[test]
fn test_quality_item_fmt_q_05() {
use Encoding::*;
// Custom value
let x = QualityItem {
item: EncodingExt("identity".to_owned()),
quality: Quality(500),
};
assert_eq!(format!("{}", x), "identity; q=0.5");
}
#[test]
fn test_quality_item_fmt_q_0() {
use Encoding::*;
// Custom value
let x = QualityItem {
item: EncodingExt("identity".to_owned()),
quality: Quality(0),
};
assert_eq!(x.to_string(), "identity; q=0");
}
#[test]
fn test_quality_item_from_str1() {
use Encoding::*;
let x: Result<QualityItem<Encoding>, _> = "chunked".parse();
assert_eq!(
x.unwrap(),
QualityItem {
item: Chunked,
quality: Quality(1000),
}
);
}
#[test]
fn test_quality_item_from_str2() {
use Encoding::*;
let x: Result<QualityItem<Encoding>, _> = "chunked; q=1".parse();
assert_eq!(
x.unwrap(),
QualityItem {
item: Chunked,
quality: Quality(1000),
}
);
}
#[test]
fn test_quality_item_from_str3() {
use Encoding::*;
let x: Result<QualityItem<Encoding>, _> = "gzip; q=0.5".parse();
assert_eq!(
x.unwrap(),
QualityItem {
item: Gzip,
quality: Quality(500),
}
);
}
#[test]
fn test_quality_item_from_str4() {
use Encoding::*;
let x: Result<QualityItem<Encoding>, _> = "gzip; q=0.273".parse();
assert_eq!(
x.unwrap(),
QualityItem {
item: Gzip,
quality: Quality(273),
}
);
}
#[test]
fn test_quality_item_from_str5() {
let x: Result<QualityItem<Encoding>, _> = "gzip; q=0.2739999".parse();
assert!(x.is_err());
}
#[test]
fn test_quality_item_from_str6() {
let x: Result<QualityItem<Encoding>, _> = "gzip; q=2".parse();
assert!(x.is_err());
}
#[test]
fn test_quality_item_ordering() {
let x: QualityItem<Encoding> = "gzip; q=0.5".parse().ok().unwrap();
let y: QualityItem<Encoding> = "gzip; q=0.273".parse().ok().unwrap();
let comparison_result: bool = x.gt(&y);
assert!(comparison_result)
}
#[test]
fn test_fuzzing_bugs() {
assert!("99999;".parse::<QualityItem<String>>().is_err());
assert!("\x0d;;;=\u{d6aa}==".parse::<QualityItem<String>>().is_err())
}
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/header/shared/extended.rs | actix-http/src/header/shared/extended.rs | //! Originally taken from `hyper::header::parsing`.
use std::{fmt, str::FromStr};
use language_tags::LanguageTag;
use crate::header::{Charset, HTTP_VALUE};
/// The value part of an extended parameter consisting of three parts:
/// - The REQUIRED character set name (`charset`).
/// - The OPTIONAL language information (`language_tag`).
/// - A character sequence representing the actual value (`value`), separated by single quotes.
///
/// It is defined in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
///
/// Produced by [`parse_extended_value`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExtendedValue {
    /// The character set that is used to encode the `value` to a string.
    pub charset: Charset,
    /// The human language details of the `value`, if available.
    pub language_tag: Option<LanguageTag>,
    /// The parameter value, as expressed in octets (already percent-decoded).
    pub value: Vec<u8>,
}
/// Parses extended header parameter values (`ext-value`), as defined
/// in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).
///
/// Extended values are denoted by parameter names that end with `*`.
///
/// ## ABNF
///
/// ```plain
/// ext-value = charset "'" [ language ] "'" value-chars
/// ; like RFC 2231's <extended-initial-value>
/// ; (see [RFC 2231 §7])
///
/// charset = "UTF-8" / "ISO-8859-1" / mime-charset
///
/// mime-charset = 1*mime-charsetc
/// mime-charsetc = ALPHA / DIGIT
/// / "!" / "#" / "$" / "%" / "&"
/// / "+" / "-" / "^" / "_" / "`"
/// / "{" / "}" / "~"
/// ; as <mime-charset> in [RFC 2978 §2.3]
/// ; except that the single quote is not included
/// ; SHOULD be registered in the IANA charset registry
///
/// language = <Language-Tag, defined in [RFC 5646 §2.1]>
///
/// value-chars = *( pct-encoded / attr-char )
///
/// pct-encoded = "%" HEXDIG HEXDIG
/// ; see [RFC 3986 §2.1]
///
/// attr-char = ALPHA / DIGIT
/// / "!" / "#" / "$" / "&" / "+" / "-" / "."
/// / "^" / "_" / "`" / "|" / "~"
/// ; token except ( "*" / "'" / "%" )
/// ```
///
/// [RFC 2231 §7]: https://datatracker.ietf.org/doc/html/rfc2231#section-7
/// [RFC 2978 §2.3]: https://datatracker.ietf.org/doc/html/rfc2978#section-2.3
/// [RFC 3986 §2.1]: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1
pub fn parse_extended_value(val: &str) -> Result<ExtendedValue, crate::error::ParseError> {
// Break into three pieces separated by the single-quote character
let mut parts = val.splitn(3, '\'');
// Interpret the first piece as a Charset
let charset: Charset = match parts.next() {
None => return Err(crate::error::ParseError::Header),
Some(n) => FromStr::from_str(n).map_err(|_| crate::error::ParseError::Header)?,
};
// Interpret the second piece as a language tag
let language_tag: Option<LanguageTag> = match parts.next() {
None => return Err(crate::error::ParseError::Header),
Some("") => None,
Some(s) => match s.parse() {
Ok(lt) => Some(lt),
Err(_) => return Err(crate::error::ParseError::Header),
},
};
// Interpret the third piece as a sequence of value characters
let value: Vec<u8> = match parts.next() {
None => return Err(crate::error::ParseError::Header),
Some(v) => percent_encoding::percent_decode(v.as_bytes()).collect(),
};
Ok(ExtendedValue {
charset,
language_tag,
value,
})
}
impl fmt::Display for ExtendedValue {
    /// Serializes back to `charset'language'percent-encoded-value` form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Percent-encode the raw octets using the HTTP attr-char set.
        let encoded_value = percent_encoding::percent_encode(&self.value[..], HTTP_VALUE);

        match self.language_tag {
            Some(ref lang) => write!(f, "{}'{}'{}", self.charset, lang, encoded_value),
            None => write!(f, "{}''{}", self.charset, encoded_value),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse_extended_value_with_encoding_and_language_tag() {
        let expected_language_tag = "en".parse::<LanguageTag>().unwrap();
        // RFC 5987, Section 3.2.2
        // Extended notation, using the Unicode character U+00A3 (POUND SIGN)
        let result = parse_extended_value("iso-8859-1'en'%A3%20rates");
        assert!(result.is_ok());
        let extended_value = result.unwrap();
        assert_eq!(Charset::Iso_8859_1, extended_value.charset);
        assert!(extended_value.language_tag.is_some());
        assert_eq!(expected_language_tag, extended_value.language_tag.unwrap());
        assert_eq!(
            vec![163, b' ', b'r', b'a', b't', b'e', b's'],
            extended_value.value
        );
    }
    #[test]
    fn test_parse_extended_value_with_encoding() {
        // RFC 5987, Section 3.2.2
        // Extended notation, using the Unicode characters U+00A3 (POUND SIGN)
        // and U+20AC (EURO SIGN)
        let result = parse_extended_value("UTF-8''%c2%a3%20and%20%e2%82%ac%20rates");
        assert!(result.is_ok());
        let extended_value = result.unwrap();
        // "UTF-8" is not a named `Charset` variant, so it lands in `Ext`.
        assert_eq!(Charset::Ext("UTF-8".to_string()), extended_value.charset);
        assert!(extended_value.language_tag.is_none());
        assert_eq!(
            vec![
                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a', b't',
                b'e', b's',
            ],
            extended_value.value
        );
    }
    #[test]
    fn test_parse_extended_value_missing_language_tag_and_encoding() {
        // From: https://greenbytes.de/tech/tc2231/#attwithfn2231quot2
        let result = parse_extended_value("foo%20bar.html");
        assert!(result.is_err());
    }
    #[test]
    fn test_parse_extended_value_partially_formatted() {
        let result = parse_extended_value("UTF-8'missing third part");
        assert!(result.is_err());
    }
    #[test]
    fn test_parse_extended_value_partially_formatted_blank() {
        let result = parse_extended_value("blank second part'");
        assert!(result.is_err());
    }
    #[test]
    fn test_fmt_extended_value_with_encoding_and_language_tag() {
        let extended_value = ExtendedValue {
            charset: Charset::Iso_8859_1,
            language_tag: Some("en".parse().expect("Could not parse language tag")),
            value: vec![163, b' ', b'r', b'a', b't', b'e', b's'],
        };
        assert_eq!("ISO-8859-1'en'%A3%20rates", format!("{}", extended_value));
    }
    #[test]
    fn test_fmt_extended_value_with_encoding() {
        let extended_value = ExtendedValue {
            charset: Charset::Ext("UTF-8".to_string()),
            language_tag: None,
            value: vec![
                194, 163, b' ', b'a', b'n', b'd', b' ', 226, 130, 172, b' ', b'r', b'a', b't',
                b'e', b's',
            ],
        };
        assert_eq!(
            "UTF-8''%C2%A3%20and%20%E2%82%AC%20rates",
            format!("{}", extended_value)
        );
    }
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/responses/builder.rs | actix-http/src/responses/builder.rs | //! HTTP response builder.
use std::{cell::RefCell, fmt, str};
use crate::{
body::{EitherBody, MessageBody},
error::{Error, HttpError},
header::{self, TryIntoHeaderPair, TryIntoHeaderValue},
responses::{BoxedResponseHead, ResponseHead},
ConnectionType, Extensions, Response, StatusCode,
};
/// An HTTP response builder.
///
/// Used to construct an instance of `Response` using a builder pattern. Response builders are often
/// created using [`Response::build`].
///
/// # Examples
/// ```
/// use actix_http::{Response, ResponseBuilder, StatusCode, body, header};
///
/// # actix_rt::System::new().block_on(async {
/// let mut res: Response<_> = Response::build(StatusCode::OK)
///     .content_type(mime::APPLICATION_JSON)
///     .insert_header((header::SERVER, "my-app/1.0"))
///     .append_header((header::SET_COOKIE, "a=1"))
///     .append_header((header::SET_COOKIE, "b=2"))
///     .body("1234");
///
/// assert_eq!(res.status(), StatusCode::OK);
///
/// assert!(res.headers().contains_key("server"));
/// assert_eq!(res.headers().get_all("set-cookie").count(), 2);
///
/// assert_eq!(body::to_bytes(res.into_body()).await.unwrap(), &b"1234"[..]);
/// # })
/// ```
pub struct ResponseBuilder {
    // `None` once the builder has been consumed by `message_body`/`take`.
    head: Option<BoxedResponseHead>,
    // First header/value conversion error; reported when the response is built.
    err: Option<HttpError>,
}
impl ResponseBuilder {
    /// Create response builder
    ///
    /// # Examples
    /// ```
    /// use actix_http::{Response, ResponseBuilder, StatusCode};
    /// let res: Response<_> = ResponseBuilder::default().finish();
    /// assert_eq!(res.status(), StatusCode::OK);
    /// ```
    #[inline]
    pub fn new(status: StatusCode) -> Self {
        ResponseBuilder {
            head: Some(BoxedResponseHead::new(status)),
            err: None,
        }
    }
    /// Set HTTP status code of this response.
    ///
    /// # Examples
    /// ```
    /// use actix_http::{ResponseBuilder, StatusCode};
    /// let res = ResponseBuilder::default().status(StatusCode::NOT_FOUND).finish();
    /// assert_eq!(res.status(), StatusCode::NOT_FOUND);
    /// ```
    #[inline]
    pub fn status(&mut self, status: StatusCode) -> &mut Self {
        if let Some(parts) = self.inner() {
            parts.status = status;
        }
        self
    }
    /// Insert a header, replacing any that were set with an equivalent field name.
    ///
    /// # Examples
    /// ```
    /// use actix_http::{ResponseBuilder, header};
    ///
    /// let res = ResponseBuilder::default()
    ///     .insert_header((header::CONTENT_TYPE, mime::APPLICATION_JSON))
    ///     .insert_header(("X-TEST", "value"))
    ///     .finish();
    ///
    /// assert!(res.headers().contains_key("content-type"));
    /// assert!(res.headers().contains_key("x-test"));
    /// ```
    pub fn insert_header(&mut self, header: impl TryIntoHeaderPair) -> &mut Self {
        if let Some(parts) = self.inner() {
            // Conversion failures are deferred: recorded in `err` and reported
            // when the response is finally built.
            match header.try_into_pair() {
                Ok((key, value)) => {
                    parts.headers.insert(key, value);
                }
                Err(err) => self.err = Some(err.into()),
            };
        }
        self
    }
    /// Append a header, keeping any that were set with an equivalent field name.
    ///
    /// # Examples
    /// ```
    /// use actix_http::{ResponseBuilder, header};
    ///
    /// let res = ResponseBuilder::default()
    ///     .append_header((header::CONTENT_TYPE, mime::APPLICATION_JSON))
    ///     .append_header(("X-TEST", "value1"))
    ///     .append_header(("X-TEST", "value2"))
    ///     .finish();
    ///
    /// assert_eq!(res.headers().get_all("content-type").count(), 1);
    /// assert_eq!(res.headers().get_all("x-test").count(), 2);
    /// ```
    pub fn append_header(&mut self, header: impl TryIntoHeaderPair) -> &mut Self {
        if let Some(parts) = self.inner() {
            match header.try_into_pair() {
                Ok((key, value)) => parts.headers.append(key, value),
                Err(err) => self.err = Some(err.into()),
            };
        }
        self
    }
    /// Set the custom reason for the response.
    #[inline]
    pub fn reason(&mut self, reason: &'static str) -> &mut Self {
        if let Some(parts) = self.inner() {
            parts.reason = Some(reason);
        }
        self
    }
    /// Set connection type to KeepAlive
    #[inline]
    pub fn keep_alive(&mut self) -> &mut Self {
        if let Some(parts) = self.inner() {
            parts.set_connection_type(ConnectionType::KeepAlive);
        }
        self
    }
    /// Set connection type to `Upgrade`.
    #[inline]
    pub fn upgrade<V>(&mut self, value: V) -> &mut Self
    where
        V: TryIntoHeaderValue,
    {
        if let Some(parts) = self.inner() {
            parts.set_connection_type(ConnectionType::Upgrade);
        }
        // `insert_header` is itself a no-op if an earlier error was recorded.
        if let Ok(value) = value.try_into_value() {
            self.insert_header((header::UPGRADE, value));
        }
        self
    }
    /// Force-close connection, even if it is marked as keep-alive.
    #[inline]
    pub fn force_close(&mut self) -> &mut Self {
        if let Some(parts) = self.inner() {
            parts.set_connection_type(ConnectionType::Close);
        }
        self
    }
    /// Disable chunked transfer encoding for HTTP/1.1 streaming responses.
    #[inline]
    pub fn no_chunking(&mut self, len: u64) -> &mut Self {
        // A Content-Length header replaces chunked framing.
        let mut buf = itoa::Buffer::new();
        self.insert_header((header::CONTENT_LENGTH, buf.format(len)));
        if let Some(parts) = self.inner() {
            parts.no_chunking(true);
        }
        self
    }
    /// Set response content type.
    #[inline]
    pub fn content_type<V>(&mut self, value: V) -> &mut Self
    where
        V: TryIntoHeaderValue,
    {
        if let Some(parts) = self.inner() {
            match value.try_into_value() {
                Ok(value) => {
                    parts.headers.insert(header::CONTENT_TYPE, value);
                }
                Err(err) => self.err = Some(err.into()),
            };
        }
        self
    }
    /// Generate response with a wrapped body.
    ///
    /// This `ResponseBuilder` will be left in a useless state.
    pub fn body<B>(&mut self, body: B) -> Response<EitherBody<B>>
    where
        B: MessageBody + 'static,
    {
        // A deferred builder error becomes an error response here.
        match self.message_body(body) {
            Ok(res) => res.map_body(|_, body| EitherBody::left(body)),
            Err(err) => Response::from(err).map_body(|_, body| EitherBody::right(body)),
        }
    }
    /// Generate response with a body.
    ///
    /// This `ResponseBuilder` will be left in a useless state.
    pub fn message_body<B>(&mut self, body: B) -> Result<Response<B>, Error> {
        // Surface the first deferred header/value error, if any.
        if let Some(err) = self.err.take() {
            return Err(Error::new_http().with_cause(err));
        }
        let head = self.head.take().expect("cannot reuse response builder");
        Ok(Response {
            head,
            body,
            extensions: RefCell::new(Extensions::new()),
        })
    }
    /// Generate response with an empty body.
    ///
    /// This `ResponseBuilder` will be left in a useless state.
    #[inline]
    pub fn finish(&mut self) -> Response<EitherBody<()>> {
        self.body(())
    }
    /// Create an owned `ResponseBuilder`, leaving the original in a useless state.
    pub fn take(&mut self) -> ResponseBuilder {
        // Moves both the head and any pending error into the new builder.
        ResponseBuilder {
            head: self.head.take(),
            err: self.err.take(),
        }
    }
    /// Get access to the inner response head if there has been no error.
    fn inner(&mut self) -> Option<&mut ResponseHead> {
        // Errors freeze the builder: once `err` is set, no further mutations apply.
        if self.err.is_some() {
            return None;
        }
        self.head.as_deref_mut()
    }
}
impl Default for ResponseBuilder {
fn default() -> Self {
Self::new(StatusCode::OK)
}
}
/// Convert `Response` to a `ResponseBuilder`. Body get dropped.
impl<B> From<Response<B>> for ResponseBuilder {
fn from(res: Response<B>) -> ResponseBuilder {
ResponseBuilder {
head: Some(res.head),
err: None,
}
}
}
/// Convert `ResponseHead` to a `ResponseBuilder`
impl<'a> From<&'a ResponseHead> for ResponseBuilder {
fn from(head: &'a ResponseHead) -> ResponseBuilder {
let mut msg = BoxedResponseHead::new(head.status);
msg.version = head.version;
msg.reason = head.reason;
for (k, v) in head.headers.iter() {
msg.headers.append(k.clone(), v.clone());
}
msg.no_chunking(!head.chunked());
ResponseBuilder {
head: Some(msg),
err: None,
}
}
}
impl fmt::Debug for ResponseBuilder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let head = self.head.as_ref().unwrap();
let res = writeln!(
f,
"\nResponseBuilder {:?} {}{}",
head.version,
head.status,
head.reason.unwrap_or(""),
);
let _ = writeln!(f, " headers:");
for (key, val) in head.headers.iter() {
let _ = writeln!(f, " {:?}: {:?}", key, val);
}
res
}
}
#[cfg(test)]
mod tests {
use bytes::Bytes;
use super::*;
use crate::header::{HeaderName, HeaderValue, CONTENT_TYPE};
#[test]
fn test_basic_builder() {
let resp = Response::build(StatusCode::OK)
.insert_header(("X-TEST", "value"))
.finish();
assert_eq!(resp.status(), StatusCode::OK);
}
#[test]
fn test_upgrade() {
let resp = Response::build(StatusCode::OK)
.upgrade("websocket")
.finish();
assert!(resp.upgrade());
assert_eq!(
resp.headers().get(header::UPGRADE).unwrap(),
HeaderValue::from_static("websocket")
);
}
#[test]
fn test_force_close() {
let resp = Response::build(StatusCode::OK).force_close().finish();
assert!(!resp.keep_alive());
}
#[test]
fn test_content_type() {
let resp = Response::build(StatusCode::OK)
.content_type("text/plain")
.body(Bytes::new());
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/plain");
let resp = Response::build(StatusCode::OK)
.content_type(mime::TEXT_JAVASCRIPT)
.body(Bytes::new());
assert_eq!(resp.headers().get(CONTENT_TYPE).unwrap(), "text/javascript");
}
#[test]
fn test_into_builder() {
let mut resp: Response<_> = "test".into();
assert_eq!(resp.status(), StatusCode::OK);
resp.headers_mut().insert(
HeaderName::from_static("cookie"),
HeaderValue::from_static("cookie1=val100"),
);
let mut builder: ResponseBuilder = resp.into();
let resp = builder.status(StatusCode::BAD_REQUEST).finish();
assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
let cookie = resp.headers().get_all("Cookie").next().unwrap();
assert_eq!(cookie.to_str().unwrap(), "cookie1=val100");
}
#[test]
fn response_builder_header_insert_kv() {
let mut res = Response::build(StatusCode::OK);
res.insert_header(("Content-Type", "application/octet-stream"));
let res = res.finish();
assert_eq!(
res.headers().get("Content-Type"),
Some(&HeaderValue::from_static("application/octet-stream"))
);
}
#[test]
fn response_builder_header_insert_typed() {
let mut res = Response::build(StatusCode::OK);
res.insert_header((header::CONTENT_TYPE, mime::APPLICATION_OCTET_STREAM));
let res = res.finish();
assert_eq!(
res.headers().get("Content-Type"),
Some(&HeaderValue::from_static("application/octet-stream"))
);
}
#[test]
fn response_builder_header_append_kv() {
let mut res = Response::build(StatusCode::OK);
res.append_header(("Content-Type", "application/octet-stream"));
res.append_header(("Content-Type", "application/json"));
let res = res.finish();
let headers: Vec<_> = res.headers().get_all("Content-Type").cloned().collect();
assert_eq!(headers.len(), 2);
assert!(headers.contains(&HeaderValue::from_static("application/octet-stream")));
assert!(headers.contains(&HeaderValue::from_static("application/json")));
}
#[test]
fn response_builder_header_append_typed() {
let mut res = Response::build(StatusCode::OK);
res.append_header((header::CONTENT_TYPE, mime::APPLICATION_OCTET_STREAM));
res.append_header((header::CONTENT_TYPE, mime::APPLICATION_JSON));
let res = res.finish();
let headers: Vec<_> = res.headers().get_all("Content-Type").cloned().collect();
assert_eq!(headers.len(), 2);
assert!(headers.contains(&HeaderValue::from_static("application/octet-stream")));
assert!(headers.contains(&HeaderValue::from_static("application/json")));
}
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/responses/response.rs | actix-http/src/responses/response.rs | //! HTTP response.
use std::{
cell::{Ref, RefCell, RefMut},
fmt, str,
};
use bytes::{Bytes, BytesMut};
use bytestring::ByteString;
use crate::{
body::{BoxBody, EitherBody, MessageBody},
header::{self, HeaderMap, TryIntoHeaderValue},
responses::BoxedResponseHead,
Error, Extensions, ResponseBuilder, ResponseHead, StatusCode,
};
/// An HTTP response.
pub struct Response<B> {
pub(crate) head: BoxedResponseHead,
pub(crate) body: B,
pub(crate) extensions: RefCell<Extensions>,
}
impl Response<BoxBody> {
/// Constructs a new response with default body.
#[inline]
pub fn new(status: StatusCode) -> Self {
Response {
head: BoxedResponseHead::new(status),
body: BoxBody::new(()),
extensions: RefCell::new(Extensions::new()),
}
}
/// Constructs a new response builder.
#[inline]
pub fn build(status: StatusCode) -> ResponseBuilder {
ResponseBuilder::new(status)
}
// just a couple frequently used shortcuts
// this list should not grow larger than a few
/// Constructs a new response with status 200 OK.
#[inline]
pub fn ok() -> Self {
Response::new(StatusCode::OK)
}
/// Constructs a new response with status 400 Bad Request.
#[inline]
pub fn bad_request() -> Self {
Response::new(StatusCode::BAD_REQUEST)
}
/// Constructs a new response with status 404 Not Found.
#[inline]
pub fn not_found() -> Self {
Response::new(StatusCode::NOT_FOUND)
}
/// Constructs a new response with status 500 Internal Server Error.
#[inline]
pub fn internal_server_error() -> Self {
Response::new(StatusCode::INTERNAL_SERVER_ERROR)
}
// end shortcuts
}
impl<B> Response<B> {
/// Constructs a new response with given body.
#[inline]
pub fn with_body(status: StatusCode, body: B) -> Response<B> {
Response {
head: BoxedResponseHead::new(status),
body,
extensions: RefCell::new(Extensions::new()),
}
}
/// Returns a reference to the head of this response.
#[inline]
pub fn head(&self) -> &ResponseHead {
&self.head
}
/// Returns a mutable reference to the head of this response.
#[inline]
pub fn head_mut(&mut self) -> &mut ResponseHead {
&mut self.head
}
/// Returns the status code of this response.
#[inline]
pub fn status(&self) -> StatusCode {
self.head.status
}
/// Returns a mutable reference the status code of this response.
#[inline]
pub fn status_mut(&mut self) -> &mut StatusCode {
&mut self.head.status
}
/// Returns a reference to response headers.
#[inline]
pub fn headers(&self) -> &HeaderMap {
&self.head.headers
}
/// Returns a mutable reference to response headers.
#[inline]
pub fn headers_mut(&mut self) -> &mut HeaderMap {
&mut self.head.headers
}
/// Returns true if connection upgrade is enabled.
#[inline]
pub fn upgrade(&self) -> bool {
self.head.upgrade()
}
/// Returns true if keep-alive is enabled.
#[inline]
pub fn keep_alive(&self) -> bool {
self.head.keep_alive()
}
/// Returns a reference to the request-local data/extensions container.
#[inline]
pub fn extensions(&self) -> Ref<'_, Extensions> {
self.extensions.borrow()
}
/// Returns a mutable reference to the request-local data/extensions container.
#[inline]
pub fn extensions_mut(&mut self) -> RefMut<'_, Extensions> {
self.extensions.borrow_mut()
}
/// Returns a reference to the body of this response.
#[inline]
pub fn body(&self) -> &B {
&self.body
}
/// Sets new body.
#[inline]
pub fn set_body<B2>(self, body: B2) -> Response<B2> {
Response {
head: self.head,
body,
extensions: self.extensions,
}
}
/// Drops body and returns new response.
#[inline]
pub fn drop_body(self) -> Response<()> {
self.set_body(())
}
/// Sets new body, returning new response and previous body value.
#[inline]
pub(crate) fn replace_body<B2>(self, body: B2) -> (Response<B2>, B) {
(
Response {
head: self.head,
body,
extensions: self.extensions,
},
self.body,
)
}
/// Returns split head and body.
///
/// # Implementation Notes
/// Due to internal performance optimizations, the first element of the returned tuple is a
/// `Response` as well but only contains the head of the response this was called on.
#[inline]
pub fn into_parts(self) -> (Response<()>, B) {
self.replace_body(())
}
/// Map the current body type to another using a closure, returning a new response.
///
/// Closure receives the response head and the current body type.
#[inline]
pub fn map_body<F, B2>(mut self, f: F) -> Response<B2>
where
F: FnOnce(&mut ResponseHead, B) -> B2,
{
let body = f(&mut self.head, self.body);
Response {
head: self.head,
body,
extensions: self.extensions,
}
}
/// Map the current body to a type-erased `BoxBody`.
#[inline]
pub fn map_into_boxed_body(self) -> Response<BoxBody>
where
B: MessageBody + 'static,
{
self.map_body(|_, body| body.boxed())
}
/// Returns the response body, dropping all other parts.
#[inline]
pub fn into_body(self) -> B {
self.body
}
}
impl<B> fmt::Debug for Response<B>
where
B: MessageBody,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let res = writeln!(
f,
"\nResponse {:?} {}{}",
self.head.version,
self.head.status,
self.head.reason.unwrap_or(""),
);
let _ = writeln!(f, " headers:");
for (key, val) in self.head.headers.iter() {
let _ = writeln!(f, " {:?}: {:?}", key, val);
}
let _ = writeln!(f, " body: {:?}", self.body.size());
res
}
}
impl<B: Default> Default for Response<B> {
#[inline]
fn default() -> Response<B> {
Response::with_body(StatusCode::default(), B::default())
}
}
impl<I: Into<Response<BoxBody>>, E: Into<Error>> From<Result<I, E>> for Response<BoxBody> {
fn from(res: Result<I, E>) -> Self {
match res {
Ok(val) => val.into(),
Err(err) => Response::from(err.into()),
}
}
}
impl From<ResponseBuilder> for Response<EitherBody<()>> {
fn from(mut builder: ResponseBuilder) -> Self {
builder.finish()
}
}
impl From<std::convert::Infallible> for Response<BoxBody> {
fn from(val: std::convert::Infallible) -> Self {
match val {}
}
}
impl From<&'static str> for Response<&'static str> {
fn from(val: &'static str) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::TEXT_PLAIN_UTF_8.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<&'static [u8]> for Response<&'static [u8]> {
fn from(val: &'static [u8]) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<Vec<u8>> for Response<Vec<u8>> {
fn from(val: Vec<u8>) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<&Vec<u8>> for Response<Vec<u8>> {
fn from(val: &Vec<u8>) -> Self {
let mut res = Response::with_body(StatusCode::OK, val.clone());
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<String> for Response<String> {
fn from(val: String) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::TEXT_PLAIN_UTF_8.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<&String> for Response<String> {
fn from(val: &String) -> Self {
let mut res = Response::with_body(StatusCode::OK, val.clone());
let mime = mime::TEXT_PLAIN_UTF_8.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<Bytes> for Response<Bytes> {
fn from(val: Bytes) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<BytesMut> for Response<BytesMut> {
fn from(val: BytesMut) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::APPLICATION_OCTET_STREAM.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
impl From<ByteString> for Response<ByteString> {
fn from(val: ByteString) -> Self {
let mut res = Response::with_body(StatusCode::OK, val);
let mime = mime::TEXT_PLAIN_UTF_8.try_into_value().unwrap();
res.headers_mut().insert(header::CONTENT_TYPE, mime);
res
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
body::to_bytes,
header::{HeaderValue, CONTENT_TYPE, COOKIE},
};
#[test]
fn test_debug() {
let resp = Response::build(StatusCode::OK)
.append_header((COOKIE, HeaderValue::from_static("cookie1=value1; ")))
.append_header((COOKIE, HeaderValue::from_static("cookie2=value2; ")))
.finish();
let dbg = format!("{:?}", resp);
assert!(dbg.contains("Response"));
}
#[actix_rt::test]
async fn test_into_response() {
let res = Response::from("test");
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("text/plain; charset=utf-8")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let res = Response::from(b"test".as_ref());
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("application/octet-stream")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let res = Response::from("test".to_owned());
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("text/plain; charset=utf-8")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let res = Response::from("test".to_owned());
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("text/plain; charset=utf-8")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let b = Bytes::from_static(b"test");
let res = Response::from(b);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("application/octet-stream")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let b = Bytes::from_static(b"test");
let res = Response::from(b);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("application/octet-stream")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
let b = BytesMut::from("test");
let res = Response::from(b);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.headers().get(CONTENT_TYPE).unwrap(),
HeaderValue::from_static("application/octet-stream")
);
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(to_bytes(res.into_body()).await.unwrap(), &b"test"[..]);
}
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/responses/mod.rs | actix-http/src/responses/mod.rs | //! HTTP response.
mod builder;
mod head;
#[allow(clippy::module_inception)]
mod response;
pub(crate) use self::head::BoxedResponseHead;
pub use self::{builder::ResponseBuilder, head::ResponseHead, response::Response};
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/src/responses/head.rs | actix-http/src/responses/head.rs | //! Response head type and caching pool.
use std::{cell::RefCell, ops};
use crate::{header::HeaderMap, message::Flags, ConnectionType, StatusCode, Version};
thread_local! {
static RESPONSE_POOL: BoxedResponsePool = BoxedResponsePool::create();
}
#[derive(Debug, Clone)]
pub struct ResponseHead {
pub version: Version,
pub status: StatusCode,
pub headers: HeaderMap,
pub reason: Option<&'static str>,
pub(crate) flags: Flags,
}
impl ResponseHead {
/// Create new instance of `ResponseHead` type
#[inline]
pub fn new(status: StatusCode) -> ResponseHead {
ResponseHead {
status,
version: Version::HTTP_11,
headers: HeaderMap::with_capacity(12),
reason: None,
flags: Flags::empty(),
}
}
/// Read the message headers.
#[inline]
pub fn headers(&self) -> &HeaderMap {
&self.headers
}
/// Mutable reference to the message headers.
#[inline]
pub fn headers_mut(&mut self) -> &mut HeaderMap {
&mut self.headers
}
/// Sets the flag that controls whether to send headers formatted as Camel-Case.
///
/// Only applicable to HTTP/1.x responses; HTTP/2 header names are always lowercase.
#[inline]
pub fn set_camel_case_headers(&mut self, camel_case: bool) {
if camel_case {
self.flags.insert(Flags::CAMEL_CASE);
} else {
self.flags.remove(Flags::CAMEL_CASE);
}
}
/// Set connection type of the message
#[inline]
pub fn set_connection_type(&mut self, ctype: ConnectionType) {
match ctype {
ConnectionType::Close => self.flags.insert(Flags::CLOSE),
ConnectionType::KeepAlive => self.flags.insert(Flags::KEEP_ALIVE),
ConnectionType::Upgrade => self.flags.insert(Flags::UPGRADE),
}
}
#[inline]
pub fn connection_type(&self) -> ConnectionType {
if self.flags.contains(Flags::CLOSE) {
ConnectionType::Close
} else if self.flags.contains(Flags::KEEP_ALIVE) {
ConnectionType::KeepAlive
} else if self.flags.contains(Flags::UPGRADE) {
ConnectionType::Upgrade
} else if self.version < Version::HTTP_11 {
ConnectionType::Close
} else {
ConnectionType::KeepAlive
}
}
/// Check if keep-alive is enabled
#[inline]
pub fn keep_alive(&self) -> bool {
self.connection_type() == ConnectionType::KeepAlive
}
/// Check upgrade status of this message
#[inline]
pub fn upgrade(&self) -> bool {
self.connection_type() == ConnectionType::Upgrade
}
/// Get custom reason for the response
#[inline]
pub fn reason(&self) -> &str {
self.reason.unwrap_or_else(|| {
self.status
.canonical_reason()
.unwrap_or("<unknown status code>")
})
}
#[inline]
pub(crate) fn conn_type(&self) -> Option<ConnectionType> {
if self.flags.contains(Flags::CLOSE) {
Some(ConnectionType::Close)
} else if self.flags.contains(Flags::KEEP_ALIVE) {
Some(ConnectionType::KeepAlive)
} else if self.flags.contains(Flags::UPGRADE) {
Some(ConnectionType::Upgrade)
} else {
None
}
}
/// Get response body chunking state
#[inline]
pub fn chunked(&self) -> bool {
!self.flags.contains(Flags::NO_CHUNKING)
}
/// Set no chunking for payload
#[inline]
pub fn no_chunking(&mut self, val: bool) {
if val {
self.flags.insert(Flags::NO_CHUNKING);
} else {
self.flags.remove(Flags::NO_CHUNKING);
}
}
}
pub(crate) struct BoxedResponseHead {
head: Option<Box<ResponseHead>>,
}
impl BoxedResponseHead {
/// Get new message from the pool of objects
pub fn new(status: StatusCode) -> Self {
RESPONSE_POOL.with(|p| p.get_message(status))
}
}
impl ops::Deref for BoxedResponseHead {
type Target = ResponseHead;
fn deref(&self) -> &Self::Target {
self.head.as_ref().unwrap()
}
}
impl ops::DerefMut for BoxedResponseHead {
fn deref_mut(&mut self) -> &mut Self::Target {
self.head.as_mut().unwrap()
}
}
impl Drop for BoxedResponseHead {
fn drop(&mut self) {
if let Some(head) = self.head.take() {
RESPONSE_POOL.with(move |p| p.release(head))
}
}
}
/// Response head object pool.
#[doc(hidden)]
pub struct BoxedResponsePool(#[allow(clippy::vec_box)] RefCell<Vec<Box<ResponseHead>>>);
impl BoxedResponsePool {
fn create() -> BoxedResponsePool {
BoxedResponsePool(RefCell::new(Vec::with_capacity(128)))
}
/// Get message from the pool.
#[inline]
fn get_message(&self, status: StatusCode) -> BoxedResponseHead {
if let Some(mut head) = self.0.borrow_mut().pop() {
head.reason = None;
head.status = status;
head.headers.clear();
head.flags = Flags::empty();
BoxedResponseHead { head: Some(head) }
} else {
BoxedResponseHead {
head: Some(Box::new(ResponseHead::new(status))),
}
}
}
/// Release request instance.
#[inline]
fn release(&self, msg: Box<ResponseHead>) {
let pool = &mut self.0.borrow_mut();
if pool.len() < 128 {
pool.push(msg);
}
}
}
#[cfg(test)]
mod tests {
use std::{
io::{Read as _, Write as _},
net,
};
use memchr::memmem;
use crate::{
h1::H1Service,
header::{HeaderName, HeaderValue},
Error, Request, Response, ServiceConfig,
};
#[actix_rt::test]
async fn camel_case_headers() {
let mut srv = actix_http_test::test_server(|| {
H1Service::with_config(ServiceConfig::default(), |req: Request| async move {
let mut res = Response::ok();
if req.path().contains("camel") {
res.head_mut().set_camel_case_headers(true);
}
res.headers_mut().insert(
HeaderName::from_static("foo-bar"),
HeaderValue::from_static("baz"),
);
Ok::<_, Error>(res)
})
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
stream
.write_all(b"GET /camel HTTP/1.1\r\nConnection: Close\r\n\r\n")
.unwrap();
let mut data = vec![];
let _ = stream.read_to_end(&mut data).unwrap();
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
assert!(memmem::find(&data, b"Foo-Bar").is_some());
assert!(memmem::find(&data, b"foo-bar").is_none());
assert!(memmem::find(&data, b"Date").is_some());
assert!(memmem::find(&data, b"date").is_none());
assert!(memmem::find(&data, b"Content-Length").is_some());
assert!(memmem::find(&data, b"content-length").is_none());
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
stream
.write_all(b"GET /lower HTTP/1.1\r\nConnection: Close\r\n\r\n")
.unwrap();
let mut data = vec![];
let _ = stream.read_to_end(&mut data).unwrap();
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
assert!(memmem::find(&data, b"Foo-Bar").is_none());
assert!(memmem::find(&data, b"foo-bar").is_some());
assert!(memmem::find(&data, b"Date").is_none());
assert!(memmem::find(&data, b"date").is_some());
assert!(memmem::find(&data, b"Content-Length").is_none());
assert!(memmem::find(&data, b"content-length").is_some());
srv.stop().await;
}
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_server.rs | actix-http/tests/test_server.rs | use std::{
convert::Infallible,
io::{Read, Write},
net, thread,
time::{Duration, Instant},
};
use actix_http::{
body::{self, BodyStream, BoxBody, SizedStream},
header, Error, HttpService, KeepAlive, Request, Response, StatusCode, Version,
};
use actix_http_test::test_server;
use actix_rt::{net::TcpStream, time::sleep};
use actix_service::fn_service;
use actix_utils::future::{err, ok, ready};
use bytes::Bytes;
use derive_more::{Display, Error};
use futures_util::{stream::once, FutureExt as _, StreamExt as _};
use rand::Rng as _;
use regex::Regex;
#[actix_rt::test]
async fn h1_basic() {
let mut srv = test_server(|| {
HttpService::build()
.keep_alive(KeepAlive::Disabled)
.client_request_timeout(Duration::from_secs(1))
.client_disconnect_timeout(Duration::from_secs(1))
.h1(|req: Request| {
assert!(req.peer_addr().is_some());
ok::<_, Infallible>(Response::ok())
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
srv.stop().await;
}
#[actix_rt::test]
async fn h1_2() {
let mut srv = test_server(|| {
HttpService::build()
.keep_alive(KeepAlive::Disabled)
.client_request_timeout(Duration::from_secs(1))
.client_disconnect_timeout(Duration::from_secs(1))
.finish(|req: Request| {
assert!(req.peer_addr().is_some());
assert_eq!(req.version(), http::Version::HTTP_11);
ok::<_, Infallible>(Response::ok())
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
srv.stop().await;
}
#[derive(Debug, Display, Error)]
#[display("expect failed")]
struct ExpectFailed;
impl From<ExpectFailed> for Response<BoxBody> {
fn from(_: ExpectFailed) -> Self {
Response::new(StatusCode::EXPECTATION_FAILED)
}
}
#[actix_rt::test]
async fn expect_continue() {
let mut srv = test_server(|| {
HttpService::build()
.expect(fn_service(|req: Request| {
if req.head().uri.query() == Some("yes=") {
ok(req)
} else {
err(ExpectFailed)
}
}))
.finish(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test HTTP/1.1\r\nexpect: 100-continue\r\n\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 417 Expectation Failed\r\ncontent-length"));
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test?yes= HTTP/1.1\r\nexpect: 100-continue\r\n\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
srv.stop().await;
}
#[actix_rt::test]
async fn expect_continue_h1() {
let mut srv = test_server(|| {
HttpService::build()
.expect(fn_service(|req: Request| {
sleep(Duration::from_millis(20)).then(move |_| {
if req.head().uri.query() == Some("yes=") {
ok(req)
} else {
err(ExpectFailed)
}
})
}))
.h1(fn_service(|_| ok::<_, Infallible>(Response::ok())))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test HTTP/1.1\r\nexpect: 100-continue\r\n\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 417 Expectation Failed\r\ncontent-length"));
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test?yes= HTTP/1.1\r\nexpect: 100-continue\r\n\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n"));
srv.stop().await;
}
#[actix_rt::test]
async fn chunked_payload() {
let chunk_sizes = [32768, 32, 32768];
let total_size: usize = chunk_sizes.iter().sum();
let mut srv = test_server(|| {
HttpService::build()
.h1(fn_service(|mut request: Request| {
request
.take_payload()
.map(|res| match res {
Ok(pl) => pl,
Err(err) => panic!("Error reading payload: {err}"),
})
.fold(0usize, |acc, chunk| ready(acc + chunk.len()))
.map(|req_size| {
Ok::<_, Error>(Response::ok().set_body(format!("size={}", req_size)))
})
}))
.tcp()
})
.await;
let returned_size = {
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"POST /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n");
for chunk_size in chunk_sizes.iter() {
let mut bytes = Vec::new();
let random_bytes = rand::rng()
.sample_iter(rand::distr::StandardUniform)
.take(*chunk_size)
.collect::<Vec<_>>();
bytes.extend(format!("{:X}\r\n", chunk_size).as_bytes());
bytes.extend(&random_bytes[..]);
bytes.extend(b"\r\n");
let _ = stream.write_all(&bytes);
}
let _ = stream.write_all(b"0\r\n\r\n");
stream.shutdown(net::Shutdown::Write).unwrap();
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
let re = Regex::new(r"size=(\d+)").unwrap();
let size: usize = match re.captures(&data) {
Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
None => panic!("Failed to find size in HTTP Response: {}", data),
};
size
};
assert_eq!(returned_size, total_size);
srv.stop().await;
}
#[actix_rt::test]
async fn slow_request_408() {
let mut srv = test_server(|| {
HttpService::build()
.client_request_timeout(Duration::from_millis(200))
.keep_alive(Duration::from_secs(2))
.finish(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let start = Instant::now();
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test HTTP/1.1\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(
data.starts_with("HTTP/1.1 408 Request Timeout"),
"response was not 408: {}",
data
);
let diff = start.elapsed();
if diff < Duration::from_secs(1) {
// test success
} else if diff < Duration::from_secs(3) {
panic!("request seems to have wrongly timed-out according to keep-alive");
} else {
panic!("request took way too long to time out");
}
srv.stop().await;
}
#[actix_rt::test]
async fn http1_malformed_request() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP1.1\r\n");
let mut data = String::new();
let _ = stream.read_to_string(&mut data);
assert!(data.starts_with("HTTP/1.1 400 Bad Request"));
srv.stop().await;
}
#[actix_rt::test]
async fn http1_keepalive() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.1\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.1\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
srv.stop().await;
}
#[actix_rt::test]
async fn http1_keepalive_timeout() {
let mut srv = test_server(|| {
HttpService::build()
.keep_alive(Duration::from_secs(1))
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test HTTP/1.1\r\n\r\n");
let mut data = vec![0; 256];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
thread::sleep(Duration::from_millis(1100));
let mut data = vec![0; 256];
let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0);
srv.stop().await;
}
#[actix_rt::test]
async fn http1_keepalive_close() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.1\r\nconnection: close\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0);
srv.stop().await;
}
#[actix_rt::test]
async fn http10_keepalive_default_close() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.0\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.0 200 OK\r\n");
let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0);
srv.stop().await;
}
#[actix_rt::test]
async fn http10_keepalive() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.0 200 OK\r\n");
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.0\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.0 200 OK\r\n");
let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0);
srv.stop().await;
}
#[actix_rt::test]
async fn http1_keepalive_disabled() {
let mut srv = test_server(|| {
HttpService::build()
.keep_alive(KeepAlive::Disabled)
.h1(|_| ok::<_, Infallible>(Response::ok()))
.tcp()
})
.await;
let mut stream = net::TcpStream::connect(srv.addr()).unwrap();
let _ = stream.write_all(b"GET /test/tests/test HTTP/1.1\r\n\r\n");
let mut data = vec![0; 1024];
let _ = stream.read(&mut data);
assert_eq!(&data[..17], b"HTTP/1.1 200 OK\r\n");
let mut data = vec![0; 1024];
let res = stream.read(&mut data).unwrap();
assert_eq!(res, 0);
srv.stop().await;
}
#[actix_rt::test]
async fn content_length() {
use actix_http::{
header::{HeaderName, HeaderValue},
StatusCode,
};
let mut srv = test_server(|| {
HttpService::build()
.h1(|req: Request| {
let idx: usize = req.uri().path()[1..].parse().unwrap();
let statuses = [
StatusCode::NO_CONTENT,
StatusCode::CONTINUE,
StatusCode::SWITCHING_PROTOCOLS,
StatusCode::PROCESSING,
StatusCode::OK,
StatusCode::NOT_FOUND,
];
ok::<_, Infallible>(Response::new(statuses[idx]))
})
.tcp()
})
.await;
let header = HeaderName::from_static("content-length");
let value = HeaderValue::from_static("0");
{
for i in 0..4 {
let req = srv.request(http::Method::GET, srv.url(&format!("/{}", i)));
let response = req.send().await.unwrap();
assert_eq!(response.headers().get(&header), None);
let req = srv.request(http::Method::HEAD, srv.url(&format!("/{}", i)));
let response = req.send().await.unwrap();
assert_eq!(response.headers().get(&header), None);
}
for i in 4..6 {
let req = srv.request(http::Method::GET, srv.url(&format!("/{}", i)));
let response = req.send().await.unwrap();
assert_eq!(response.headers().get(&header), Some(&value));
}
}
srv.stop().await;
}
#[actix_rt::test]
async fn h1_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
let mut srv = test_server(move || {
let data = data.clone();
HttpService::build()
.h1(move |_| {
let mut builder = Response::build(StatusCode::OK);
for idx in 0..90 {
builder.insert_header((
format!("X-TEST-{}", idx).as_str(),
"TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST ",
));
}
ok::<_, Infallible>(builder.body(data.clone()))
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from(data2));
srv.stop().await;
}
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn h1_body() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
}
#[actix_rt::test]
async fn h1_head_empty() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
})
.await;
let response = srv.head("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
srv.stop().await;
}
#[actix_rt::test]
async fn h1_head_binary() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
})
.await;
let response = srv.head("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
srv.stop().await;
}
#[actix_rt::test]
async fn h1_head_binary2() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
})
.await;
let response = srv.head("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
srv.stop().await;
}
#[actix_rt::test]
async fn h1_body_length() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| {
let body = once(ok::<_, Infallible>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(
Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),
)
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
}
#[actix_rt::test]
async fn h1_body_chunked_explicit() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((header::TRANSFER_ENCODING, "chunked"))
.body(BodyStream::new(body)),
)
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
assert_eq!(
response
.headers()
.get(header::TRANSFER_ENCODING)
.unwrap()
.to_str()
.unwrap(),
"chunked"
);
// read response
let bytes = srv.load_body(response).await.unwrap();
// decode
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
}
#[actix_rt::test]
async fn h1_body_chunked_implicit() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(body)))
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
assert_eq!(
response
.headers()
.get(header::TRANSFER_ENCODING)
.unwrap()
.to_str()
.unwrap(),
"chunked"
);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
srv.stop().await;
}
#[actix_rt::test]
async fn h1_response_http_error_handling() {
let mut srv = test_server(|| {
HttpService::build()
.h1(fn_service(|_| {
let broken_header = Bytes::from_static(b"\0\0\0");
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((http::header::CONTENT_TYPE, broken_header))
.body(STR),
)
}))
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert_eq!(response.status(), http::StatusCode::INTERNAL_SERVER_ERROR);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(
bytes,
Bytes::from_static(b"error processing HTTP: failed to parse header value")
);
srv.stop().await;
}
#[derive(Debug, Display, Error)]
#[display("error")]
struct BadRequest;
impl From<BadRequest> for Response<BoxBody> {
fn from(_: BadRequest) -> Self {
Response::bad_request().set_body(BoxBody::new("error"))
}
}
#[actix_rt::test]
async fn h1_service_error() {
let mut srv = test_server(|| {
HttpService::build()
.h1(|_| err::<Response<()>, _>(BadRequest))
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error"));
srv.stop().await;
}
#[actix_rt::test]
async fn h1_on_connect() {
let mut srv = test_server(|| {
HttpService::build()
.on_connect_ext(|_, data| {
data.insert(20isize);
})
.h1(|req: Request| {
assert!(req.conn_data::<isize>().is_some());
ok::<_, Infallible>(Response::ok())
})
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
srv.stop().await;
}
/// Tests compliance with 304 Not Modified spec in RFC 7232 §4.1.
/// https://datatracker.ietf.org/doc/html/rfc7232#section-4.1
#[actix_rt::test]
async fn not_modified_spec_h1() {
// TODO: this test needing a few seconds to complete reveals some weirdness with either the
// dispatcher or the client, though similar hangs occur on other tests in this file, only
// succeeding, it seems, because of the keepalive timer
static CL: header::HeaderName = header::CONTENT_LENGTH;
let mut srv = test_server(|| {
HttpService::build()
.h1(|req: Request| {
let res: Response<BoxBody> = match req.path() {
// with no content-length
"/none" => Response::with_body(StatusCode::NOT_MODIFIED, body::None::new())
.map_into_boxed_body(),
// with no content-length
"/body" => {
Response::with_body(StatusCode::NOT_MODIFIED, "1234").map_into_boxed_body()
}
// with manual content-length header and specific None body
"/cl-none" => {
let mut res =
Response::with_body(StatusCode::NOT_MODIFIED, body::None::new());
res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("24"));
res.map_into_boxed_body()
}
// with manual content-length header and ignore-able body
"/cl-body" => {
let mut res = Response::with_body(StatusCode::NOT_MODIFIED, "1234");
res.headers_mut()
.insert(CL.clone(), header::HeaderValue::from_static("4"));
res.map_into_boxed_body()
}
_ => panic!("unknown route"),
};
ok::<_, Infallible>(res)
})
.tcp()
})
.await;
let res = srv.get("/none").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(res.headers().get(&CL), None);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/body").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(res.headers().get(&CL), None);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/cl-none").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(
res.headers().get(&CL),
Some(&header::HeaderValue::from_static("24")),
);
assert!(srv.load_body(res).await.unwrap().is_empty());
let res = srv.get("/cl-body").send().await.unwrap();
assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
assert_eq!(
res.headers().get(&CL),
Some(&header::HeaderValue::from_static("4")),
);
// server does not prevent payload from being sent but clients may choose not to read it
// TODO: this is probably a bug in the client, especially since CL header can differ in length
// from the body
assert!(!srv.load_body(res).await.unwrap().is_empty());
// TODO: add stream response tests
srv.stop().await;
}
#[actix_rt::test]
async fn h2c_auto() {
let mut srv = test_server(|| {
HttpService::build()
.keep_alive(KeepAlive::Disabled)
.finish(|req: Request| {
let body = match req.version() {
Version::HTTP_11 => "h1",
Version::HTTP_2 => "h2",
_ => unreachable!(),
};
ok::<_, Infallible>(Response::ok().set_body(body))
})
.tcp_auto_h2c()
})
.await;
let req = srv.get("/");
assert_eq!(req.get_version(), &Version::HTTP_11);
let mut res = req.send().await.unwrap();
assert!(res.status().is_success());
assert_eq!(res.body().await.unwrap(), &b"h1"[..]);
// awc doesn't support forcing the version to http/2 so use h2 manually
let tcp = TcpStream::connect(srv.addr()).await.unwrap();
let (h2, connection) = h2::client::handshake(tcp).await.unwrap();
tokio::spawn(async move { connection.await.unwrap() });
let mut h2 = h2.ready().await.unwrap();
let request = ::http::Request::new(());
let (response, _) = h2.send_request(request, true).unwrap();
let (head, mut body) = response.await.unwrap().into_parts();
let body = body.data().await.unwrap().unwrap();
assert!(head.status.is_success());
assert_eq!(body, &b"h2"[..]);
srv.stop().await;
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_client.rs | actix-http/tests/test_client.rs | use std::convert::Infallible;
use actix_http::{body::BoxBody, HttpMessage, HttpService, Request, Response, StatusCode};
use actix_http_test::test_server;
use actix_service::ServiceFactoryExt;
use actix_utils::future;
use bytes::Bytes;
use derive_more::{Display, Error};
use futures_util::StreamExt as _;
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn h1_v2() {
let srv = test_server(move || {
HttpService::build()
.finish(|_| future::ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
})
.await;
let response = srv.get("/").send().await.unwrap();
assert!(response.status().is_success());
let request = srv.get("/").insert_header(("x-test", "111")).send();
let mut response = request.await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = response.body().await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
let mut response = srv.post("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = response.body().await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn connection_close() {
let srv = test_server(move || {
HttpService::build()
.finish(|_| future::ok::<_, Infallible>(Response::ok().set_body(STR)))
.tcp()
.map(|_| ())
})
.await;
let response = srv.get("/").force_close().send().await.unwrap();
assert!(response.status().is_success());
}
#[actix_rt::test]
async fn with_query_parameter() {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| async move {
if req.uri().query().unwrap().contains("qp=") {
Ok::<_, Infallible>(Response::ok())
} else {
Ok(Response::bad_request())
}
})
.tcp()
.map(|_| ())
})
.await;
let request = srv.request(http::Method::GET, srv.url("/?qp=5"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
#[derive(Debug, Display, Error)]
#[display("expect failed")]
struct ExpectFailed;
impl From<ExpectFailed> for Response<BoxBody> {
fn from(_: ExpectFailed) -> Self {
Response::new(StatusCode::EXPECTATION_FAILED)
}
}
#[actix_rt::test]
async fn h1_expect() {
let srv = test_server(move || {
HttpService::build()
.expect(|req: Request| async {
if req.headers().contains_key("AUTH") {
Ok(req)
} else {
Err(ExpectFailed)
}
})
.h1(|req: Request| async move {
let (_, mut body) = req.into_parts();
let mut buf = Vec::new();
while let Some(Ok(chunk)) = body.next().await {
buf.extend_from_slice(&chunk);
}
let str = std::str::from_utf8(&buf).unwrap();
assert_eq!(str, "expect body");
Ok::<_, Infallible>(Response::ok())
})
.tcp()
})
.await;
// test expect without payload.
let request = srv
.request(http::Method::GET, srv.url("/"))
.insert_header(("Expect", "100-continue"));
let response = request.send().await;
assert!(response.is_err());
// test expect would fail to continue
let request = srv
.request(http::Method::GET, srv.url("/"))
.insert_header(("Expect", "100-continue"));
let response = request.send_body("expect body").await.unwrap();
assert_eq!(response.status(), StatusCode::EXPECTATION_FAILED);
// test expect would continue
let request = srv
.request(http::Method::GET, srv.url("/"))
.insert_header(("Expect", "100-continue"))
.insert_header(("AUTH", "996"));
let response = request.send_body("expect body").await.unwrap();
assert!(response.status().is_success());
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_ws.rs | actix-http/tests/test_ws.rs | use std::{
cell::Cell,
convert::Infallible,
task::{Context, Poll},
};
use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_http::{
body::{BodySize, BoxBody},
h1,
ws::{self, CloseCode, Frame, Item, Message},
Error, HttpService, Request, Response,
};
use actix_http_test::test_server;
use actix_service::{fn_factory, Service};
use bytes::Bytes;
use derive_more::{Display, Error, From};
use futures_core::future::LocalBoxFuture;
use futures_util::{SinkExt as _, StreamExt as _};
#[derive(Clone)]
struct WsService(Cell<bool>);
impl WsService {
fn new() -> Self {
WsService(Cell::new(false))
}
fn set_polled(&self) {
self.0.set(true);
}
fn was_polled(&self) -> bool {
self.0.get()
}
}
#[derive(Debug, Display, Error, From)]
enum WsServiceError {
#[display("HTTP error")]
Http(actix_http::Error),
#[display("WS handshake error")]
Ws(actix_http::ws::HandshakeError),
#[display("I/O error")]
Io(std::io::Error),
#[display("dispatcher error")]
Dispatcher,
}
impl From<WsServiceError> for Response<BoxBody> {
fn from(err: WsServiceError) -> Self {
match err {
WsServiceError::Http(err) => err.into(),
WsServiceError::Ws(err) => err.into(),
WsServiceError::Io(_err) => unreachable!(),
WsServiceError::Dispatcher => {
Response::internal_server_error().set_body(BoxBody::new(format!("{}", err)))
}
}
}
}
impl<T> Service<(Request, Framed<T, h1::Codec>)> for WsService
where
T: AsyncRead + AsyncWrite + Unpin + 'static,
{
type Response = ();
type Error = WsServiceError;
type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;
fn poll_ready(&self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.set_polled();
Poll::Ready(Ok(()))
}
fn call(&self, (req, mut framed): (Request, Framed<T, h1::Codec>)) -> Self::Future {
assert!(self.was_polled());
Box::pin(async move {
let res = ws::handshake(req.head())?.message_body(())?;
framed.send((res, BodySize::None).into()).await?;
let framed = framed.replace_codec(ws::Codec::new());
ws::Dispatcher::with(framed, service)
.await
.map_err(|_| WsServiceError::Dispatcher)?;
Ok(())
})
}
}
async fn service(msg: Frame) -> Result<Message, Error> {
let msg = match msg {
Frame::Ping(msg) => Message::Pong(msg),
Frame::Text(text) => Message::Text(String::from_utf8_lossy(&text).into_owned().into()),
Frame::Binary(bin) => Message::Binary(bin),
Frame::Continuation(item) => Message::Continuation(item),
Frame::Close(reason) => Message::Close(reason),
_ => return Err(ws::ProtocolError::BadOpCode.into()),
};
Ok(msg)
}
#[actix_rt::test]
async fn simple() {
let mut srv = test_server(|| {
HttpService::build()
.upgrade(fn_factory(|| async {
Ok::<_, Infallible>(WsService::new())
}))
.finish(|_| async { Ok::<_, Infallible>(Response::not_found()) })
.tcp()
})
.await;
// client service
let mut framed = srv.ws().await.unwrap();
framed.send(Message::Text("text".into())).await.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(item, Frame::Text(Bytes::from_static(b"text")));
framed.send(Message::Binary("text".into())).await.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(item, Frame::Binary(Bytes::from_static(&b"text"[..])));
framed.send(Message::Ping("text".into())).await.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(item, Frame::Pong("text".to_string().into()));
framed
.send(Message::Continuation(Item::FirstText("text".into())))
.await
.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(
item,
Frame::Continuation(Item::FirstText(Bytes::from_static(b"text")))
);
assert!(framed
.send(Message::Continuation(Item::FirstText("text".into())))
.await
.is_err());
assert!(framed
.send(Message::Continuation(Item::FirstBinary("text".into())))
.await
.is_err());
framed
.send(Message::Continuation(Item::Continue("text".into())))
.await
.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(
item,
Frame::Continuation(Item::Continue(Bytes::from_static(b"text")))
);
framed
.send(Message::Continuation(Item::Last("text".into())))
.await
.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(
item,
Frame::Continuation(Item::Last(Bytes::from_static(b"text")))
);
assert!(framed
.send(Message::Continuation(Item::Continue("text".into())))
.await
.is_err());
assert!(framed
.send(Message::Continuation(Item::Last("text".into())))
.await
.is_err());
framed
.send(Message::Close(Some(CloseCode::Normal.into())))
.await
.unwrap();
let item = framed.next().await.unwrap().unwrap();
assert_eq!(item, Frame::Close(Some(CloseCode::Normal.into())));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_rustls.rs | actix-http/tests/test_rustls.rs | #![cfg(feature = "rustls-0_23")]
extern crate tls_rustls_023 as rustls;
use std::{
convert::Infallible,
io::{self, Write},
net::{SocketAddr, TcpStream as StdTcpStream},
sync::Arc,
task::Poll,
time::Duration,
};
use actix_http::{
body::{BodyStream, BoxBody, SizedStream},
error::PayloadError,
header::{self, HeaderName, HeaderValue},
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
};
use actix_http_test::test_server;
use actix_rt::pin;
use actix_service::{fn_factory_with_config, fn_service};
use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
use actix_utils::future::{err, ok, poll_fn};
use bytes::{Bytes, BytesMut};
use derive_more::{Display, Error};
use futures_core::{ready, Stream};
use futures_util::stream::once;
use rustls::{pki_types::ServerName, ServerConfig as RustlsServerConfig};
use rustls_pki_types::{PrivateKeyDer, PrivatePkcs8KeyDer};
async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError>
where
S: Stream<Item = Result<Bytes, PayloadError>>,
{
let mut buf = BytesMut::new();
pin!(stream);
poll_fn(|cx| loop {
let body = stream.as_mut();
match ready!(body.poll_next(cx)) {
Some(Ok(bytes)) => buf.extend_from_slice(&bytes),
None => return Poll::Ready(Ok(())),
Some(Err(err)) => return Poll::Ready(Err(err)),
}
})
.await?;
Ok(buf)
}
fn tls_config() -> RustlsServerConfig {
let rcgen::CertifiedKey { cert, key_pair } =
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let cert_chain = vec![cert.der().clone()];
let key_der = PrivateKeyDer::Pkcs8(PrivatePkcs8KeyDer::from(key_pair.serialize_der()));
let mut config = RustlsServerConfig::builder()
.with_no_client_auth()
.with_single_cert(cert_chain, key_der)
.unwrap();
config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());
config.alpn_protocols.push(H2_ALPN_PROTOCOL.to_vec());
config
}
pub fn get_negotiated_alpn_protocol(
addr: SocketAddr,
client_alpn_protocol: &[u8],
) -> Option<Vec<u8>> {
let mut config = rustls::ClientConfig::builder()
.with_root_certificates(webpki_roots_cert_store())
.with_no_client_auth();
config.alpn_protocols.push(client_alpn_protocol.to_vec());
let mut sess =
rustls::ClientConnection::new(Arc::new(config), ServerName::try_from("localhost").unwrap())
.unwrap();
let mut sock = StdTcpStream::connect(addr).unwrap();
let mut stream = rustls::Stream::new(&mut sess, &mut sock);
// The handshake will fails because the client will not be able to verify the server
// certificate, but it doesn't matter here as we are just interested in the negotiated ALPN
// protocol
let _ = stream.flush();
sess.alpn_protocol().map(|proto| proto.to_vec())
}
#[actix_rt::test]
async fn h1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h2() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h1_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h1(|req: Request| {
assert!(req.peer_addr().is_some());
assert_eq!(req.version(), Version::HTTP_11);
ok::<_, Error>(Response::ok())
})
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h2_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
assert!(req.peer_addr().is_some());
assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok())
})
.rustls_0_23_with_config(
tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
)
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h2_body1() -> io::Result<()> {
let data = "HELLOWORLD".to_owned().repeat(64 * 1024);
let mut srv = test_server(move || {
HttpService::build()
.h2(|mut req: Request<_>| async move {
let body = load_body(req.take_payload()).await?;
Ok::<_, Error>(Response::ok().set_body(body))
})
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send_body(data.clone()).await.unwrap();
assert!(response.status().is_success());
let body = srv.load_body(response).await.unwrap();
assert_eq!(&body, data.as_bytes());
Ok(())
}
#[actix_rt::test]
async fn h2_content_length() {
let srv = test_server(move || {
HttpService::build()
.h2(|req: Request| {
let indx: usize = req.uri().path()[1..].parse().unwrap();
let statuses = [
StatusCode::CONTINUE,
StatusCode::NO_CONTENT,
StatusCode::OK,
StatusCode::NOT_FOUND,
];
ok::<_, Infallible>(Response::new(statuses[indx]))
})
.rustls_0_23(tls_config())
})
.await;
let header = HeaderName::from_static("content-length");
let value = HeaderValue::from_static("0");
{
#[allow(clippy::single_element_loop)]
for &i in &[0] {
let req = srv
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
.send();
let _response = req.await.expect_err("should timeout on recv 1xx frame");
// assert_eq!(response.headers().get(&header), None);
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let _response = req.await.expect_err("should timeout on recv 1xx frame");
// assert_eq!(response.headers().get(&header), None);
}
#[allow(clippy::single_element_loop)]
for &i in &[1] {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), None);
}
for &i in &[2, 3] {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), Some(&value));
}
}
}
#[actix_rt::test]
async fn h2_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
let mut srv = test_server(move || {
let data = data.clone();
HttpService::build()
.h2(move |_| {
let mut config = Response::build(StatusCode::OK);
for idx in 0..90 {
config.insert_header((
format!("X-TEST-{}", idx).as_str(),
"TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST ",
));
}
ok::<_, Infallible>(config.body(data.clone()))
})
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from(data2));
}
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn h2_body2() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_head_empty() {
let mut srv = test_server(move || {
HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
assert_eq!(response.version(), Version::HTTP_2);
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn h2_head_binary2() {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.rustls_0_23(tls_config())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response
.headers()
.get(http::header::CONTENT_LENGTH)
.unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
}
#[actix_rt::test]
async fn h2_body_length() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
let body = once(ok::<_, Infallible>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(
Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),
)
})
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_body_chunked_explicit() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((header::TRANSFER_ENCODING, "chunked"))
.body(BodyStream::new(body)),
)
})
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
assert!(!response.headers().contains_key(header::TRANSFER_ENCODING));
// read response
let bytes = srv.load_body(response).await.unwrap();
// decode
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_response_http_error_handling() {
let mut srv = test_server(move || {
HttpService::build()
.h2(fn_factory_with_config(|_: ()| {
ok::<_, Infallible>(fn_service(|_| {
let broken_header = Bytes::from_static(b"\0\0\0");
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((http::header::CONTENT_TYPE, broken_header))
.body(STR),
)
}))
}))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), http::StatusCode::INTERNAL_SERVER_ERROR);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(
bytes,
Bytes::from_static(b"error processing HTTP: failed to parse header value")
);
}
#[derive(Debug, Display, Error)]
#[display("error")]
struct BadRequest;
impl From<BadRequest> for Response<BoxBody> {
fn from(_: BadRequest) -> Self {
Response::bad_request().set_body(BoxBody::new("error"))
}
}
#[actix_rt::test]
async fn h2_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error"));
}
#[actix_rt::test]
async fn h1_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h1(|_| err::<Response<BoxBody>, _>(BadRequest))
.rustls_0_23(tls_config())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error"));
}
const H2_ALPN_PROTOCOL: &[u8] = b"h2";
const HTTP1_1_ALPN_PROTOCOL: &[u8] = b"http/1.1";
const CUSTOM_ALPN_PROTOCOL: &[u8] = b"custom";
#[actix_rt::test]
async fn alpn_h1() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build()
.h1(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config)
})
.await;
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), CUSTOM_ALPN_PROTOCOL),
Some(CUSTOM_ALPN_PROTOCOL.to_vec())
);
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn alpn_h2() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config)
})
.await;
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), H2_ALPN_PROTOCOL),
Some(H2_ALPN_PROTOCOL.to_vec())
);
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), CUSTOM_ALPN_PROTOCOL),
Some(CUSTOM_ALPN_PROTOCOL.to_vec())
);
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn alpn_h2_1() -> io::Result<()> {
let srv = test_server(move || {
let mut config = tls_config();
config.alpn_protocols.push(CUSTOM_ALPN_PROTOCOL.to_vec());
HttpService::build()
.finish(|_| ok::<_, Error>(Response::ok()))
.rustls_0_23(config)
})
.await;
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), H2_ALPN_PROTOCOL),
Some(H2_ALPN_PROTOCOL.to_vec())
);
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), HTTP1_1_ALPN_PROTOCOL),
Some(HTTP1_1_ALPN_PROTOCOL.to_vec())
);
assert_eq!(
get_negotiated_alpn_protocol(srv.addr(), CUSTOM_ALPN_PROTOCOL),
Some(CUSTOM_ALPN_PROTOCOL.to_vec())
);
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_openssl.rs | actix-http/tests/test_openssl.rs | #![cfg(feature = "openssl")]
extern crate tls_openssl as openssl;
use std::{convert::Infallible, io, time::Duration};
use actix_http::{
body::{BodyStream, BoxBody, SizedStream},
error::PayloadError,
header::{self, HeaderValue},
Error, HttpService, Method, Request, Response, StatusCode, TlsAcceptorConfig, Version,
};
use actix_http_test::test_server;
use actix_service::{fn_service, ServiceFactoryExt};
use actix_utils::future::{err, ok, ready};
use bytes::{Bytes, BytesMut};
use derive_more::{Display, Error};
use futures_core::Stream;
use futures_util::{stream::once, StreamExt as _};
use openssl::{
pkey::PKey,
ssl::{SslAcceptor, SslMethod},
x509::X509,
};
async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError>
where
S: Stream<Item = Result<Bytes, PayloadError>>,
{
let body = stream
.map(|res| match res {
Ok(chunk) => chunk,
Err(_) => panic!(),
})
.fold(BytesMut::new(), move |mut body, chunk| {
body.extend_from_slice(&chunk);
ready(body)
})
.await;
Ok(body)
}
fn tls_config() -> SslAcceptor {
let rcgen::CertifiedKey { cert, key_pair } =
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let cert_file = cert.pem();
let key_file = key_pair.serialize_pem();
let cert = X509::from_pem(cert_file.as_bytes()).unwrap();
let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();
let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();
builder.set_certificate(&cert).unwrap();
builder.set_private_key(&key).unwrap();
builder.set_alpn_select_callback(|_, protos| {
const H2: &[u8] = b"\x02h2";
if protos.windows(3).any(|window| window == H2) {
Ok(b"h2")
} else {
Err(openssl::ssl::AlpnError::NOACK)
}
});
builder.set_alpn_protos(b"\x02h2").unwrap();
builder.build()
}
#[actix_rt::test]
async fn h2() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Error>(Response::ok()))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h2_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
assert!(req.peer_addr().is_some());
assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::ok())
})
.openssl_with_config(
tls_config(),
TlsAcceptorConfig::default().handshake_timeout(Duration::from_secs(5)),
)
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn h2_body() -> io::Result<()> {
let data = "HELLOWORLD".to_owned().repeat(64 * 1024); // 640 KiB
let mut srv = test_server(move || {
HttpService::build()
.h2(|mut req: Request<_>| async move {
let body = load_body(req.take_payload()).await?;
Ok::<_, Error>(Response::ok().set_body(body))
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send_body(data.clone()).await.unwrap();
assert!(response.status().is_success());
let body = srv.load_body(response).await.unwrap();
assert_eq!(&body, data.as_bytes());
Ok(())
}
#[actix_rt::test]
async fn h2_content_length() {
let srv = test_server(move || {
HttpService::build()
.h2(|req: Request| {
let idx: usize = req.uri().path()[1..].parse().unwrap();
let statuses = [
StatusCode::CONTINUE,
StatusCode::NO_CONTENT,
StatusCode::OK,
StatusCode::NOT_FOUND,
];
ok::<_, Infallible>(Response::new(statuses[idx]))
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
static VALUE: HeaderValue = HeaderValue::from_static("0");
{
let req = srv.request(Method::HEAD, srv.surl("/0")).send();
req.await.expect_err("should timeout on recv 1xx frame");
let req = srv.request(Method::GET, srv.surl("/0")).send();
req.await.expect_err("should timeout on recv 1xx frame");
let req = srv.request(Method::GET, srv.surl("/1")).send();
let response = req.await.unwrap();
assert!(response.headers().get("content-length").is_none());
for &i in &[2, 3] {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get("content-length"), Some(&VALUE));
}
}
}
#[actix_rt::test]
async fn h2_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
let mut srv = test_server(move || {
let data = data.clone();
HttpService::build()
.h2(move |_| {
let mut builder = Response::build(StatusCode::OK);
for idx in 0..90 {
builder.insert_header(
(format!("X-TEST-{}", idx).as_str(),
"TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST ",
));
}
ok::<_, Infallible>(builder.body(data.clone()))
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from(data2));
}
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn h2_body2() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_head_empty() {
let mut srv = test_server(move || {
HttpService::build()
.finish(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
assert_eq!(response.version(), Version::HTTP_2);
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn h2_head_binary2() {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Infallible>(Response::ok().set_body(STR)))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
}
#[actix_rt::test]
async fn h2_body_length() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| async {
let body = once(async { Ok::<_, Infallible>(Bytes::from_static(STR.as_ref())) });
Ok::<_, Infallible>(
Response::ok().set_body(SizedStream::new(STR.len() as u64, body)),
)
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_body_chunked_explicit() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((header::TRANSFER_ENCODING, "chunked"))
.body(BodyStream::new(body)),
)
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
assert!(!response.headers().contains_key(header::TRANSFER_ENCODING));
// read response
let bytes = srv.load_body(response).await.unwrap();
// decode
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn h2_response_http_error_handling() {
let mut srv = test_server(move || {
HttpService::build()
.h2(fn_service(|_| {
let broken_header = Bytes::from_static(b"\0\0\0");
ok::<_, Infallible>(
Response::build(StatusCode::OK)
.insert_header((header::CONTENT_TYPE, broken_header))
.body(STR),
)
}))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(
bytes,
Bytes::from_static(b"error processing HTTP: failed to parse header value")
);
}
#[derive(Debug, Display, Error)]
#[display("error")]
struct BadRequest;
impl From<BadRequest> for Response<BoxBody> {
fn from(err: BadRequest) -> Self {
Response::build(StatusCode::BAD_REQUEST)
.body(err.to_string())
.map_into_boxed_body()
}
}
#[actix_rt::test]
async fn h2_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| err::<Response<BoxBody>, _>(BadRequest))
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error"));
}
#[actix_rt::test]
async fn h2_on_connect() {
let srv = test_server(move || {
HttpService::build()
.on_connect_ext(|_, data| {
data.insert(20isize);
})
.h2(|req: Request| {
assert!(req.conn_data::<isize>().is_some());
ok::<_, Infallible>(Response::ok())
})
.openssl(tls_config())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/tests/test_h2_timer.rs | actix-http/tests/test_h2_timer.rs | use std::{io, time::Duration};
use actix_http::{error::Error, HttpService, Response};
use actix_server::Server;
use tokio::io::AsyncWriteExt;
#[actix_rt::test]
async fn h2_ping_pong() -> io::Result<()> {
let (tx, rx) = std::sync::mpsc::sync_channel(1);
let lst = std::net::TcpListener::bind("127.0.0.1:0")?;
let addr = lst.local_addr().unwrap();
let join = std::thread::spawn(move || {
actix_rt::System::new().block_on(async move {
let srv = Server::build()
.disable_signals()
.workers(1)
.listen("h2_ping_pong", lst, || {
HttpService::build()
.keep_alive(Duration::from_secs(3))
.h2(|_| async { Ok::<_, Error>(Response::ok()) })
.tcp()
})?
.run();
tx.send(srv.handle()).unwrap();
srv.await
})
});
let handle = rx.recv().unwrap();
let (sync_tx, rx) = std::sync::mpsc::sync_channel(1);
// use a separate thread for h2 client so it can be blocked.
std::thread::spawn(move || {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(async move {
let stream = tokio::net::TcpStream::connect(addr).await.unwrap();
let (mut tx, conn) = h2::client::handshake(stream).await.unwrap();
tokio::spawn(async move { conn.await.unwrap() });
let (res, _) = tx.send_request(::http::Request::new(()), true).unwrap();
let res = res.await.unwrap();
assert_eq!(res.status().as_u16(), 200);
sync_tx.send(()).unwrap();
// intentionally block the client thread so it can not answer ping pong.
std::thread::sleep(std::time::Duration::from_secs(1000));
})
});
rx.recv().unwrap();
let now = std::time::Instant::now();
// stop server gracefully. this step would take up to 30 seconds.
handle.stop(true).await;
// join server thread. only when connection are all gone this step would finish.
join.join().unwrap()?;
// check the time used for join server thread so it's known that the server shutdown
// is from keep alive and not server graceful shutdown timeout.
assert!(now.elapsed() < std::time::Duration::from_secs(30));
Ok(())
}
#[actix_rt::test]
async fn h2_handshake_timeout() -> io::Result<()> {
let (tx, rx) = std::sync::mpsc::sync_channel(1);
let lst = std::net::TcpListener::bind("127.0.0.1:0")?;
let addr = lst.local_addr().unwrap();
let join = std::thread::spawn(move || {
actix_rt::System::new().block_on(async move {
let srv = Server::build()
.disable_signals()
.workers(1)
.listen("h2_ping_pong", lst, || {
HttpService::build()
.keep_alive(Duration::from_secs(30))
// set first request timeout to 5 seconds.
// this is the timeout used for http2 handshake.
.client_request_timeout(Duration::from_secs(5))
.h2(|_| async { Ok::<_, Error>(Response::ok()) })
.tcp()
})?
.run();
tx.send(srv.handle()).unwrap();
srv.await
})
});
let handle = rx.recv().unwrap();
let (sync_tx, rx) = std::sync::mpsc::sync_channel(1);
// use a separate thread for tcp client so it can be blocked.
std::thread::spawn(move || {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
.block_on(async move {
let mut stream = tokio::net::TcpStream::connect(addr).await.unwrap();
// do not send the last new line intentionally.
// This should hang the server handshake
let malicious_buf = b"PRI * HTTP/2.0\r\n\r\nSM\r\n";
stream.write_all(malicious_buf).await.unwrap();
stream.flush().await.unwrap();
sync_tx.send(()).unwrap();
// intentionally block the client thread so it sit idle and not do handshake.
std::thread::sleep(std::time::Duration::from_secs(1000));
drop(stream)
})
});
rx.recv().unwrap();
let now = std::time::Instant::now();
// stop server gracefully. this step would take up to 30 seconds.
handle.stop(true).await;
// join server thread. only when connection are all gone this step would finish.
join.join().unwrap()?;
// check the time used for join server thread so it's known that the server shutdown
// is from handshake timeout and not server graceful shutdown timeout.
assert!(now.elapsed() < std::time::Duration::from_secs(30));
Ok(())
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/benches/date-formatting.rs | actix-http/benches/date-formatting.rs | use std::time::SystemTime;
use actix_http::header::HttpDate;
use divan::{black_box, AllocProfiler, Bencher};
#[global_allocator]
static ALLOC: AllocProfiler = AllocProfiler::system();
#[divan::bench]
fn date_formatting(b: Bencher<'_, '_>) {
let now = SystemTime::now();
b.bench(|| {
black_box(HttpDate::from(black_box(now)).to_string());
})
}
fn main() {
divan::main();
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/benches/response-body-compression.rs | actix-http/benches/response-body-compression.rs | use std::convert::Infallible;
use actix_http::{encoding::Encoder, ContentEncoding, Request, Response, StatusCode};
use actix_service::{fn_service, Service as _};
use criterion::{black_box, criterion_group, criterion_main, Criterion};
static BODY: &[u8] = include_bytes!("../Cargo.toml");
fn compression_responses(c: &mut Criterion) {
let mut group = c.benchmark_group("compression responses");
group.bench_function("identity", |b| {
let rt = actix_rt::Runtime::new().unwrap();
let identity_svc = fn_service(|_: Request| async move {
let mut res = Response::with_body(StatusCode::OK, ());
let body = black_box(Encoder::response(
ContentEncoding::Identity,
res.head_mut(),
BODY,
));
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
});
b.iter(|| {
rt.block_on(identity_svc.call(Request::new())).unwrap();
});
});
group.bench_function("gzip", |b| {
let rt = actix_rt::Runtime::new().unwrap();
let identity_svc = fn_service(|_: Request| async move {
let mut res = Response::with_body(StatusCode::OK, ());
let body = black_box(Encoder::response(
ContentEncoding::Gzip,
res.head_mut(),
BODY,
));
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
});
b.iter(|| {
rt.block_on(identity_svc.call(Request::new())).unwrap();
});
});
group.bench_function("br", |b| {
let rt = actix_rt::Runtime::new().unwrap();
let identity_svc = fn_service(|_: Request| async move {
let mut res = Response::with_body(StatusCode::OK, ());
let body = black_box(Encoder::response(
ContentEncoding::Brotli,
res.head_mut(),
BODY,
));
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
});
b.iter(|| {
rt.block_on(identity_svc.call(Request::new())).unwrap();
});
});
group.bench_function("zstd", |b| {
let rt = actix_rt::Runtime::new().unwrap();
let identity_svc = fn_service(|_: Request| async move {
let mut res = Response::with_body(StatusCode::OK, ());
let body = black_box(Encoder::response(
ContentEncoding::Zstd,
res.head_mut(),
BODY,
));
Ok::<_, Infallible>(black_box(res.set_body(black_box(body))))
});
b.iter(|| {
rt.block_on(identity_svc.call(Request::new())).unwrap();
});
});
group.finish();
}
criterion_group!(benches, compression_responses);
criterion_main!(benches);
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/tls_rustls.rs | actix-http/examples/tls_rustls.rs | //! Demonstrates TLS configuration (via Rustls) for HTTP/1.1 and HTTP/2 connections.
//!
//! Test using cURL:
//!
//! ```console
//! $ curl --insecure https://127.0.0.1:8443
//! Hello World!
//! Protocol: HTTP/2.0
//!
//! $ curl --insecure --http1.1 https://127.0.0.1:8443
//! Hello World!
//! Protocol: HTTP/1.1
//! ```
extern crate tls_rustls_023 as rustls;
use std::io;
use actix_http::{Error, HttpService, Request, Response};
use actix_utils::future::ok;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
tracing::info!("starting HTTP server at https://127.0.0.1:8443");
actix_server::Server::build()
.bind("echo", ("127.0.0.1", 8443), || {
HttpService::build()
.finish(|req: Request| {
let body = format!(
"Hello World!\n\
Protocol: {:?}",
req.head().version
);
ok::<_, Error>(Response::ok().set_body(body))
})
.rustls_0_23(rustls_config())
})?
.run()
.await
}
fn rustls_config() -> rustls::ServerConfig {
let rcgen::CertifiedKey { cert, key_pair } =
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let cert_chain = vec![cert.der().clone()];
let key_der = rustls_pki_types::PrivateKeyDer::Pkcs8(
rustls_pki_types::PrivatePkcs8KeyDer::from(key_pair.serialize_der()),
);
let mut config = rustls::ServerConfig::builder()
.with_no_client_auth()
.with_single_cert(cert_chain, key_der)
.unwrap();
const H1_ALPN: &[u8] = b"http/1.1";
const H2_ALPN: &[u8] = b"h2";
config.alpn_protocols.push(H2_ALPN.to_vec());
config.alpn_protocols.push(H1_ALPN.to_vec());
config
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/bench.rs | actix-http/examples/bench.rs | use std::{convert::Infallible, io, time::Duration};
use actix_http::{HttpService, Request, Response, StatusCode};
use actix_server::Server;
use once_cell::sync::Lazy;
static STR: Lazy<String> = Lazy::new(|| "HELLO WORLD ".repeat(20));
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("dispatcher-benchmark", ("127.0.0.1", 8080), || {
HttpService::build()
.client_request_timeout(Duration::from_secs(1))
.finish(|_: Request| async move {
let mut res = Response::build(StatusCode::OK);
Ok::<_, Infallible>(res.body(&**STR))
})
.tcp()
})?
// limiting number of workers so that bench client is not sharing as many resources
.workers(4)
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/h2spec.rs | actix-http/examples/h2spec.rs | use std::{convert::Infallible, io};
use actix_http::{HttpService, Request, Response, StatusCode};
use actix_server::Server;
use once_cell::sync::Lazy;
static STR: Lazy<String> = Lazy::new(|| "HELLO WORLD ".repeat(100));
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("h2spec", ("127.0.0.1", 8080), || {
HttpService::build()
.h2(|_: Request| async move {
let mut res = Response::build(StatusCode::OK);
Ok::<_, Infallible>(res.body(&**STR))
})
.tcp()
})?
.workers(4)
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/hello-world.rs | actix-http/examples/hello-world.rs | use std::{convert::Infallible, io, time::Duration};
use actix_http::{header::HeaderValue, HttpService, Request, Response, StatusCode};
use actix_server::Server;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("hello-world", ("127.0.0.1", 8080), || {
HttpService::build()
.client_request_timeout(Duration::from_secs(1))
.client_disconnect_timeout(Duration::from_secs(1))
.on_connect_ext(|_, ext| {
ext.insert(42u32);
})
.finish(|req: Request| async move {
info!("{req:?}");
let mut res = Response::build(StatusCode::OK);
res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
let forty_two = req.conn_data::<u32>().unwrap().to_string();
res.insert_header(("x-forty-two", HeaderValue::from_str(&forty_two).unwrap()));
Ok::<_, Infallible>(res.body("Hello world!"))
})
.tcp()
})?
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/streaming-error.rs | actix-http/examples/streaming-error.rs | //! Example showing response body (chunked) stream erroring.
//!
//! Test using `nc` or `curl`.
//! ```sh
//! $ curl -vN 127.0.0.1:8080
//! $ echo 'GET / HTTP/1.1\n\n' | nc 127.0.0.1 8080
//! ```
use std::{convert::Infallible, io, time::Duration};
use actix_http::{body::BodyStream, HttpService, Response};
use actix_server::Server;
use async_stream::stream;
use bytes::Bytes;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("streaming-error", ("127.0.0.1", 8080), || {
HttpService::build()
.finish(|req| async move {
info!("{req:?}");
let res = Response::ok();
Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
yield Ok(Bytes::from("123"));
yield Ok(Bytes::from("456"));
actix_rt::time::sleep(Duration::from_secs(1)).await;
yield Err(io::Error::other("abc"));
})))
})
.tcp()
})?
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/echo.rs | actix-http/examples/echo.rs | use std::{io, time::Duration};
use actix_http::{Error, HttpService, Request, Response, StatusCode};
use actix_server::Server;
use bytes::BytesMut;
use futures_util::StreamExt as _;
use http::header::HeaderValue;
use tracing::info;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("echo", ("127.0.0.1", 8080), || {
HttpService::build()
.client_request_timeout(Duration::from_secs(1))
.client_disconnect_timeout(Duration::from_secs(1))
// handles HTTP/1.1 and HTTP/2
.finish(|mut req: Request| async move {
let mut body = BytesMut::new();
while let Some(item) = req.payload().next().await {
body.extend_from_slice(&item?);
}
info!("request body: {body:?}");
let res = Response::build(StatusCode::OK)
.insert_header(("x-head", HeaderValue::from_static("dummy value!")))
.body(body);
Ok::<_, Error>(res)
})
.tcp() // No TLS
})?
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/h2c-detect.rs | actix-http/examples/h2c-detect.rs | //! An example that supports automatic selection of plaintext h1/h2c connections.
//!
//! Notably, both the following commands will work.
//! ```console
//! $ curl --http1.1 'http://localhost:8080/'
//! $ curl --http2-prior-knowledge 'http://localhost:8080/'
//! ```
use std::{convert::Infallible, io};
use actix_http::{body::BodyStream, HttpService, Request, Response, StatusCode};
use actix_server::Server;
#[tokio::main(flavor = "current_thread")]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("h2c-detect", ("127.0.0.1", 8080), || {
HttpService::build()
.finish(|_req: Request| async move {
Ok::<_, Infallible>(Response::build(StatusCode::OK).body(BodyStream::new(
futures_util::stream::iter([
Ok::<_, String>("123".into()),
Err("wertyuikmnbvcxdfty6t".to_owned()),
]),
)))
})
.tcp_auto_h2c()
})?
.workers(2)
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/ws.rs | actix-http/examples/ws.rs | //! Sets up a WebSocket server over TCP and TLS.
//! Sends a heartbeat message every 4 seconds but does not respond to any incoming frames.
extern crate tls_rustls_023 as rustls;
use std::{
io,
pin::Pin,
task::{Context, Poll},
time::Duration,
};
use actix_http::{body::BodyStream, error::Error, ws, HttpService, Request, Response};
use actix_rt::time::{interval, Interval};
use actix_server::Server;
use bytes::{Bytes, BytesMut};
use bytestring::ByteString;
use futures_core::{ready, Stream};
use tokio_util::codec::Encoder;
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
Server::build()
.bind("tcp", ("127.0.0.1", 8080), || {
HttpService::build().h1(handler).tcp()
})?
.bind("tls", ("127.0.0.1", 8443), || {
HttpService::build()
.finish(handler)
.rustls_0_23(tls_config())
})?
.run()
.await
}
async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
tracing::info!("handshaking");
let mut res = ws::handshake(req.head())?;
// handshake will always fail under HTTP/2
tracing::info!("responding");
res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
}
struct Heartbeat {
codec: ws::Codec,
interval: Interval,
}
impl Heartbeat {
fn new(codec: ws::Codec) -> Self {
Self {
codec,
interval: interval(Duration::from_secs(4)),
}
}
}
impl Stream for Heartbeat {
type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
tracing::trace!("poll");
ready!(self.as_mut().interval.poll_tick(cx));
let mut buffer = BytesMut::new();
self.as_mut()
.codec
.encode(
ws::Message::Text(ByteString::from_static("hello world")),
&mut buffer,
)
.unwrap();
Poll::Ready(Some(Ok(buffer.freeze())))
}
}
fn tls_config() -> rustls::ServerConfig {
let rcgen::CertifiedKey { cert, key_pair } =
rcgen::generate_simple_self_signed(["localhost".to_owned()]).unwrap();
let cert_chain = vec![cert.der().clone()];
let key_der = rustls_pki_types::PrivateKeyDer::Pkcs8(
rustls_pki_types::PrivatePkcs8KeyDer::from(key_pair.serialize_der()),
);
let mut config = rustls::ServerConfig::builder()
.with_no_client_auth()
.with_single_cert(cert_chain, key_der)
.unwrap();
config.alpn_protocols.push(b"http/1.1".to_vec());
config.alpn_protocols.push(b"h2".to_vec());
config
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/echo2.rs | actix-http/examples/echo2.rs | use std::io;
use actix_http::{
body::{BodyStream, MessageBody},
header, Error, HttpMessage, HttpService, Request, Response, StatusCode,
};
async fn handle_request(mut req: Request) -> Result<Response<impl MessageBody>, Error> {
let mut res = Response::build(StatusCode::OK);
if let Some(ct) = req.headers().get(header::CONTENT_TYPE) {
res.insert_header((header::CONTENT_TYPE, ct));
}
// echo request payload stream as (chunked) response body
let res = res.message_body(BodyStream::new(req.payload().take()))?;
Ok(res)
}
#[actix_rt::main]
async fn main() -> io::Result<()> {
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
actix_server::Server::build()
.bind("echo", ("127.0.0.1", 8080), || {
HttpService::build()
// handles HTTP/1.1 only
.h1(handle_request)
// No TLS
.tcp()
})?
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http/examples/actix-web.rs | actix-http/examples/actix-web.rs | use actix_http::HttpService;
use actix_server::Server;
use actix_service::map_config;
use actix_web::{dev::AppConfig, get, App, Responder};
#[get("/")]
async fn index() -> impl Responder {
"Hello, world. From Actix Web!"
}
#[tokio::main(flavor = "current_thread")]
async fn main() -> std::io::Result<()> {
Server::build()
.bind("hello-world", "127.0.0.1:8080", || {
// construct actix-web app
let app = App::new().service(index);
HttpService::build()
// pass the app to service builder
// map_config is used to map App's configuration to ServiceBuilder
// h1 will configure server to only use HTTP/1.1
.h1(map_config(app, |_| AppConfig::default()))
.tcp()
})?
.run()
.await
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-http-test/src/lib.rs | actix-http-test/src/lib.rs | //! Various helpers for Actix applications to use during testing.
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(feature = "openssl")]
extern crate tls_openssl as openssl;
use std::{net, thread, time::Duration};
use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_rt::{net::TcpStream, System};
use actix_server::{Server, ServerServiceFactory};
use awc::{
error::PayloadError, http::header::HeaderMap, ws, Client, ClientRequest, ClientResponse,
Connector,
};
use bytes::Bytes;
use futures_core::stream::Stream;
use http::Method;
use socket2::{Domain, Protocol, Socket, Type};
use tokio::sync::mpsc;
/// Start test server.
///
/// `TestServer` is very simple test server that simplify process of writing integration tests cases
/// for HTTP applications.
///
/// # Examples
///
/// ```
/// use actix_http::{HttpService, Response, Error, StatusCode};
/// use actix_http_test::test_server;
/// use actix_service::{fn_service, map_config, ServiceFactoryExt as _};
///
/// #[actix_rt::test]
/// # async fn hidden_test() {}
/// async fn test_example() {
/// let srv = test_server(|| {
/// HttpService::build()
/// .h1(fn_service(|req| async move {
/// Ok::<_, Error>(Response::ok())
/// }))
/// .tcp()
/// .map_err(|_| ())
/// })
/// .await;
///
/// let req = srv.get("/");
/// let response = req.send().await.unwrap();
///
/// assert_eq!(response.status(), StatusCode::OK);
/// }
/// # actix_rt::System::new().block_on(test_example());
/// ```
pub async fn test_server<F: ServerServiceFactory<TcpStream>>(factory: F) -> TestServer {
let tcp = net::TcpListener::bind("127.0.0.1:0").unwrap();
test_server_with_addr(tcp, factory).await
}
/// Start [`test server`](test_server()) on an existing address binding.
pub async fn test_server_with_addr<F: ServerServiceFactory<TcpStream>>(
tcp: net::TcpListener,
factory: F,
) -> TestServer {
let (started_tx, started_rx) = std::sync::mpsc::channel();
let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);
// run server in separate thread
thread::spawn(move || {
System::new().block_on(async move {
let local_addr = tcp.local_addr().unwrap();
let srv = Server::build()
.workers(1)
.disable_signals()
.system_exit()
.listen("test", tcp, factory)
.expect("test server could not be created");
let srv = srv.run();
started_tx
.send((System::current(), srv.handle(), local_addr))
.unwrap();
// drive server loop
srv.await.unwrap();
});
// notify TestServer that server and system have shut down
// all thread managed resources should be dropped at this point
#[allow(clippy::let_underscore_future)]
let _ = thread_stop_tx.send(());
});
let (system, server, addr) = started_rx.recv().unwrap();
let client = {
#[cfg(feature = "openssl")]
let connector = {
use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};
let mut builder = SslConnector::builder(SslMethod::tls()).unwrap();
builder.set_verify(SslVerifyMode::NONE);
let _ = builder
.set_alpn_protos(b"\x02h2\x08http/1.1")
.map_err(|err| log::error!("Can not set ALPN protocol: {err}"));
Connector::new()
.conn_lifetime(Duration::from_secs(0))
.timeout(Duration::from_millis(30000))
.openssl(builder.build())
};
#[cfg(not(feature = "openssl"))]
let connector = {
Connector::new()
.conn_lifetime(Duration::from_secs(0))
.timeout(Duration::from_millis(30000))
};
Client::builder().connector(connector).finish()
};
TestServer {
server,
client,
system,
addr,
thread_stop_rx,
}
}
/// Test server controller
pub struct TestServer {
server: actix_server::ServerHandle,
client: awc::Client,
system: actix_rt::System,
addr: net::SocketAddr,
thread_stop_rx: mpsc::Receiver<()>,
}
impl TestServer {
/// Construct test server url
pub fn addr(&self) -> net::SocketAddr {
self.addr
}
/// Construct test server url
pub fn url(&self, uri: &str) -> String {
if uri.starts_with('/') {
format!("http://localhost:{}{}", self.addr.port(), uri)
} else {
format!("http://localhost:{}/{}", self.addr.port(), uri)
}
}
/// Construct test HTTPS server URL.
pub fn surl(&self, uri: &str) -> String {
if uri.starts_with('/') {
format!("https://localhost:{}{}", self.addr.port(), uri)
} else {
format!("https://localhost:{}/{}", self.addr.port(), uri)
}
}
/// Create `GET` request
pub fn get<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.get(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `GET` request
pub fn sget<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.get(self.surl(path.as_ref()).as_str())
}
/// Create `POST` request
pub fn post<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.post(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `POST` request
pub fn spost<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.post(self.surl(path.as_ref()).as_str())
}
/// Create `HEAD` request
pub fn head<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.head(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `HEAD` request
pub fn shead<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.head(self.surl(path.as_ref()).as_str())
}
/// Create `PUT` request
pub fn put<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.put(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `PUT` request
pub fn sput<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.put(self.surl(path.as_ref()).as_str())
}
/// Create `PATCH` request
pub fn patch<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.patch(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `PATCH` request
pub fn spatch<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.patch(self.surl(path.as_ref()).as_str())
}
/// Create `DELETE` request
pub fn delete<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.delete(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `DELETE` request
pub fn sdelete<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.delete(self.surl(path.as_ref()).as_str())
}
/// Create `OPTIONS` request
pub fn options<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.options(self.url(path.as_ref()).as_str())
}
/// Create HTTPS `OPTIONS` request
pub fn soptions<S: AsRef<str>>(&self, path: S) -> ClientRequest {
self.client.options(self.surl(path.as_ref()).as_str())
}
/// Connect to test HTTP server
pub fn request<S: AsRef<str>>(&self, method: Method, path: S) -> ClientRequest {
self.client.request(method, path.as_ref())
}
pub async fn load_body<S>(
&mut self,
mut response: ClientResponse<S>,
) -> Result<Bytes, PayloadError>
where
S: Stream<Item = Result<Bytes, PayloadError>> + Unpin + 'static,
{
response.body().limit(10_485_760).await
}
/// Connect to WebSocket server at a given path.
pub async fn ws_at(
&mut self,
path: &str,
) -> Result<Framed<impl AsyncRead + AsyncWrite, ws::Codec>, awc::error::WsClientError> {
let url = self.url(path);
let connect = self.client.ws(url).connect();
connect.await.map(|(_, framed)| framed)
}
/// Connect to a WebSocket server.
pub async fn ws(
&mut self,
) -> Result<Framed<impl AsyncRead + AsyncWrite, ws::Codec>, awc::error::WsClientError> {
self.ws_at("/").await
}
/// Get default HeaderMap of Client.
///
/// Returns Some(&mut HeaderMap) when Client object is unique
/// (No other clone of client exists at the same time).
pub fn client_headers(&mut self) -> Option<&mut HeaderMap> {
self.client.headers()
}
/// Stop HTTP server.
///
/// Waits for spawned `Server` and `System` to (force) shutdown.
pub async fn stop(&mut self) {
// signal server to stop
self.server.stop(false).await;
// also signal system to stop
// though this is handled by `ServerBuilder::exit_system` too
self.system.stop();
// wait for thread to be stopped but don't care about result
let _ = self.thread_stop_rx.recv().await;
}
}
impl Drop for TestServer {
fn drop(&mut self) {
// calls in this Drop impl should be enough to shut down the server, system, and thread
// without needing to await anything
// signal server to stop
#[allow(clippy::let_underscore_future)]
let _ = self.server.stop(true);
// signal system to stop
self.system.stop();
}
}
/// Get a localhost socket address with random, unused port.
pub fn unused_addr() -> net::SocketAddr {
let addr: net::SocketAddr = "127.0.0.1:0".parse().unwrap();
let socket = Socket::new(Domain::IPV4, Type::STREAM, Some(Protocol::TCP)).unwrap();
socket.bind(&addr.into()).unwrap();
socket.set_reuse_address(true).unwrap();
let tcp = net::TcpListener::from(socket);
tcp.local_addr().unwrap()
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/src/lib.rs | actix-multipart-derive/src/lib.rs | //! Multipart form derive macro for Actix Web.
//!
//! See [`macro@MultipartForm`] for usage examples.
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![allow(clippy::disallowed_names)] // false positives in some macro expansions
use std::collections::HashSet;
use bytesize::ByteSize;
use darling::{FromDeriveInput, FromField, FromMeta};
use proc_macro::TokenStream;
use proc_macro2::Ident;
use quote::quote;
use syn::{parse_macro_input, Type};
#[derive(Default, FromMeta)]
enum DuplicateField {
#[default]
Ignore,
Deny,
Replace,
}
#[derive(FromDeriveInput, Default)]
#[darling(attributes(multipart), default)]
struct MultipartFormAttrs {
deny_unknown_fields: bool,
duplicate_field: DuplicateField,
}
#[allow(clippy::disallowed_names)] // false positive in macro expansion
#[derive(FromField, Default)]
#[darling(attributes(multipart), default)]
struct FieldAttrs {
rename: Option<String>,
limit: Option<String>,
}
struct ParsedField<'t> {
serialization_name: String,
rust_name: &'t Ident,
limit: Option<usize>,
ty: &'t Type,
}
/// Implements `MultipartCollect` for a struct so that it can be used with the `MultipartForm`
/// extractor.
///
/// # Basic Use
///
/// Each field type should implement the `FieldReader` trait:
///
/// ```
/// use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
///
/// #[derive(MultipartForm)]
/// struct ImageUpload {
/// description: Text<String>,
/// timestamp: Text<i64>,
/// image: TempFile,
/// }
/// ```
///
/// # Optional and List Fields
///
/// You can also use `Vec<T>` and `Option<T>` provided that `T: FieldReader`.
///
/// A [`Vec`] field corresponds to an upload with multiple parts under the [same field
/// name](https://www.rfc-editor.org/rfc/rfc7578#section-4.3).
///
/// ```
/// use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
///
/// #[derive(MultipartForm)]
/// struct Form {
/// category: Option<Text<String>>,
/// files: Vec<TempFile>,
/// }
/// ```
///
/// # Field Renaming
///
/// You can use the `#[multipart(rename = "foo")]` attribute to receive a field by a different name.
///
/// ```
/// use actix_multipart::form::{tempfile::TempFile, MultipartForm};
///
/// #[derive(MultipartForm)]
/// struct Form {
/// #[multipart(rename = "files[]")]
/// files: Vec<TempFile>,
/// }
/// ```
///
/// # Field Limits
///
/// You can use the `#[multipart(limit = "<size>")]` attribute to set field level limits. The limit
/// string is parsed using [`bytesize`].
///
/// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
///
/// ```
/// use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
///
/// #[derive(MultipartForm)]
/// struct Form {
/// #[multipart(limit = "2 KiB")]
/// description: Text<String>,
///
/// #[multipart(limit = "512 MiB")]
/// files: Vec<TempFile>,
/// }
/// ```
///
/// # Unknown Fields
///
/// By default fields with an unknown name are ignored. They can be rejected using the
/// `#[multipart(deny_unknown_fields)]` attribute:
///
/// ```
/// # use actix_multipart::form::MultipartForm;
/// #[derive(MultipartForm)]
/// #[multipart(deny_unknown_fields)]
/// struct Form { }
/// ```
///
/// # Duplicate Fields
///
/// The behaviour for when multiple fields with the same name are received can be changed using the
/// `#[multipart(duplicate_field = "<behavior>")]` attribute:
///
/// - "ignore": (default) Extra fields are ignored. I.e., the first one is persisted.
/// - "deny": A `MultipartError::UnknownField` error response is returned.
/// - "replace": Each field is processed, but only the last one is persisted.
///
/// Note that `Vec` fields will ignore this option.
///
/// ```
/// # use actix_multipart::form::MultipartForm;
/// #[derive(MultipartForm)]
/// #[multipart(duplicate_field = "deny")]
/// struct Form { }
/// ```
///
/// [`bytesize`]: https://docs.rs/bytesize/2
#[proc_macro_derive(MultipartForm, attributes(multipart))]
pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: syn::DeriveInput = parse_macro_input!(input);
let name = &input.ident;
let data_struct = match &input.data {
syn::Data::Struct(data_struct) => data_struct,
_ => {
return compile_err(syn::Error::new(
input.ident.span(),
"`MultipartForm` can only be derived for structs",
))
}
};
let fields = match &data_struct.fields {
syn::Fields::Named(fields_named) => fields_named,
_ => {
return compile_err(syn::Error::new(
input.ident.span(),
"`MultipartForm` can only be derived for a struct with named fields",
))
}
};
let attrs = match MultipartFormAttrs::from_derive_input(&input) {
Ok(attrs) => attrs,
Err(err) => return err.write_errors().into(),
};
// Parse the field attributes
let parsed = match fields
.named
.iter()
.map(|field| {
let rust_name = field.ident.as_ref().unwrap();
let attrs = FieldAttrs::from_field(field).map_err(|err| err.write_errors())?;
let serialization_name = attrs.rename.unwrap_or_else(|| rust_name.to_string());
let limit = match attrs.limit.map(|limit| match limit.parse::<ByteSize>() {
Ok(ByteSize(size)) => Ok(usize::try_from(size).unwrap()),
Err(err) => Err(syn::Error::new(
field.ident.as_ref().unwrap().span(),
format!("Could not parse size limit `{}`: {}", limit, err),
)),
}) {
Some(Err(err)) => return Err(compile_err(err)),
limit => limit.map(Result::unwrap),
};
Ok(ParsedField {
serialization_name,
rust_name,
limit,
ty: &field.ty,
})
})
.collect::<Result<Vec<_>, TokenStream>>()
{
Ok(attrs) => attrs,
Err(err) => return err,
};
// Check that field names are unique
let mut set = HashSet::new();
for field in &parsed {
if !set.insert(field.serialization_name.clone()) {
return compile_err(syn::Error::new(
field.rust_name.span(),
format!("Multiple fields named: `{}`", field.serialization_name),
));
}
}
// Return value when a field name is not supported by the form
let unknown_field_result = if attrs.deny_unknown_fields {
quote!(::std::result::Result::Err(
::actix_multipart::MultipartError::UnknownField(field.name().unwrap().to_string())
))
} else {
quote!(::std::result::Result::Ok(()))
};
// Value for duplicate action
let duplicate_field = match attrs.duplicate_field {
DuplicateField::Ignore => quote!(::actix_multipart::form::DuplicateField::Ignore),
DuplicateField::Deny => quote!(::actix_multipart::form::DuplicateField::Deny),
DuplicateField::Replace => quote!(::actix_multipart::form::DuplicateField::Replace),
};
// limit() implementation
let mut limit_impl = quote!();
for field in &parsed {
let name = &field.serialization_name;
if let Some(value) = field.limit {
limit_impl.extend(quote!(
#name => ::std::option::Option::Some(#value),
));
}
}
// handle_field() implementation
let mut handle_field_impl = quote!();
for field in &parsed {
let name = &field.serialization_name;
let ty = &field.ty;
handle_field_impl.extend(quote!(
#name => ::std::boxed::Box::pin(
<#ty as ::actix_multipart::form::FieldGroupReader>::handle_field(req, field, limits, state, #duplicate_field)
),
));
}
// from_state() implementation
let mut from_state_impl = quote!();
for field in &parsed {
let name = &field.serialization_name;
let rust_name = &field.rust_name;
let ty = &field.ty;
from_state_impl.extend(quote!(
#rust_name: <#ty as ::actix_multipart::form::FieldGroupReader>::from_state(#name, &mut state)?,
));
}
let gen = quote! {
impl ::actix_multipart::form::MultipartCollect for #name {
fn limit(field_name: &str) -> ::std::option::Option<usize> {
match field_name {
#limit_impl
_ => None,
}
}
fn handle_field<'t>(
req: &'t ::actix_web::HttpRequest,
field: ::actix_multipart::Field,
limits: &'t mut ::actix_multipart::form::Limits,
state: &'t mut ::actix_multipart::form::State,
) -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = ::std::result::Result<(), ::actix_multipart::MultipartError>> + 't>> {
match field.name().unwrap() {
#handle_field_impl
_ => return ::std::boxed::Box::pin(::std::future::ready(#unknown_field_result)),
}
}
fn from_state(mut state: ::actix_multipart::form::State) -> ::std::result::Result<Self, ::actix_multipart::MultipartError> {
Ok(Self {
#from_state_impl
})
}
}
};
gen.into()
}
/// Transform a syn error into a token stream for returning.
fn compile_err(err: syn::Error) -> TokenStream {
TokenStream::from(err.to_compile_error())
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild.rs | actix-multipart-derive/tests/trybuild.rs | #[rustversion_msrv::msrv]
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();
t.pass("tests/trybuild/all-required.rs");
t.pass("tests/trybuild/optional-and-list.rs");
t.pass("tests/trybuild/rename.rs");
t.pass("tests/trybuild/deny-unknown.rs");
t.pass("tests/trybuild/deny-duplicates.rs");
t.compile_fail("tests/trybuild/deny-parse-fail.rs");
t.pass("tests/trybuild/size-limits.rs");
t.compile_fail("tests/trybuild/size-limit-parse-fail.rs");
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/rename.rs | actix-multipart-derive/tests/trybuild/rename.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::{tempfile::TempFile, MultipartForm};
#[derive(MultipartForm)]
struct Form {
#[multipart(rename = "files[]")]
files: Vec<TempFile>,
}
async fn handler(_form: MultipartForm<Form>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/optional-and-list.rs | actix-multipart-derive/tests/trybuild/optional-and-list.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
#[derive(MultipartForm)]
struct Form {
category: Option<Text<String>>,
files: Vec<TempFile>,
}
async fn handler(_form: MultipartForm<Form>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/size-limit-parse-fail.rs | actix-multipart-derive/tests/trybuild/size-limit-parse-fail.rs | use actix_multipart::form::{text::Text, MultipartForm};
#[derive(MultipartForm)]
struct Form {
#[multipart(limit = "2 bytes")]
description: Text<String>,
}
#[derive(MultipartForm)]
struct Form2 {
#[multipart(limit = "2 megabytes")]
description: Text<String>,
}
#[derive(MultipartForm)]
struct Form3 {
#[multipart(limit = "four meters")]
description: Text<String>,
}
fn main() {}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/deny-parse-fail.rs | actix-multipart-derive/tests/trybuild/deny-parse-fail.rs | use actix_multipart::form::MultipartForm;
#[derive(MultipartForm)]
#[multipart(duplicate_field = "no")]
struct Form {}
fn main() {}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/all-required.rs | actix-multipart-derive/tests/trybuild/all-required.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
#[derive(Debug, MultipartForm)]
struct ImageUpload {
description: Text<String>,
timestamp: Text<i64>,
image: TempFile,
}
async fn handler(_form: MultipartForm<ImageUpload>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/deny-duplicates.rs | actix-multipart-derive/tests/trybuild/deny-duplicates.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::MultipartForm;
#[derive(MultipartForm)]
#[multipart(duplicate_field = "deny")]
struct Form {}
async fn handler(_form: MultipartForm<Form>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/size-limits.rs | actix-multipart-derive/tests/trybuild/size-limits.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::{tempfile::TempFile, text::Text, MultipartForm};
#[derive(MultipartForm)]
struct Form {
#[multipart(limit = "2 KiB")]
description: Text<String>,
#[multipart(limit = "512 MiB")]
files: Vec<TempFile>,
}
async fn handler(_form: MultipartForm<Form>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
actix/actix-web | https://github.com/actix/actix-web/blob/024addfc4063814ba9ddd5e0ac06992e74b98e5b/actix-multipart-derive/tests/trybuild/deny-unknown.rs | actix-multipart-derive/tests/trybuild/deny-unknown.rs | use actix_web::{web, App, Responder};
use actix_multipart::form::MultipartForm;
#[derive(MultipartForm)]
#[multipart(deny_unknown_fields)]
struct Form {}
async fn handler(_form: MultipartForm<Form>) -> impl Responder {
"Hello World!"
}
#[actix_web::main]
async fn main() {
App::new().default_service(web::to(handler));
}
| rust | Apache-2.0 | 024addfc4063814ba9ddd5e0ac06992e74b98e5b | 2026-01-04T15:37:59.021020Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ruff_parse_simple.rs | fuzz/fuzz_targets/ruff_parse_simple.rs | //! Fuzzer harness which merely explores the parse/unparse coverage space and tries to make it
//! crash. On its own, this fuzzer is (hopefully) not going to find a crash.
#![no_main]
use libfuzzer_sys::{fuzz_target, Corpus};
use ruff_python_codegen::{Generator, Stylist};
use ruff_python_parser::{parse_module, ParseError};
use ruff_text_size::Ranged;
fn do_fuzz(case: &[u8]) -> Corpus {
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
// just round-trip it once to trigger both parse and unparse
let parsed = match parse_module(code) {
Ok(parsed) => parsed,
Err(ParseError { location, .. }) => {
let offset = location.start().to_usize();
assert!(
code.is_char_boundary(offset),
"Invalid error location {} (not at char boundary)",
offset
);
return Corpus::Keep;
}
};
for token in parsed.tokens() {
let start = token.start().to_usize();
let end = token.end().to_usize();
assert!(
code.is_char_boundary(start),
"Invalid start position {} (not at char boundary)",
start
);
assert!(
code.is_char_boundary(end),
"Invalid end position {} (not at char boundary)",
end
);
}
let stylist = Stylist::from_tokens(parsed.tokens(), code);
let mut generator: Generator = (&stylist).into();
generator.unparse_suite(parsed.suite());
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ruff_formatter_idempotency.rs | fuzz/fuzz_targets/ruff_formatter_idempotency.rs | //! Fuzzer harness which double formats the input and access the idempotency or unsteady state of the
//! ruff's formatter.
#![no_main]
use libfuzzer_sys::{fuzz_target, Corpus};
use similar::TextDiff;
use ruff_python_formatter::{format_module_source, PyFormatOptions};
fn do_fuzz(case: &[u8]) -> Corpus {
// Throw away inputs which aren't utf-8
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
let options = PyFormatOptions::default();
// format the code once
if let Ok(formatted) = format_module_source(code, options.clone()) {
let formatted = formatted.as_code();
// reformat the code second time
if let Ok(reformatted) = format_module_source(formatted, options.clone()) {
let reformatted = reformatted.as_code();
if formatted != reformatted {
let diff = TextDiff::from_lines(formatted, reformatted)
.unified_diff()
.header("Formatted Once", "Formatted Twice")
.to_string();
panic!(
"\nReformatting the code a second time resulted in formatting changes.\nInput: {:?}\ndiff:\n{}",
code, diff
);
}
} else {
panic!(
"Unable to format the code second time:\nInput:{:?}\nformatted:\n{:?}",
code, formatted
);
}
}
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ruff_formatter_validity.rs | fuzz/fuzz_targets/ruff_formatter_validity.rs | //! Fuzzer harness which actively tries to find testcases that cause Ruff to introduce errors into
//! the resulting file.
#![no_main]
use std::collections::HashMap;
use std::sync::OnceLock;
use libfuzzer_sys::{fuzz_target, Corpus};
use ruff_linter::linter::ParseSource;
use ruff_linter::settings::flags::Noqa;
use ruff_linter::settings::LinterSettings;
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::PySourceType;
use ruff_python_formatter::{format_module_source, PyFormatOptions};
use similar::TextDiff;
static SETTINGS: OnceLock<LinterSettings> = OnceLock::new();
fn do_fuzz(case: &[u8]) -> Corpus {
// throw away inputs which aren't utf-8
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
// the settings are immutable to test_snippet, so we avoid re-initialising here
let linter_settings = SETTINGS.get_or_init(LinterSettings::default);
let format_options = PyFormatOptions::default();
let linter_result = ruff_linter::linter::lint_only(
"fuzzed-source.py".as_ref(),
None,
linter_settings,
Noqa::Enabled,
&SourceKind::Python(code.to_string()),
PySourceType::Python,
ParseSource::None,
);
if linter_result.has_invalid_syntax() {
return Corpus::Keep; // keep, but don't continue
}
let mut warnings = HashMap::new();
for msg in &linter_result.diagnostics {
let count: &mut usize = warnings.entry(msg.name()).or_default();
*count += 1;
}
// format the code once
if let Ok(formatted) = format_module_source(code, format_options.clone()) {
let formatted = formatted.as_code().to_string();
let linter_result = ruff_linter::linter::lint_only(
"fuzzed-source.py".as_ref(),
None,
linter_settings,
Noqa::Enabled,
&SourceKind::Python(formatted.clone()),
PySourceType::Python,
ParseSource::None,
);
assert!(
linter_result.has_invalid_syntax(),
"formatter introduced a parse error"
);
for msg in &linter_result.diagnostics {
if let Some(count) = warnings.get_mut(msg.name()) {
if let Some(decremented) = count.checked_sub(1) {
*count = decremented;
} else {
panic!(
"formatter introduced additional linter warning: {msg:?}\ndiff: {}",
TextDiff::from_lines(code, &formatted)
.unified_diff()
.header("Unformatted", "Formatted")
);
}
} else {
panic!(
"formatter introduced new linter warning that was not previously present: {msg:?}\ndiff: {}",
TextDiff::from_lines(code, &formatted)
.unified_diff()
.header("Unformatted", "Formatted")
);
}
}
}
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ty_check_invalid_syntax.rs | fuzz/fuzz_targets/ty_check_invalid_syntax.rs | //! Fuzzer harness that runs the type checker to catch for panics for source code containing
//! syntax errors.
#![no_main]
use std::sync::{Arc, Mutex, OnceLock};
use libfuzzer_sys::{Corpus, fuzz_target};
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files, system_path_to_file};
use ruff_db::system::{
DbWithTestSystem, DbWithWritableSystem as _, System, SystemPathBuf, TestSystem,
};
use ruff_db::vendored::VendoredFileSystem;
use ruff_python_ast::PythonVersion;
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
use ty_module_resolver::{Db as ModuleResolverDb, SearchPathSettings};
use ty_python_semantic::lint::LintRegistry;
use ty_python_semantic::types::check_types;
use ty_python_semantic::{
AnalysisSettings, Db as SemanticDb, Program, ProgramSettings, PythonPlatform,
PythonVersionWithSource, default_lint_registry, lint::RuleSelection,
};
/// Database that can be used for testing.
///
/// Uses an in memory filesystem and it stubs out the vendored files by default.
#[salsa::db]
#[derive(Clone)]
struct TestDb {
storage: salsa::Storage<Self>,
files: Files,
system: TestSystem,
vendored: VendoredFileSystem,
rule_selection: Arc<RuleSelection>,
analysis_settings: Arc<AnalysisSettings>,
}
impl TestDb {
fn new() -> Self {
Self {
storage: salsa::Storage::new(Some(Box::new({
move |event| {
tracing::trace!("event: {:?}", event);
}
}))),
system: TestSystem::default(),
vendored: ty_vendored::file_system().clone(),
files: Files::default(),
rule_selection: RuleSelection::from_registry(default_lint_registry()).into(),
analysis_settings: AnalysisSettings::default().into(),
}
}
}
#[salsa::db]
impl SourceDb for TestDb {
fn vendored(&self) -> &VendoredFileSystem {
&self.vendored
}
fn system(&self) -> &dyn System {
&self.system
}
fn files(&self) -> &Files {
&self.files
}
fn python_version(&self) -> PythonVersion {
Program::get(self).python_version(self)
}
}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system
}
fn test_system_mut(&mut self) -> &mut TestSystem {
&mut self.system
}
}
#[salsa::db]
impl ModuleResolverDb for TestDb {
fn search_paths(&self) -> &ty_module_resolver::SearchPaths {
Program::get(self).search_paths(self)
}
}
#[salsa::db]
impl SemanticDb for TestDb {
fn should_check_file(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}
fn rule_selection(&self, _file: File) -> &RuleSelection {
&self.rule_selection
}
fn analysis_settings(&self) -> &AnalysisSettings {
&self.analysis_settings
}
fn lint_registry(&self) -> &LintRegistry {
default_lint_registry()
}
fn verbose(&self) -> bool {
false
}
}
#[salsa::db]
impl salsa::Database for TestDb {}
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
ProgramSettings {
python_version: PythonVersionWithSource::default(),
python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings::new(vec![src_root])
.to_search_paths(db.system(), db.vendored())
.expect("Valid search path settings"),
},
);
db
}
static TEST_DB: OnceLock<Mutex<TestDb>> = OnceLock::new();
fn do_fuzz(case: &[u8]) -> Corpus {
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
let parsed = parse_unchecked(code, ParseOptions::from(Mode::Module));
if parsed.has_valid_syntax() {
return Corpus::Reject;
}
let mut db = TEST_DB
.get_or_init(|| Mutex::new(setup_db()))
.lock()
.unwrap();
for path in &["/src/a.py", "/src/a.pyi"] {
db.write_file(path, code).unwrap();
let file = system_path_to_file(&*db, path).unwrap();
check_types(&*db, file);
db.memory_file_system().remove_file(path).unwrap();
file.sync(&mut *db);
}
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ruff_parse_idempotency.rs | fuzz/fuzz_targets/ruff_parse_idempotency.rs | //! Fuzzer harness which searches for situations where the parser does not parse or unparse a
//! particular source snippet consistently.
#![no_main]
use libfuzzer_sys::{fuzz_target, Corpus};
use ruff_python_codegen::round_trip;
use similar::TextDiff;
fn do_fuzz(case: &[u8]) -> Corpus {
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
// round trip it once to get a formatted version
if let Ok(first) = round_trip(code) {
// round trip it a second time to get a case to compare against
if let Ok(second) = round_trip(&first) {
if cfg!(feature = "full-idempotency") {
// potentially, we don't want to test for full idempotency, but just for unsteady states
// enable the "full-idempotency" feature when fuzzing for full idempotency
let diff = TextDiff::from_lines(&first, &second)
.unified_diff()
.header("Parsed once", "Parsed twice")
.to_string();
assert_eq!(
first, second,
"\nIdempotency violation (orig => first => second); original: {:?}\ndiff:\n{}",
code, diff
);
} else if first != second {
// by the third time we've round-tripped it, we shouldn't be introducing any more
// changes; if we do, then it's likely that we're in an unsteady parsing state
let third = round_trip(&second).expect("Couldn't round-trip the processed source.");
let diff = TextDiff::from_lines(&second, &third)
.unified_diff()
.header("Parsed twice", "Parsed three times")
.to_string();
assert_eq!(
second, third,
"\nPotential unsteady state (orig => first => second => third); original: {:?}\ndiff:\n{}",
code, diff
);
}
} else {
panic!(
"Unable to perform the second round trip!\nbefore: {:?}\nfirst: {:?}",
code, first
);
}
}
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/fuzz/fuzz_targets/ruff_fix_validity.rs | fuzz/fuzz_targets/ruff_fix_validity.rs | //! Fuzzer harness which actively tries to find testcases that cause Ruff to introduce errors into
//! the resulting file.
#![no_main]
use libfuzzer_sys::{fuzz_target, Corpus};
use ruff_linter::settings::LinterSettings;
use std::sync::OnceLock;
static SETTINGS: OnceLock<LinterSettings> = OnceLock::new();
fn do_fuzz(case: &[u8]) -> Corpus {
// throw away inputs which aren't utf-8
let Ok(code) = std::str::from_utf8(case) else {
return Corpus::Reject;
};
// the settings are immutable to test_snippet, so we avoid re-initialising here
let settings = SETTINGS.get_or_init(LinterSettings::default);
ruff_linter::test::set_max_iterations(usize::MAX);
// unlike in the test framework, where the number of iterations is well-defined, we are only
// looking for situations where a fix is bad; thus, we set the iterations to "infinite"
let _ = ruff_linter::test::test_snippet(code, settings);
Corpus::Keep
}
fuzz_target!(|case: &[u8]| -> Corpus { do_fuzz(case) });
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_diagnostics/src/source_map.rs | crates/ruff_diagnostics/src/source_map.rs | use ruff_text_size::{Ranged, TextSize};
use crate::Edit;
/// Lightweight sourcemap marker representing the source and destination
/// position for an [`Edit`].
#[derive(Debug, PartialEq, Eq)]
pub struct SourceMarker {
/// Position of the marker in the original source.
source: TextSize,
/// Position of the marker in the transformed code.
dest: TextSize,
}
impl SourceMarker {
pub fn new(source: TextSize, dest: TextSize) -> Self {
Self { source, dest }
}
pub const fn source(&self) -> TextSize {
self.source
}
pub const fn dest(&self) -> TextSize {
self.dest
}
}
/// A collection of [`SourceMarker`].
///
/// Sourcemaps are used to map positions in the original source to positions in
/// the transformed code. Here, only the boundaries of edits are tracked instead
/// of every single character.
#[derive(Default, PartialEq, Eq)]
pub struct SourceMap(Vec<SourceMarker>);
impl SourceMap {
/// Returns a slice of all the markers in the sourcemap in the order they
/// were added.
pub fn markers(&self) -> &[SourceMarker] {
&self.0
}
/// Push the start marker for an [`Edit`].
///
/// The `output_length` is the length of the transformed string before the
/// edit is applied.
pub fn push_start_marker(&mut self, edit: &Edit, output_length: TextSize) {
self.push_marker(edit.start(), output_length);
}
/// Push the end marker for an [`Edit`].
///
/// The `output_length` is the length of the transformed string after the
/// edit has been applied.
pub fn push_end_marker(&mut self, edit: &Edit, output_length: TextSize) {
if edit.is_insertion() {
self.push_marker(edit.start(), output_length);
} else {
// Deletion or replacement
self.push_marker(edit.end(), output_length);
}
}
/// Push a new marker to the sourcemap.
pub fn push_marker(&mut self, offset: TextSize, output_length: TextSize) {
self.0.push(SourceMarker {
source: offset,
dest: output_length,
});
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_diagnostics/src/lib.rs | crates/ruff_diagnostics/src/lib.rs | pub use edit::Edit;
pub use fix::{Applicability, Fix, IsolationLevel};
pub use source_map::{SourceMap, SourceMarker};
mod edit;
mod fix;
mod source_map;
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_diagnostics/src/fix.rs | crates/ruff_diagnostics/src/fix.rs | #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use ruff_text_size::{Ranged, TextSize};
use crate::edit::Edit;
/// Indicates if a fix can be applied.
#[derive(
Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is, get_size2::GetSize,
)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
pub enum Applicability {
/// The fix is unsafe and should only be displayed for manual application by the user.
///
/// The fix is likely to be incorrect or the resulting code may have invalid syntax.
DisplayOnly,
/// The fix is unsafe and should only be applied with user opt-in.
///
/// The fix may be what the user intended, but it is uncertain. The resulting code will have
/// valid syntax, but may lead to a change in runtime behavior, the removal of user comments,
/// or both.
Unsafe,
/// The fix is safe and can always be applied.
///
/// The fix is definitely what the user intended, or maintains the exact meaning of the code.
/// User comments are preserved, unless the fix removes an entire statement or expression.
Safe,
}
/// Indicates the level of isolation required to apply a fix.
#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum IsolationLevel {
/// The fix should be applied as long as no other fixes in the same group have been applied.
Group(u32),
/// The fix should be applied as long as it does not overlap with any other fixes.
#[default]
NonOverlapping,
}
/// A collection of [`Edit`] elements to be applied to a source file.
#[derive(Debug, PartialEq, Eq, Clone, Hash, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Fix {
/// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.
edits: Vec<Edit>,
/// The [`Applicability`] of the fix.
applicability: Applicability,
/// The [`IsolationLevel`] of the fix.
isolation_level: IsolationLevel,
}
impl Fix {
/// Create a new [`Fix`] that is [safe](Applicability::Safe) to apply from an [`Edit`] element.
pub fn safe_edit(edit: Edit) -> Self {
Self {
edits: vec![edit],
applicability: Applicability::Safe,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that is [safe](Applicability::Safe) to apply from multiple [`Edit`] elements.
pub fn safe_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability: Applicability::Safe,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that is [unsafe](Applicability::Unsafe) to apply from an [`Edit`] element.
pub fn unsafe_edit(edit: Edit) -> Self {
Self {
edits: vec![edit],
applicability: Applicability::Unsafe,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that is [unsafe](Applicability::Unsafe) to apply from multiple [`Edit`] elements.
pub fn unsafe_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability: Applicability::Unsafe,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from an [`Edit`] element .
pub fn display_only_edit(edit: Edit) -> Self {
Self {
edits: vec![edit],
applicability: Applicability::DisplayOnly,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] that should only [display](Applicability::DisplayOnly) and not apply from multiple [`Edit`] elements.
pub fn display_only_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability: Applicability::DisplayOnly,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] with the specified [`Applicability`] to apply an [`Edit`] element.
pub fn applicable_edit(edit: Edit, applicability: Applicability) -> Self {
Self {
edits: vec![edit],
applicability,
isolation_level: IsolationLevel::default(),
}
}
/// Create a new [`Fix`] with the specified [`Applicability`] to apply multiple [`Edit`] elements.
pub fn applicable_edits(
edit: Edit,
rest: impl IntoIterator<Item = Edit>,
applicability: Applicability,
) -> Self {
let mut edits: Vec<Edit> = std::iter::once(edit).chain(rest).collect();
edits.sort_by_key(|edit| (edit.start(), edit.end()));
Self {
edits,
applicability,
isolation_level: IsolationLevel::default(),
}
}
/// Return the [`TextSize`] of the first [`Edit`] in the [`Fix`].
pub fn min_start(&self) -> Option<TextSize> {
self.edits.first().map(Edit::start)
}
/// Return a slice of the [`Edit`] elements in the [`Fix`], sorted by [`Edit::start`] in ascending order.
pub fn edits(&self) -> &[Edit] {
&self.edits
}
pub fn into_edits(self) -> Vec<Edit> {
self.edits
}
/// Return the [`Applicability`] of the [`Fix`].
pub fn applicability(&self) -> Applicability {
self.applicability
}
/// Return the [`IsolationLevel`] of the [`Fix`].
pub fn isolation(&self) -> IsolationLevel {
self.isolation_level
}
/// Create a new [`Fix`] with the given [`IsolationLevel`].
#[must_use]
pub fn isolate(mut self, isolation: IsolationLevel) -> Self {
self.isolation_level = isolation;
self
}
/// Return [`true`] if this [`Fix`] should be applied with at a given [`Applicability`].
pub fn applies(&self, applicability: Applicability) -> bool {
self.applicability >= applicability
}
/// Create a new [`Fix`] with the given [`Applicability`].
#[must_use]
pub fn with_applicability(mut self, applicability: Applicability) -> Self {
self.applicability = applicability;
self
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_diagnostics/src/edit.rs | crates/ruff_diagnostics/src/edit.rs | use std::cmp::Ordering;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use ruff_text_size::{Ranged, TextRange, TextSize};
/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
/// The start location of the edit.
range: TextRange,
/// The replacement content to insert between the start and end locations.
content: Option<Box<str>>,
}
impl Edit {
/// Creates an edit that deletes the content in the `start` to `end` range.
#[inline]
pub const fn deletion(start: TextSize, end: TextSize) -> Self {
Self::range_deletion(TextRange::new(start, end))
}
/// Creates an edit that deletes the content in `range`.
pub const fn range_deletion(range: TextRange) -> Self {
Self {
content: None,
range,
}
}
/// Creates an edit that replaces the content in the `start` to `end` range with `content`.
#[inline]
pub fn replacement(content: String, start: TextSize, end: TextSize) -> Self {
Self::range_replacement(content, TextRange::new(start, end))
}
/// Creates an edit that replaces the content in `range` with `content`.
pub fn range_replacement(content: String, range: TextRange) -> Self {
debug_assert!(!content.is_empty(), "Prefer `Edit::deletion`");
Self {
content: Some(Box::from(content)),
range,
}
}
/// Creates an edit that inserts `content` at the [`TextSize`] `at`.
pub fn insertion(content: String, at: TextSize) -> Self {
debug_assert!(!content.is_empty(), "Insert content is empty");
Self {
content: Some(Box::from(content)),
range: TextRange::new(at, at),
}
}
/// Returns the new content for an insertion or deletion.
pub fn content(&self) -> Option<&str> {
self.content.as_deref()
}
pub fn into_content(self) -> Option<Box<str>> {
self.content
}
fn kind(&self) -> EditOperationKind {
if self.content.is_none() {
EditOperationKind::Deletion
} else if self.range.is_empty() {
EditOperationKind::Insertion
} else {
EditOperationKind::Replacement
}
}
/// Returns `true` if this edit deletes content from the source document.
#[inline]
pub fn is_deletion(&self) -> bool {
self.kind().is_deletion()
}
/// Returns `true` if this edit inserts new content into the source document.
#[inline]
pub fn is_insertion(&self) -> bool {
self.kind().is_insertion()
}
/// Returns `true` if this edit replaces some existing content with new content.
#[inline]
pub fn is_replacement(&self) -> bool {
self.kind().is_replacement()
}
}
impl Ord for Edit {
fn cmp(&self, other: &Self) -> Ordering {
self.start()
.cmp(&other.start())
.then_with(|| self.end().cmp(&other.end()))
.then_with(|| self.content.cmp(&other.content))
}
}
impl PartialOrd for Edit {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ranged for Edit {
fn range(&self) -> TextRange {
self.range
}
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum EditOperationKind {
/// Edit that inserts new content into the source document.
Insertion,
/// Edit that deletes content from the source document.
Deletion,
/// Edit that replaces content from the source document.
Replacement,
}
impl EditOperationKind {
pub(crate) const fn is_insertion(self) -> bool {
matches!(self, EditOperationKind::Insertion)
}
pub(crate) const fn is_deletion(self) -> bool {
matches!(self, EditOperationKind::Deletion)
}
pub(crate) const fn is_replacement(self) -> bool {
matches!(self, EditOperationKind::Replacement)
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/src/lib.rs | crates/ruff_benchmark/src/lib.rs | use std::path::PathBuf;
#[cfg(any(feature = "ty_instrumented", feature = "ruff_instrumented"))]
pub mod criterion;
pub mod real_world_projects;
pub static NUMPY_GLOBALS: TestFile = TestFile::new(
"numpy/globals.py",
include_str!("../resources/numpy/globals.py"),
);
pub static UNICODE_PYPINYIN: TestFile = TestFile::new(
"unicode/pypinyin.py",
include_str!("../resources/pypinyin.py"),
);
pub static PYDANTIC_TYPES: TestFile = TestFile::new(
"pydantic/types.py",
include_str!("../resources/pydantic/types.py"),
);
pub static NUMPY_CTYPESLIB: TestFile = TestFile::new(
"numpy/ctypeslib.py",
include_str!("../resources/numpy/ctypeslib.py"),
);
// "https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py"
pub static LARGE_DATASET: TestFile = TestFile::new(
"large/dataset.py",
include_str!("../resources/large/dataset.py"),
);
/// Relative size of a test case. Benchmarks can use it to configure the time for how long a benchmark should run to get stable results.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
pub enum TestCaseSpeed {
/// A test case that is fast to run
Fast,
/// A normal test case
Normal,
/// A slow test case
Slow,
}
#[derive(Debug, Clone)]
pub struct TestCase {
file: TestFile,
speed: TestCaseSpeed,
}
impl TestCase {
pub const fn fast(file: TestFile) -> Self {
Self {
file,
speed: TestCaseSpeed::Fast,
}
}
pub const fn normal(file: TestFile) -> Self {
Self {
file,
speed: TestCaseSpeed::Normal,
}
}
pub const fn slow(file: TestFile) -> Self {
Self {
file,
speed: TestCaseSpeed::Slow,
}
}
pub fn code(&self) -> &str {
self.file.code
}
pub fn name(&self) -> &str {
self.file.name
}
pub fn speed(&self) -> TestCaseSpeed {
self.speed
}
pub fn path(&self) -> PathBuf {
PathBuf::from(file!())
.parent()
.unwrap()
.parent()
.unwrap()
.join("resources")
.join(self.name())
}
}
#[derive(Debug, Clone)]
pub struct TestFile {
name: &'static str,
code: &'static str,
}
impl TestFile {
pub const fn new(name: &'static str, code: &'static str) -> Self {
Self { name, code }
}
pub fn code(&self) -> &str {
self.code
}
pub fn name(&self) -> &str {
self.name
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/src/real_world_projects.rs | crates/ruff_benchmark/src/real_world_projects.rs | #![allow(clippy::print_stderr)]
//! Infrastructure for benchmarking real-world Python projects.
//!
//! The module uses a setup similar to mypy primer's, which should make it easy
//! to add new benchmarks for projects in [mypy primer's project's list](https://github.com/hauntsaninja/mypy_primer/blob/ebaa9fd27b51a278873b63676fd25490cec6823b/mypy_primer/projects.py#L74).
//!
//! The basic steps for a project are:
//! 1. Clone or update the project into a directory inside `./target`. The commits are pinnted to prevent flaky benchmark results due to new commits.
//! 2. For projects with dependencies, run uv to create a virtual environment and install the dependencies.
//! 3. (optionally) Copy the entire project structure into a memory file system to reduce the IO noise in benchmarks.
//! 4. (not in this module) Create a `ProjectDatabase` and run the benchmark.
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::Instant;
use anyhow::{Context, Result};
use ruff_db::system::{MemoryFileSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::PythonVersion;
/// Configuration for a real-world project to benchmark
#[derive(Debug, Clone)]
pub struct RealWorldProject<'a> {
// The name of the project.
pub name: &'a str,
/// The project's GIT repository. Must be publicly accessible.
pub repository: &'a str,
/// Specific commit hash to checkout
pub commit: &'a str,
/// List of paths within the project to check (`ty check <paths>`)
pub paths: &'a [&'a str],
/// Dependencies to install via uv
pub dependencies: &'a [&'a str],
/// Limit candidate packages to those that were uploaded prior to a given point in time (ISO 8601 format).
/// Maps to uv's `exclude-newer`.
pub max_dep_date: &'a str,
/// Python version to use
pub python_version: PythonVersion,
}
impl<'a> RealWorldProject<'a> {
/// Setup a real-world project for benchmarking
pub fn setup(self) -> Result<InstalledProject<'a>> {
let start = Instant::now();
tracing::debug!("Setting up project {}", self.name);
// Create project directory in cargo target
let project_root = get_project_cache_dir(self.name)?;
// Clone the repository if it doesn't exist, or update if it does
if project_root.exists() {
tracing::debug!("Updating repository for project '{}'...", self.name);
let start = std::time::Instant::now();
update_repository(&project_root, self.commit)?;
tracing::debug!(
"Repository update completed in {:.2}s",
start.elapsed().as_secs_f64()
);
} else {
tracing::debug!("Cloning repository for project '{}'...", self.name);
let start = std::time::Instant::now();
clone_repository(self.repository, &project_root, self.commit)?;
tracing::debug!(
"Repository clone completed in {:.2}s",
start.elapsed().as_secs_f64()
);
}
let checkout = Checkout {
path: project_root,
project: self,
};
// Install dependencies if specified
tracing::debug!(
"Installing {} dependencies for project '{}'...",
checkout.project().dependencies.len(),
checkout.project().name
);
let start_install = std::time::Instant::now();
install_dependencies(&checkout)?;
tracing::debug!(
"Dependency installation completed in {:.2}s",
start_install.elapsed().as_secs_f64()
);
tracing::debug!("Project setup took: {:.2}s", start.elapsed().as_secs_f64());
Ok(InstalledProject {
path: checkout.path,
config: checkout.project,
})
}
}
/// A cloned project checkout on disk, prior to (or during) dependency installation.
struct Checkout<'a> {
    // Benchmark configuration describing the checked-out project.
    project: RealWorldProject<'a>,
    // Filesystem path of the clone.
    path: PathBuf,
}
impl<'a> Checkout<'a> {
    /// Get the virtual environment path
    fn venv_path(&self) -> PathBuf {
        self.path.join(".venv")
    }
    /// The benchmark configuration for this checkout.
    fn project(&self) -> &RealWorldProject<'a> {
        &self.project
    }
}
/// Checked out project with its dependencies installed.
pub struct InstalledProject<'a> {
    /// Path to the cloned project
    pub path: PathBuf,
    /// Project configuration
    pub config: RealWorldProject<'a>,
}
impl<'a> InstalledProject<'a> {
    /// Get the project configuration
    pub fn config(&self) -> &RealWorldProject<'a> {
        &self.config
    }
    /// Get the benchmark paths
    pub fn check_paths(&self) -> &[&str] {
        self.config.paths
    }
    /// Get the virtual environment path
    pub fn venv_path(&self) -> PathBuf {
        self.path.join(".venv")
    }
    /// Copies the entire project to a memory file system.
    ///
    /// Binary files and `pyvenv.cfg` are skipped (see `copy_directory_recursive`).
    pub fn copy_to_memory_fs(&self) -> anyhow::Result<MemoryFileSystem> {
        let fs = MemoryFileSystem::new();
        copy_directory_recursive(&fs, &self.path, &SystemPathBuf::from("/"))?;
        Ok(fs)
    }
}
/// Get the cache directory for a project in the cargo target directory
fn get_project_cache_dir(project_name: &str) -> Result<std::path::PathBuf> {
    // Fall back to a relative `target` directory when cargo metadata is unavailable.
    let relative_target = cargo_target_directory()
        .cloned()
        .unwrap_or_else(|| PathBuf::from("target"));
    let absolute_target =
        std::path::absolute(relative_target).context("Failed to construct an absolute path")?;
    let cache_dir = absolute_target.join("benchmark_cache").join(project_name);
    // Ensure the shared `benchmark_cache` directory exists; the per-project
    // directory itself is created later (by `git clone`).
    if let Some(parent) = cache_dir.parent() {
        std::fs::create_dir_all(parent).context("Failed to create cache directory")?;
    }
    Ok(cache_dir)
}
/// Update an existing repository
///
/// Fetches `commit` from `origin` and checks it out. Returns an error when a
/// git invocation fails to spawn or exits unsuccessfully.
fn update_repository(project_root: &Path, commit: &str) -> Result<()> {
    // Fetch the specific commit so the subsequent checkout can find it.
    let output = Command::new("git")
        .args(["fetch", "origin", commit])
        .current_dir(project_root)
        .output()
        .context("Failed to execute git fetch command")?;
    // Consistency fix: use `ensure!` like the checkout below (and like
    // `clone_repository`) instead of a manual `if !success { bail! }`.
    anyhow::ensure!(
        output.status.success(),
        "Git fetch of commit {} failed: {}",
        commit,
        String::from_utf8_lossy(&output.stderr)
    );
    // Checkout specific commit
    let output = Command::new("git")
        .args(["checkout", commit])
        .current_dir(project_root)
        .output()
        .context("Failed to execute git checkout command")?;
    anyhow::ensure!(
        output.status.success(),
        "Git checkout of commit {} failed: {}",
        commit,
        String::from_utf8_lossy(&output.stderr)
    );
    Ok(())
}
/// Clone a git repository to the specified directory
///
/// Performs a blobless, no-checkout clone, then fetches and checks out the
/// requested `commit`. Returns an error when any git invocation fails.
fn clone_repository(repo_url: &str, target_dir: &Path, commit: &str) -> Result<()> {
    // Create parent directory if it doesn't exist
    if let Some(parent) = target_dir.parent() {
        std::fs::create_dir_all(parent).context("Failed to create parent directory for clone")?;
    }
    // Clone with minimal depth and fetch only the specific commit.
    // Robustness fix: pass `target_dir` as a `Path` via `.arg()` instead of
    // `to_str().unwrap()`, which panicked on non-UTF-8 paths.
    let output = Command::new("git")
        .args([
            "clone",
            "--filter=blob:none", // Don't download large files initially
            "--no-checkout",      // Don't checkout files yet
            repo_url,
        ])
        .arg(target_dir)
        .output()
        .context("Failed to execute git clone command")?;
    anyhow::ensure!(
        output.status.success(),
        "Git clone failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    // Fetch the specific commit
    let output = Command::new("git")
        .args(["fetch", "origin", commit])
        .current_dir(target_dir)
        .output()
        .context("Failed to execute git fetch command")?;
    anyhow::ensure!(
        output.status.success(),
        "Git fetch of commit {} failed: {}",
        commit,
        String::from_utf8_lossy(&output.stderr)
    );
    // Checkout the specific commit
    let output = Command::new("git")
        .args(["checkout", commit])
        .current_dir(target_dir)
        .output()
        .context("Failed to execute git checkout command")?;
    anyhow::ensure!(
        output.status.success(),
        "Git checkout of commit {} failed: {}",
        commit,
        String::from_utf8_lossy(&output.stderr)
    );
    Ok(())
}
/// Install dependencies using uv with date constraints
///
/// Creates (or reuses) a virtual environment for the project's Python version,
/// then installs its dependencies with `--exclude-newer` pinned to
/// `max_dep_date`. A project with no dependencies only gets the venv.
fn install_dependencies(checkout: &Checkout) -> Result<()> {
    // Check if uv is available
    let uv_check = Command::new("uv")
        .arg("--version")
        .output()
        .context("Failed to execute uv version check.")?;
    if !uv_check.status.success() {
        anyhow::bail!(
            "uv is not installed or not found in PATH. If you need to install it, follow the instructions at https://docs.astral.sh/uv/getting-started/installation/"
        );
    }
    let venv_path = checkout.venv_path();
    let python_version_str = checkout.project().python_version.to_string();
    // Create the virtual environment (idempotent via --allow-existing).
    let output = Command::new("uv")
        .args(["venv", "--python", &python_version_str, "--allow-existing"])
        .arg(&venv_path)
        .output()
        .context("Failed to execute uv venv command")?;
    anyhow::ensure!(
        output.status.success(),
        "Failed to create virtual environment: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    if checkout.project().dependencies.is_empty() {
        tracing::debug!(
            "No dependencies to install for project '{}'",
            checkout.project().name
        );
        return Ok(());
    }
    // Install dependencies with date constraint in the isolated environment.
    // Robustness fix: pass the venv path as a `Path` via `.arg()` instead of
    // `to_str().unwrap()`, which panicked on non-UTF-8 paths.
    let mut cmd = Command::new("uv");
    cmd.args(["pip", "install", "--python"])
        .arg(&venv_path)
        .args(["--exclude-newer", checkout.project().max_dep_date])
        .args(checkout.project().dependencies);
    let output = cmd
        .output()
        .context("Failed to execute uv pip install command")?;
    anyhow::ensure!(
        output.status.success(),
        "Dependency installation failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    Ok(())
}
/// Recursively load a directory into the memory filesystem
///
/// Skips `pyvenv.cfg` (its Python path would be invalid in the memory FS),
/// files that are not valid UTF-8 (treated as binary), and hidden entries
/// other than `.venv`.
fn copy_directory_recursive(
    fs: &MemoryFileSystem,
    source_path: &Path,
    dest_path: &SystemPath,
) -> Result<()> {
    if source_path.is_file() {
        if source_path.file_name().and_then(OsStr::to_str) == Some("pyvenv.cfg") {
            // Skip pyvenv.cfg files because the Python path will be invalid.
            return Ok(());
        }
        match std::fs::read_to_string(source_path) {
            Ok(content) => {
                fs.write_file_all(dest_path.to_path_buf(), content)
                    .with_context(|| {
                        format!("Failed to write file to memory filesystem: {dest_path}")
                    })?;
            }
            Err(error) => {
                if error.kind() == std::io::ErrorKind::InvalidData {
                    // Skip binary files.
                    return Ok(());
                }
                return Err(error)
                    .with_context(|| format!("Failed to read file: {}", source_path.display()));
            }
        }
    } else if source_path.is_dir() {
        // Create directory in memory fs
        fs.create_directory_all(dest_path.to_path_buf())
            .with_context(|| {
                format!("Failed to create directory in memory filesystem: {dest_path}")
            })?;
        // Read directory contents
        let entries = std::fs::read_dir(source_path)
            .with_context(|| format!("Failed to read directory: {}", source_path.display()))?;
        for entry in entries {
            let entry = entry.with_context(|| {
                format!("Failed to read directory entry: {}", source_path.display())
            })?;
            let file_name = entry.file_name();
            let file_name = file_name.to_str().context("Expected UTF8 path")?;
            // Skip hidden files and directories (e.g. `.git`), except `.venv`.
            // The old `|| matches!(file_name, ".git")` clause was redundant:
            // ".git" already starts with '.', so behavior is unchanged.
            if file_name != ".venv" && file_name.starts_with('.') {
                continue;
            }
            let source_child = source_path.join(file_name);
            let dest_child = dest_path.join(file_name);
            copy_directory_recursive(fs, &source_child, &dest_child)?;
        }
    }
    Ok(())
}
// Memoizes the resolved cargo target directory for the lifetime of the process.
static CARGO_TARGET_DIR: std::sync::OnceLock<Option<PathBuf>> = std::sync::OnceLock::new();
/// Resolve cargo's target directory, preferring the `CARGO_TARGET_DIR`
/// environment variable and falling back to `cargo metadata`.
///
/// Returns `None` when neither source yields a path. The lookup runs at most
/// once per process (cached in `CARGO_TARGET_DIR`).
fn cargo_target_directory() -> Option<&'static PathBuf> {
    CARGO_TARGET_DIR
        .get_or_init(|| {
            // Only the single field we need from `cargo metadata`'s JSON output.
            #[derive(serde::Deserialize)]
            struct Metadata {
                target_directory: PathBuf,
            }
            std::env::var_os("CARGO_TARGET_DIR")
                .map(PathBuf::from)
                .or_else(|| {
                    // `CARGO` points at the cargo binary when run under cargo;
                    // the `?` bails out (returns `None`) when it is unset.
                    let output = Command::new(std::env::var_os("CARGO")?)
                        .args(["metadata", "--format-version", "1"])
                        .output()
                        .ok()?;
                    let metadata: Metadata = serde_json::from_slice(&output.stdout).ok()?;
                    Some(metadata.target_directory)
                })
        })
        .as_ref()
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/src/criterion.rs | crates/ruff_benchmark/src/criterion.rs | //! This module re-exports the criterion API but picks the right backend depending on whether
//! the benchmarks are built to run locally or with codspeed.
//! The compat layer is required because codspeed doesn't support all platforms.
//! See [#12662](https://github.com/astral-sh/ruff/issues/12662)
#[cfg(not(codspeed))]
pub use criterion::*;
#[cfg(not(codspeed))]
pub type BenchmarkGroup<'a> = criterion::BenchmarkGroup<'a, measurement::WallTime>;
#[cfg(codspeed)]
pub use codspeed_criterion_compat::*;
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/linter.rs | crates/ruff_benchmark/benches/linter.rs | use ruff_benchmark::criterion;
use criterion::{
BenchmarkGroup, BenchmarkId, Criterion, Throughput, criterion_group, criterion_main,
};
use ruff_benchmark::{
LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_linter::linter::{ParseSource, lint_only};
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::settings::rule_table::RuleTable;
use ruff_linter::settings::types::PreviewMode;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::source_kind::SourceKind;
use ruff_linter::{RuleSelector, registry::Rule};
use ruff_python_ast::PySourceType;
use ruff_python_parser::parse_module;
// On Windows, use mimalloc as the global allocator for the benchmarks.
#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
// On the other listed targets, use jemalloc as the global allocator.
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
// Disable decay after 10s because it can show up as *random* slow allocations
// in benchmarks. We don't need purging in benchmarks because it isn't important
// to give unallocated pages back to the OS.
// https://jemalloc.net/jemalloc.3.html#opt.dirty_decay_ms
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
    )
))]
#[unsafe(export_name = "_rjem_malloc_conf")]
#[expect(non_upper_case_globals)]
#[expect(unsafe_code)]
pub static _rjem_malloc_conf: &[u8] = b"dirty_decay_ms:-1,muzzy_decay_ms:-1\0";
/// Assemble the shared benchmark corpus, tagged by expected runtime.
fn create_test_cases() -> Vec<TestCase> {
    let mut cases = Vec::with_capacity(5);
    cases.push(TestCase::fast(NUMPY_GLOBALS.clone()));
    cases.push(TestCase::fast(UNICODE_PYPINYIN.clone()));
    cases.push(TestCase::normal(PYDANTIC_TYPES.clone()));
    cases.push(TestCase::normal(NUMPY_CTYPESLIB.clone()));
    cases.push(TestCase::slow(LARGE_DATASET.clone()));
    cases
}
/// Run every test case through `lint_only` with the given settings, reusing a
/// pre-parsed AST so parsing cost is excluded from the measurement.
fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
    let test_cases = create_test_cases();
    for case in test_cases {
        // Report throughput in bytes of source code per second.
        group.throughput(Throughput::Bytes(case.code().len() as u64));
        group.bench_with_input(
            BenchmarkId::from_parameter(case.name()),
            &case,
            |b, case| {
                // Parse the source.
                let parsed =
                    parse_module(case.code()).expect("Input should be a valid Python code");
                // Clone the parse result per iteration (outside the timed
                // closure) so each run gets a fresh AST to consume.
                b.iter_batched(
                    || parsed.clone(),
                    |parsed| {
                        // Assert that file contains no parse errors
                        assert!(parsed.has_valid_syntax());
                        let path = case.path();
                        lint_only(
                            &path,
                            None,
                            settings,
                            flags::Noqa::Enabled,
                            &SourceKind::Python(case.code().to_string()),
                            PySourceType::from(path.as_path()),
                            ParseSource::Precomputed(parsed),
                        )
                    },
                    criterion::BatchSize::SmallInput,
                );
            },
        );
    }
    group.finish();
}
/// Benchmark linting with ruff's default rule set.
fn benchmark_default_rules(criterion: &mut Criterion) {
    benchmark_linter(
        criterion.benchmark_group("linter/default-rules"),
        &LinterSettings::default(),
    );
}
/// Disables IO based rules because they are a source of flakiness
fn disable_io_rules(rules: &mut RuleTable) {
    for rule in [
        Rule::ShebangMissingExecutableFile,
        Rule::ShebangNotExecutable,
    ] {
        rules.disable(rule);
    }
}
/// Benchmark linting with every stable rule enabled (IO-based rules excluded).
fn benchmark_all_rules(criterion: &mut Criterion) {
    let preview_options = PreviewOptions {
        mode: PreviewMode::Disabled,
        require_explicit: false,
    };
    let mut rules: RuleTable = RuleSelector::All.rules(&preview_options).collect();
    disable_io_rules(&mut rules);
    benchmark_linter(
        criterion.benchmark_group("linter/all-rules"),
        &LinterSettings {
            rules,
            ..LinterSettings::default()
        },
    );
}
/// Benchmark linting with all rules, including preview rules, enabled
/// (IO-based rules excluded).
fn benchmark_preview_rules(criterion: &mut Criterion) {
    let mut rules: RuleTable = RuleSelector::All.all_rules().collect();
    disable_io_rules(&mut rules);
    benchmark_linter(
        criterion.benchmark_group("linter/all-with-preview-rules"),
        &LinterSettings {
            rules,
            preview: PreviewMode::Enabled,
            ..LinterSettings::default()
        },
    );
}
// Register one criterion group per rule configuration and emit `main`.
criterion_group!(default_rules, benchmark_default_rules);
criterion_group!(all_rules, benchmark_all_rules);
criterion_group!(preview_rules, benchmark_preview_rules);
criterion_main!(default_rules, all_rules, preview_rules);
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/lexer.rs | crates/ruff_benchmark/benches/lexer.rs | use ruff_benchmark::criterion;
use criterion::{
BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
};
use ruff_benchmark::{
LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::{Mode, lexer};
// On Windows, use mimalloc as the global allocator for the benchmarks.
#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
// On the other listed targets, use jemalloc as the global allocator.
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
/// Assemble the shared benchmark corpus, tagged by expected runtime.
fn create_test_cases() -> Vec<TestCase> {
    let mut cases = Vec::with_capacity(5);
    cases.push(TestCase::fast(NUMPY_GLOBALS.clone()));
    cases.push(TestCase::fast(UNICODE_PYPINYIN.clone()));
    cases.push(TestCase::normal(PYDANTIC_TYPES.clone()));
    cases.push(TestCase::normal(NUMPY_CTYPESLIB.clone()));
    cases.push(TestCase::slow(LARGE_DATASET.clone()));
    cases
}
/// Benchmark the lexer by draining the token stream of each test case.
fn benchmark_lexer(criterion: &mut Criterion<WallTime>) {
    let test_cases = create_test_cases();
    let mut group = criterion.benchmark_group("lexer");
    for case in test_cases {
        // Report throughput in bytes of source code per second.
        group.throughput(Throughput::Bytes(case.code().len() as u64));
        group.bench_with_input(
            BenchmarkId::from_parameter(case.name()),
            &case,
            |b, case| {
                b.iter(|| {
                    let mut lexer = lexer::lex(case.code(), Mode::Module);
                    // Pull tokens until EOF so the whole input is lexed.
                    loop {
                        let token = lexer.next_token();
                        match token {
                            TokenKind::EndOfFile => break,
                            // Fixed the ungrammatical panic message
                            // ("Input to be a valid Python source code").
                            TokenKind::Unknown => {
                                panic!("Expected input to be valid Python source code")
                            }
                            _ => {}
                        }
                    }
                });
            },
        );
    }
    group.finish();
}
// Register the lexer benchmark group and emit the criterion `main` entry point.
criterion_group!(lexer, benchmark_lexer);
criterion_main!(lexer);
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/parser.rs | crates/ruff_benchmark/benches/parser.rs | use ruff_benchmark::criterion;
use criterion::{
BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
};
use ruff_benchmark::{
LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_python_ast::Stmt;
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_parser::parse_module;
// On Windows, use mimalloc as the global allocator for the benchmarks.
#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
// On the other listed targets, use jemalloc as the global allocator.
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
/// Assemble the shared benchmark corpus, tagged by expected runtime.
fn create_test_cases() -> Vec<TestCase> {
    let mut cases = Vec::with_capacity(5);
    cases.push(TestCase::fast(NUMPY_GLOBALS.clone()));
    cases.push(TestCase::fast(UNICODE_PYPINYIN.clone()));
    cases.push(TestCase::normal(PYDANTIC_TYPES.clone()));
    cases.push(TestCase::normal(NUMPY_CTYPESLIB.clone()));
    cases.push(TestCase::slow(LARGE_DATASET.clone()));
    cases
}
/// Statement visitor that counts every statement it encounters; used to force
/// a full AST traversal so the parse result cannot be optimized away.
struct CountVisitor {
    // Number of statements visited so far.
    count: usize,
}
impl<'a> StatementVisitor<'a> for CountVisitor {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        // Recurse into nested statements, then count this one.
        walk_stmt(self, stmt);
        self.count += 1;
    }
}
/// Benchmark the parser by parsing each test case and walking the resulting AST.
fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
    let test_cases = create_test_cases();
    let mut group = criterion.benchmark_group("parser");
    for case in test_cases {
        // Report throughput in bytes of source code per second.
        group.throughput(Throughput::Bytes(case.code().len() as u64));
        group.bench_with_input(
            BenchmarkId::from_parameter(case.name()),
            &case,
            |b, case| {
                b.iter(|| {
                    let parsed = parse_module(case.code())
                        .expect("Input should be a valid Python code")
                        .into_suite();
                    // Walk and count statements so the parse result is used.
                    let mut visitor = CountVisitor { count: 0 };
                    visitor.visit_body(&parsed);
                    visitor.count
                });
            },
        );
    }
    group.finish();
}
// Register the parser benchmark group and emit the criterion `main` entry point.
criterion_group!(parser, benchmark_parser);
criterion_main!(parser);
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/ty_walltime.rs | crates/ruff_benchmark/benches/ty_walltime.rs | use divan::{Bencher, bench};
use std::fmt::{Display, Formatter};
use rayon::ThreadPoolBuilder;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::testing::setup_logging_with_filter;
use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::{Db, ProjectDatabase, ProjectMetadata};
/// A lazily-installed real-world-project benchmark definition.
struct Benchmark<'a> {
    // Project definition (repository, commit, dependencies, ...).
    project: RealWorldProject<'a>,
    // Checkout with installed dependencies; populated on first use.
    installed_project: std::sync::OnceLock<InstalledProject<'a>>,
    // Upper bound on the diagnostic count expected when checking the project.
    max_diagnostics: usize,
}
impl<'a> Benchmark<'a> {
    /// Define a benchmark without performing any setup work.
    const fn new(project: RealWorldProject<'a>, max_diagnostics: usize) -> Self {
        Self {
            project,
            installed_project: std::sync::OnceLock::new(),
            max_diagnostics,
        }
    }
    /// Clone the project and install its dependencies, memoized so setup runs
    /// at most once per process.
    fn installed_project(&self) -> &InstalledProject<'a> {
        self.installed_project.get_or_init(|| {
            self.project
                .clone()
                .setup()
                .expect("Failed to setup project")
        })
    }
    /// Build a fresh `ProjectDatabase` for one benchmark iteration, pointed at
    /// the installed checkout with its `.venv` and configured Python version,
    /// restricted to the project's check paths.
    fn setup_iteration(&self) -> ProjectDatabase {
        let installed_project = self.installed_project();
        let root = SystemPathBuf::from_path_buf(installed_project.path.clone()).unwrap();
        let system = OsSystem::new(&root);
        let mut metadata = ProjectMetadata::discover(&root, &system).unwrap();
        metadata.apply_options(Options {
            environment: Some(EnvironmentOptions {
                python_version: Some(RangedValue::cli(installed_project.config.python_version)),
                python: Some(RelativePathBuf::cli(SystemPath::new(".venv"))),
                ..EnvironmentOptions::default()
            }),
            ..Options::default()
        });
        let mut db = ProjectDatabase::new(metadata, system).unwrap();
        db.project().set_included_paths(
            &mut db,
            installed_project
                .check_paths()
                .iter()
                .map(|path| SystemPath::absolute(path, &root))
                .collect(),
        );
        db
    }
}
impl Display for Benchmark<'_> {
    /// A benchmark displays as its project name.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.project.name)
    }
}
/// Check the project and assert the diagnostic count falls in the expected range.
///
/// The lower bound guards against silently checking nothing; the upper bound
/// guards against diagnostic explosions.
fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
    let result = db.check();
    let diagnostics = result.len();
    // Message fix: the condition requires *more than one* diagnostic, but the
    // old message claimed "between 1 and {max}" inclusive.
    assert!(
        diagnostics > 1 && diagnostics <= max_diagnostics,
        "Expected more than 1 and at most {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
    );
}
static ALTAIR: Benchmark = Benchmark::new(
RealWorldProject {
name: "altair",
repository: "https://github.com/vega/altair",
commit: "d1f4a1ef89006e5f6752ef1f6df4b7a509336fba",
paths: &["altair"],
dependencies: &[
"jinja2",
"narwhals",
"numpy",
"packaging",
"pandas-stubs",
"pyarrow-stubs",
"pytest",
"scipy-stubs",
"types-jsonschema",
],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
1000,
);
static COLOUR_SCIENCE: Benchmark = Benchmark::new(
RealWorldProject {
name: "colour-science",
repository: "https://github.com/colour-science/colour",
commit: "a17e2335c29e7b6f08080aa4c93cfa9b61f84757",
paths: &["colour"],
dependencies: &[
"matplotlib",
"numpy",
"pandas-stubs",
"pytest",
"scipy-stubs",
],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY310,
},
1070,
);
static FREQTRADE: Benchmark = Benchmark::new(
RealWorldProject {
name: "freqtrade",
repository: "https://github.com/freqtrade/freqtrade",
commit: "2d842ea129e56575852ee0c45383c8c3f706be19",
paths: &["freqtrade"],
dependencies: &[
"numpy",
"pandas-stubs",
"pydantic",
"sqlalchemy",
"types-cachetools",
"types-filelock",
"types-python-dateutil",
"types-requests",
"types-tabulate",
],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
600,
);
static PANDAS: Benchmark = Benchmark::new(
RealWorldProject {
name: "pandas",
repository: "https://github.com/pandas-dev/pandas",
commit: "5909621e2267eb67943a95ef5e895e8484c53432",
paths: &["pandas"],
dependencies: &[
"numpy",
"types-python-dateutil",
"types-pytz",
"types-PyMySQL",
"types-setuptools",
"pytest",
],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
4000,
);
static PYDANTIC: Benchmark = Benchmark::new(
RealWorldProject {
name: "pydantic",
repository: "https://github.com/pydantic/pydantic",
commit: "0c4a22b64b23dfad27387750cf07487efc45eb05",
paths: &["pydantic"],
dependencies: &[
"annotated-types",
"pydantic-core",
"typing-extensions",
"typing-inspection",
],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY39,
},
7000,
);
static SYMPY: Benchmark = Benchmark::new(
RealWorldProject {
name: "sympy",
repository: "https://github.com/sympy/sympy",
commit: "22fc107a94eaabc4f6eb31470b39db65abb7a394",
paths: &["sympy"],
dependencies: &["mpmath"],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13116,
);
static TANJUN: Benchmark = Benchmark::new(
RealWorldProject {
name: "tanjun",
repository: "https://github.com/FasterSpeeding/Tanjun",
commit: "69f40db188196bc59516b6c69849c2d85fbc2f4a",
paths: &["tanjun"],
dependencies: &["hikari", "alluka"],
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
320,
);
static STATIC_FRAME: Benchmark = Benchmark::new(
RealWorldProject {
name: "static-frame",
repository: "https://github.com/static-frame/static-frame",
commit: "34962b41baca5e7f98f5a758d530bff02748a421",
paths: &["static_frame"],
// N.B. `arraykit` is installed as a dependency during mypy_primer runs,
// but it takes much longer to be installed in a Codspeed run than it does in a mypy_primer run
// (seems to be built from source on the Codspeed CI runners for some reason).
dependencies: &["numpy"],
max_dep_date: "2025-08-09",
python_version: PythonVersion::PY311,
},
1100,
);
/// Run `benchmark` on the current thread: build a fresh database per sample
/// (untimed, via `with_inputs`) and time checking the whole project.
#[track_caller]
fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
    bencher
        .with_inputs(|| benchmark.setup_iteration())
        .bench_local_refs(|db| {
            check_project(db, benchmark.project.name, benchmark.max_diagnostics);
        });
}
// Single-threaded end-to-end checks of each project. Sample sizes/counts are
// small because every sample type-checks an entire real-world project; the
// larger projects below use even fewer samples — presumably to bound total
// wall time (TODO confirm).
#[bench(sample_size = 2, sample_count = 3)]
fn altair(bencher: Bencher) {
    run_single_threaded(bencher, &ALTAIR);
}
#[bench(sample_size = 2, sample_count = 3)]
fn freqtrade(bencher: Bencher) {
    run_single_threaded(bencher, &FREQTRADE);
}
#[bench(sample_size = 2, sample_count = 3)]
fn tanjun(bencher: Bencher) {
    run_single_threaded(bencher, &TANJUN);
}
#[bench(sample_size = 2, sample_count = 3)]
fn pydantic(bencher: Bencher) {
    run_single_threaded(bencher, &PYDANTIC);
}
#[bench(sample_size = 1, sample_count = 3)]
fn static_frame(bencher: Bencher) {
    run_single_threaded(bencher, &STATIC_FRAME);
}
#[bench(sample_size = 1, sample_count = 2)]
fn colour_science(bencher: Bencher) {
    run_single_threaded(bencher, &COLOUR_SCIENCE);
}
#[bench(sample_size = 1, sample_count = 2)]
fn pandas(bencher: Bencher) {
    run_single_threaded(bencher, &PANDAS);
}
#[bench(sample_size = 1, sample_count = 2)]
fn sympy(bencher: Bencher) {
    run_single_threaded(bencher, &SYMPY);
}
/// Multithreaded check of altair using a locally-built, default-sized rayon
/// pool (the *global* pool is pinned to one thread in `main`).
#[bench(sample_size = 3, sample_count = 8)]
fn multithreaded(bencher: Bencher) {
    let thread_pool = ThreadPoolBuilder::new().build().unwrap();
    bencher
        .with_inputs(|| ALTAIR.setup_iteration())
        .bench_local_values(|db| {
            thread_pool.install(|| {
                check_project(&db, ALTAIR.project.name, ALTAIR.max_diagnostics);
                // Return the database out of the closure — presumably so its
                // drop isn't part of the timed section; confirm against divan's
                // `bench_local_values` semantics.
                db
            })
        });
}
fn main() {
    // Pin the global rayon pool to the current thread so the single-threaded
    // benchmarks measure ty's work rather than scheduling behavior.
    ThreadPoolBuilder::new()
        .num_threads(1)
        .use_current_thread()
        .build_global()
        .unwrap();
    // Log verbosity can be overridden via the `TY_LOG` environment variable.
    let filter =
        std::env::var("TY_LOG").unwrap_or("ty_walltime=info,ruff_benchmark=info".to_string());
    let _logging = setup_logging_with_filter(&filter).expect("Filter to be valid");
    // Salsa uses an optimized lookup for the ingredient index when using only a single database.
    // This optimization results in at least a 10% speedup compared to when using multiple databases.
    // To reduce noise, run one benchmark so that all benchmarks take the less optimized "not the first db"
    // branch when looking up the ingredient index.
    {
        let db = TANJUN.setup_iteration();
        check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics);
    }
    divan::main();
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/ty.rs | crates/ruff_benchmark/benches/ty.rs | #![allow(clippy::disallowed_names)]
use ruff_benchmark::criterion;
use ruff_benchmark::real_world_projects::{InstalledProject, RealWorldProject};
use std::fmt::Write;
use std::ops::Range;
use criterion::{BatchSize, Criterion, criterion_group, criterion_main};
use rayon::ThreadPoolBuilder;
use rustc_hash::FxHashSet;
use ruff_benchmark::TestFile;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
use ruff_db::files::{File, system_path_to_file};
use ruff_db::source::source_text;
use ruff_db::system::{InMemorySystem, MemoryFileSystem, SystemPath, SystemPathBuf, TestSystem};
use ruff_python_ast::PythonVersion;
use ty_project::metadata::options::{EnvironmentOptions, Options};
use ty_project::metadata::value::{RangedValue, RelativePathBuf};
use ty_project::watch::{ChangeEvent, ChangedKind};
use ty_project::{CheckMode, Db, ProjectDatabase, ProjectMetadata};
/// Shared state for one ty benchmark scenario.
struct Case {
    // Project database under test.
    db: ProjectDatabase,
    // Handle to the in-memory filesystem backing the database.
    fs: MemoryFileSystem,
    // The file the benchmark mutates and/or checks.
    file: File,
    // System path of `file`, used when emitting change events.
    file_path: SystemPathBuf,
}
// "https://raw.githubusercontent.com/python/cpython/8e8a4baf652f6e1cee7acde9d78c4b6154539748/Lib/tomllib";
static TOMLLIB_FILES: [TestFile; 4] = [
TestFile::new(
"tomllib/__init__.py",
include_str!("../resources/tomllib/__init__.py"),
),
TestFile::new(
"tomllib/_parser.py",
include_str!("../resources/tomllib/_parser.py"),
),
TestFile::new(
"tomllib/_re.py",
include_str!("../resources/tomllib/_re.py"),
),
TestFile::new(
"tomllib/_types.py",
include_str!("../resources/tomllib/_types.py"),
),
];
/// A structured set of fields we use to do diagnostic comparisons.
///
/// This helps assert benchmark results. Previously, we would compare
/// the actual diagnostic output, but using `insta` inside benchmarks is
/// problematic, and updating the strings otherwise when diagnostic rendering
/// changes is a PITA.
type KeyDiagnosticFields = (
DiagnosticId,
Option<&'static str>,
Option<Range<usize>>,
&'static str,
Severity,
);
// Checking the vendored tomllib sources is expected to produce no diagnostics.
static EXPECTED_TOMLLIB_DIAGNOSTICS: &[KeyDiagnosticFields] = &[];
/// Map a vendored tomllib test file to its location under `src/` in the
/// in-memory project.
fn tomllib_path(file: &TestFile) -> SystemPathBuf {
    SystemPathBuf::from("src").join(file.name())
}
/// Build an in-memory project containing the vendored tomllib sources,
/// configured for Python 3.12 with all four files open, returning a `Case`
/// whose `file` is `_re.py` (the file the incremental benchmark mutates).
fn setup_tomllib_case() -> Case {
    let system = TestSystem::default();
    let fs = system.memory_file_system().clone();
    fs.write_files_all(
        TOMLLIB_FILES
            .iter()
            .map(|file| (tomllib_path(file), file.code().to_string())),
    )
    .unwrap();
    let src_root = SystemPath::new("/src");
    let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
    metadata.apply_options(Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(RangedValue::cli(PythonVersion::PY312)),
            ..EnvironmentOptions::default()
        }),
        ..Options::default()
    });
    let mut db = ProjectDatabase::new(metadata, system).unwrap();
    let mut tomllib_files = FxHashSet::default();
    let mut re: Option<File> = None;
    for test_file in &TOMLLIB_FILES {
        let file = system_path_to_file(&db, tomllib_path(test_file)).unwrap();
        // Remember `_re.py`; the incremental benchmark edits that file.
        if test_file.name().ends_with("_re.py") {
            re = Some(file);
        }
        tomllib_files.insert(file);
    }
    let re = re.unwrap();
    db.set_check_mode(CheckMode::OpenFiles);
    db.project().set_open_files(&mut db, tomllib_files);
    let re_path = re.path(&db).as_system_path().unwrap().to_owned();
    Case {
        db,
        fs,
        file: re,
        file_path: re_path,
    }
}
// Guards one-time initialization of the global rayon thread pool.
static RAYON_INITIALIZED: std::sync::Once = std::sync::Once::new();
/// Initialize the global rayon thread pool (at most once per process) with a
/// single thread.
fn setup_rayon() {
    // Initialize the rayon thread pool outside the benchmark because it has a significant cost.
    // We limit the thread pool to only one (the current thread) because we're focused on
    // where ty spends time and less about how well the code runs concurrently.
    // We might want to add a benchmark focusing on concurrency to detect congestion in the future.
    RAYON_INITIALIZED.call_once(|| {
        ThreadPoolBuilder::new()
            .num_threads(1)
            .use_current_thread()
            .build_global()
            .unwrap();
    });
}
/// Benchmark an incremental re-check: check tomllib once, append a comment to
/// `_re.py` on disk, then time applying the change event plus re-checking.
fn benchmark_incremental(criterion: &mut Criterion) {
    // Untimed setup: build the case, run the initial check, and modify the file.
    fn setup() -> Case {
        let case = setup_tomllib_case();
        let result: Vec<_> = case.db.check();
        assert_diagnostics(&case.db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
        case.fs
            .write_file_all(
                &case.file_path,
                format!(
                    "{}\n# A comment\n",
                    source_text(&case.db, case.file).as_str()
                ),
            )
            .unwrap();
        case
    }
    // Timed portion: notify the database of the file change and re-check.
    fn incremental(case: &mut Case) {
        let Case { db, .. } = case;
        db.apply_changes(
            vec![ChangeEvent::Changed {
                path: case.file_path.clone(),
                kind: ChangedKind::FileContent,
            }],
            None,
        );
        let result = db.check();
        assert_eq!(result.len(), EXPECTED_TOMLLIB_DIAGNOSTICS.len());
    }
    setup_rayon();
    criterion.bench_function("ty_check_file[incremental]", |b| {
        b.iter_batched_ref(setup, incremental, BatchSize::SmallInput);
    });
}
/// Benchmark a from-scratch check of the tomllib project (fresh database per
/// sample, so no warm caches).
fn benchmark_cold(criterion: &mut Criterion) {
    setup_rayon();
    criterion.bench_function("ty_check_file[cold]", |b| {
        b.iter_batched_ref(
            setup_tomllib_case,
            |case| {
                let Case { db, .. } = case;
                let result: Vec<_> = db.check();
                assert_diagnostics(db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Compare emitted diagnostics against expectations on a stable subset of
/// fields (id, file path, range, message, severity), so the assertion is
/// insensitive to diagnostic-rendering changes.
#[track_caller]
fn assert_diagnostics(db: &dyn Db, diagnostics: &[Diagnostic], expected: &[KeyDiagnosticFields]) {
    let normalized: Vec<_> = diagnostics
        .iter()
        .map(|diagnostic| {
            (
                diagnostic.id(),
                diagnostic
                    .primary_span()
                    .map(|span| span.expect_ty_file())
                    .map(|file| file.path(db).as_str()),
                diagnostic
                    .primary_span()
                    .and_then(|span| span.range())
                    .map(Range::<usize>::from),
                diagnostic.primary_message(),
                diagnostic.severity(),
            )
        })
        .collect();
    assert_eq!(&normalized, expected);
}
/// Build a single-file in-memory project containing `code` (dedented) at
/// `src/test.py`, configured for Python 3.12 with only that file open.
fn setup_micro_case(code: &str) -> Case {
    let system = TestSystem::default();
    let fs = system.memory_file_system().clone();
    let file_path = "src/test.py";
    fs.write_file_all(
        SystemPathBuf::from(file_path),
        ruff_python_trivia::textwrap::dedent(code),
    )
    .unwrap();
    let src_root = SystemPath::new("/src");
    let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
    metadata.apply_options(Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(RangedValue::cli(PythonVersion::PY312)),
            ..EnvironmentOptions::default()
        }),
        ..Options::default()
    });
    let mut db = ProjectDatabase::new(metadata, system).unwrap();
    let file = system_path_to_file(&db, SystemPathBuf::from(file_path)).unwrap();
    db.set_check_mode(CheckMode::OpenFiles);
    db.project()
        .set_open_files(&mut db, FxHashSet::from_iter([file]));
    let file_path = file.path(&db).as_system_path().unwrap().to_owned();
    Case {
        db,
        fs,
        file,
        file_path,
    }
}
/// Micro-benchmark: ten conditional `str` augmented assignments build a union
/// of 2**10 string-literal types; measures both building and using that union.
fn benchmark_many_string_assignments(criterion: &mut Criterion) {
    setup_rayon();
    criterion.bench_function("ty_micro[many_string_assignments]", |b| {
        b.iter_batched_ref(
            || {
                // This is a micro benchmark, but it is effectively identical to a code sample
                // observed "in the wild":
                setup_micro_case(
                    r#"
                    def f(x) -> str:
                        s = ""
                        # Each conditional doubles the size of the union of string literal types,
                        # so if we go up to attr10, we have 2**10 = 1024 string literal types
                        if x.attr1:
                            s += "attr1"
                        if x.attr2:
                            s += "attr2"
                        if x.attr3:
                            s += "attr3"
                        if x.attr4:
                            s += "attr4"
                        if x.attr5:
                            s += "attr5"
                        if x.attr6:
                            s += "attr6"
                        if x.attr7:
                            s += "attr7"
                        if x.attr8:
                            s += "attr8"
                        if x.attr9:
                            s += "attr9"
                        if x.attr10:
                            s += "attr10"
                        # The above checked how fast we are in building the union; this checks how
                        # we manage it once it is built. If implemented naively, this has to check
                        # each member of the union for compatibility with the Sized protocol.
                        if len(s) > 0:
                            s = s[:-3]
                        return s
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Micro benchmark: conditional `+=` on a tuple builds a union of tuple types
/// that doubles with every conditional.
fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[many_tuple_assignments]", |b| {
        b.iter_batched_ref(
            || {
                // This is a micro benchmark, but it is effectively identical to a code sample
                // observed in https://github.com/astral-sh/ty/issues/362
                setup_micro_case(
                    r#"
                    def flag() -> bool:
                        return True

                    t = ()
                    if flag():
                        t += (1,)
                    if flag():
                        t += (2,)
                    if flag():
                        t += (3,)
                    if flag():
                        t += (4,)
                    if flag():
                        t += (5,)
                    if flag():
                        t += (6,)
                    if flag():
                        t += (7,)
                    if flag():
                        t += (8,)
                    # Perform some kind of operation on the union type
                    print(1 in t)
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                // The sample is expected to check cleanly.
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Regression benchmark for implicit instance attributes with tuple types.
///
/// Fix: this benchmark previously registered itself under the ID
/// `"ty_micro[many_tuple_assignments]"`, colliding with
/// `benchmark_many_tuple_assignments` above. Criterion benchmark IDs must be
/// unique; the duplicate both misattributes results and triggers criterion's
/// duplicate-ID detection. It now uses its own ID.
fn benchmark_tuple_implicit_instance_attributes(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[tuple_implicit_instance_attributes]", |b| {
        b.iter_batched_ref(
            || {
                // This is a regression benchmark for a case that used to hang:
                // https://github.com/astral-sh/ty/issues/765
                setup_micro_case(
                    r#"
                    from typing import Any

                    class A:
                        foo: tuple[Any, ...]

                    class B(A):
                        def __init__(self, parent: "C", x: tuple[Any]):
                            self.foo = parent.foo + x

                    class C(A):
                        def __init__(self, parent: B, x: tuple[Any]):
                            self.foo = parent.foo + x
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                // The sample is expected to check cleanly.
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Regression benchmark: many `isinstance` narrowing checks on (undefined)
/// instance attributes. The repeated checks on `self.b` are intentional —
/// they reproduce the pathological input from the linked issue.
fn benchmark_complex_constrained_attributes_1(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[complex_constrained_attributes_1]", |b| {
        b.iter_batched_ref(
            || {
                // This is a regression benchmark for https://github.com/astral-sh/ty/issues/627.
                // Before this was fixed, the following sample would take >1s to type check.
                setup_micro_case(
                    r#"
                    class C:
                        def f(self: "C"):
                            if isinstance(self.a, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                // `self.a`/`self.b` are never defined, so diagnostics are expected here.
                assert!(!result.is_empty());
            },
            BatchSize::SmallInput,
        );
    });
}
/// Regression benchmark: like `benchmark_complex_constrained_attributes_1`,
/// but the narrowed attributes are defined at the end of the method.
fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[complex_constrained_attributes_2]", |b| {
        b.iter_batched_ref(
            || {
                // This is similar to the case above, but now the attributes are actually defined.
                // https://github.com/astral-sh/ty/issues/711
                setup_micro_case(
                    r#"
                    class C:
                        def f(self: "C"):
                            if isinstance(self.a, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            if isinstance(self.b, str):
                                return
                            self.a = ""
                            self.b = ""
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                // The attributes are defined here, so the sample checks cleanly.
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Regression benchmark: attribute narrowing interleaved with reassignment
/// inside a loop (reduced from gridfs-like code).
/// NOTE(review): the fixture's indentation was reconstructed — the placement
/// of the trailing `self._position -= ...` relative to the `while` loop should
/// be confirmed against the upstream benchmark.
fn benchmark_complex_constrained_attributes_3(criterion: &mut Criterion) {
    setup_rayon();

    criterion.bench_function("ty_micro[complex_constrained_attributes_3]", |b| {
        b.iter_batched_ref(
            || {
                // This is a regression test for https://github.com/astral-sh/ty/issues/758
                setup_micro_case(
                    r#"
                    class GridOut:
                        def __init__(self: "GridOut") -> None:
                            self._buffer = b""

                        def _read_size_or_line(self: "GridOut", size: int = -1):
                            if size > self._position:
                                size = self._position
                                pass
                            if size == 0:
                                return bytes()
                            while size > 0:
                                if self._buffer:
                                    buf = self._buffer
                                    self._buffer = b""
                                else:
                                    buf = b""
                                if len(buf) > size:
                                    self._buffer = buf
                            self._position -= len(self._buffer)
                    "#,
                )
            },
            |case| {
                let Case { db, .. } = case;
                let result = db.check();
                assert_eq!(result.len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// Micro benchmark: an `Enum` subclass with 512 members, each of which is
/// also referenced once at module level.
fn benchmark_many_enum_members(criterion: &mut Criterion) {
    const NUM_ENUM_MEMBERS: usize = 512;

    setup_rayon();

    // Generate the fixture: the enum definition followed by one `print` per
    // member, so both member creation and member access are exercised.
    let mut source = String::new();
    writeln!(&mut source, "from enum import Enum").ok();
    writeln!(&mut source, "class E(Enum):").ok();
    for member in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut source, "    m{member} = {member}").ok();
    }
    writeln!(&mut source).ok();
    for member in 0..NUM_ENUM_MEMBERS {
        writeln!(&mut source, "print(E.m{member})").ok();
    }

    criterion.bench_function("ty_micro[many_enum_members]", |b| {
        b.iter_batched_ref(
            || setup_micro_case(&source),
            |case| {
                let Case { db, .. } = case;
                assert_eq!(db.check().len(), 0);
            },
            BatchSize::SmallInput,
        );
    });
}
/// A benchmark over a full real-world project checked end-to-end.
struct ProjectBenchmark<'a> {
    /// The checked-out project (including its installed dependencies).
    project: InstalledProject<'a>,
    /// In-memory snapshot of the project files so each benchmark iteration
    /// starts from an identical filesystem state.
    fs: MemoryFileSystem,
    /// Upper bound on the number of diagnostics the check is allowed to emit.
    max_diagnostics: usize,
}
impl<'a> ProjectBenchmark<'a> {
    /// Checks out `project` and snapshots its files into an in-memory
    /// filesystem so benchmark iterations never touch the real disk.
    fn new(project: RealWorldProject<'a>, max_diagnostics: usize) -> Self {
        let installed = project.setup().expect("Failed to setup project");
        let memory_fs = installed
            .copy_to_memory_fs()
            .expect("Failed to copy project to memory fs");

        Self {
            project: installed,
            fs: memory_fs,
            max_diagnostics,
        }
    }

    /// Builds a fresh `ProjectDatabase` over the snapshotted filesystem for a
    /// single benchmark iteration.
    fn setup_iteration(&self) -> ProjectDatabase {
        let system = TestSystem::new(InMemorySystem::from_memory_fs(self.fs.clone()));

        let mut metadata = ProjectMetadata::discover(SystemPath::new("/"), &system).unwrap();
        metadata.apply_options(Options {
            environment: Some(EnvironmentOptions {
                python_version: Some(RangedValue::cli(self.project.config.python_version)),
                // Resolve third-party dependencies from the project's venv.
                python: Some(RelativePathBuf::cli(SystemPath::new(".venv"))),
                ..EnvironmentOptions::default()
            }),
            ..Options::default()
        });

        let mut db = ProjectDatabase::new(metadata, system).unwrap();

        let included_paths = self
            .project
            .check_paths()
            .iter()
            .map(|path| SystemPathBuf::from(*path))
            .collect();
        db.project().set_included_paths(&mut db, included_paths);

        db
    }
}
/// Runs `benchmark` as a criterion "project" benchmark: each sample rebuilds
/// the database from the in-memory snapshot and checks the whole project.
#[track_caller]
fn bench_project(benchmark: &ProjectBenchmark, criterion: &mut Criterion) {
    // Checks the project and fails if more diagnostics than allowed are
    // reported, listing every diagnostic to ease debugging.
    fn check_project(db: &mut ProjectDatabase, project_name: &str, max_diagnostics: usize) {
        let result = db.check();
        let diagnostics = result.len();

        if diagnostics > max_diagnostics {
            // Render the messages only on failure; formatting them on every
            // iteration would pollute the measurement.
            let details = result
                .into_iter()
                .map(|diagnostic| diagnostic.concise_message().to_string())
                .collect::<Vec<_>>()
                .join("\n ");
            // The original wrapped this in `assert!(diagnostics <= max_diagnostics, ...)`,
            // but that condition is provably false inside this branch — `panic!`
            // states the intent directly.
            panic!(
                "{project_name}: Expected <={max_diagnostics} diagnostics but got {diagnostics}:\n {details}",
            );
        }
    }

    setup_rayon();

    let mut group = criterion.benchmark_group("project");
    // Flat sampling: every sample performs the identical full-project check.
    group.sampling_mode(criterion::SamplingMode::Flat);

    group.bench_function(benchmark.project.config.name, |b| {
        b.iter_batched_ref(
            || benchmark.setup_iteration(),
            |db| check_project(db, benchmark.project.config.name, benchmark.max_diagnostics),
            BatchSize::SmallInput,
        );
    });
}
/// Project benchmark: `hydra-zen` at a pinned commit, with real dependencies,
/// allowing at most 100 diagnostics.
fn hydra(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "hydra-zen",
            repository: "https://github.com/mit-ll-responsible-ai/hydra-zen",
            commit: "dd2b50a9614c6f8c46c5866f283c8f7e7a960aa8",
            paths: &["src"],
            dependencies: &["pydantic", "beartype", "hydra-core"],
            max_dep_date: "2025-06-17",
            python_version: PythonVersion::PY313,
        },
        100,
    );

    bench_project(&benchmark, criterion);
}
/// Project benchmark: `attrs` at a pinned commit (no third-party
/// dependencies), allowing at most 120 diagnostics.
fn attrs(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "attrs",
            repository: "https://github.com/python-attrs/attrs",
            commit: "a6ae894aad9bc09edc7cdad8c416898784ceec9b",
            paths: &["src"],
            dependencies: &[],
            max_dep_date: "2025-06-17",
            python_version: PythonVersion::PY313,
        },
        120,
    );

    bench_project(&benchmark, criterion);
}
/// Project benchmark: `anyio` at a pinned commit (no third-party
/// dependencies), allowing at most 150 diagnostics.
fn anyio(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "anyio",
            repository: "https://github.com/agronholm/anyio",
            commit: "561d81270a12f7c6bbafb5bc5fad99a2a13f96be",
            paths: &["src"],
            dependencies: &[],
            max_dep_date: "2025-06-17",
            python_version: PythonVersion::PY313,
        },
        150,
    );

    bench_project(&benchmark, criterion);
}
/// Project benchmark: `DateType` at a pinned commit (no third-party
/// dependencies), allowing at most 2 diagnostics.
fn datetype(criterion: &mut Criterion) {
    let benchmark = ProjectBenchmark::new(
        RealWorldProject {
            name: "DateType",
            repository: "https://github.com/glyph/DateType",
            commit: "57c9c93cf2468069f72945fc04bf27b64100dad8",
            paths: &["src"],
            dependencies: &[],
            max_dep_date: "2025-07-04",
            python_version: PythonVersion::PY313,
        },
        2,
    );

    bench_project(&benchmark, criterion);
}
// Benchmark registration: `check_file` covers single-file cold/incremental
// checks, `micro` covers targeted regression scenarios, and `project` checks
// entire real-world projects.
criterion_group!(check_file, benchmark_cold, benchmark_incremental);
criterion_group!(
    micro,
    benchmark_many_string_assignments,
    benchmark_many_tuple_assignments,
    benchmark_tuple_implicit_instance_attributes,
    benchmark_complex_constrained_attributes_1,
    benchmark_complex_constrained_attributes_2,
    benchmark_complex_constrained_attributes_3,
    benchmark_many_enum_members,
);
criterion_group!(project, anyio, attrs, hydra, datetype);
criterion_main!(check_file, micro, project);
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ruff_benchmark/benches/formatter.rs | crates/ruff_benchmark/benches/formatter.rs | use std::path::Path;
use ruff_benchmark::criterion::{
BenchmarkId, Criterion, Throughput, criterion_group, criterion_main,
};
use ruff_benchmark::{
LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_python_formatter::{PreviewMode, PyFormatOptions, format_module_ast};
use ruff_python_parser::{Mode, ParseOptions, parse};
use ruff_python_trivia::CommentRanges;
// Use mimalloc on Windows and jemalloc on the other supported targets: the
// formatter benchmark is allocation-heavy, so the allocator choice noticeably
// affects (and stabilizes) the measurements.
#[cfg(target_os = "windows")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

// jemalloc is only used on architectures/OSes where the crate is known to
// build; everywhere else the system allocator is kept.
#[cfg(all(
    not(target_os = "windows"),
    not(target_os = "openbsd"),
    any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "riscv64"
    )
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
/// The corpus of Python files formatted by the benchmark, tagged by how
/// expensive they are to process (fast/normal/slow).
fn create_test_cases() -> Vec<TestCase> {
    vec![
        TestCase::fast(NUMPY_GLOBALS.clone()),
        TestCase::fast(UNICODE_PYPINYIN.clone()),
        TestCase::normal(PYDANTIC_TYPES.clone()),
        TestCase::normal(NUMPY_CTYPESLIB.clone()),
        TestCase::slow(LARGE_DATASET.clone()),
    ]
}
/// Benchmarks the formatter over each test case. Parsing happens once outside
/// the measured closure, so only formatting and printing are timed.
fn benchmark_formatter(criterion: &mut Criterion) {
    let mut group = criterion.benchmark_group("formatter");

    for test_case in create_test_cases() {
        group.throughput(Throughput::Bytes(test_case.code().len() as u64));

        group.bench_with_input(
            BenchmarkId::from_parameter(test_case.name()),
            &test_case,
            |b, case| {
                // Shared setup: parse the source and collect comment ranges.
                // Intentionally excluded from the measured section below.
                let parsed = parse(case.code(), ParseOptions::from(Mode::Module))
                    .expect("Input should be a valid Python code");
                let comment_ranges = CommentRanges::from(parsed.tokens());

                b.iter(|| {
                    let options = PyFormatOptions::from_extension(Path::new(case.name()))
                        .with_preview(PreviewMode::Enabled);
                    let formatted =
                        format_module_ast(&parsed, &comment_ranges, case.code(), options)
                            .expect("Formatting to succeed");
                    formatted.print().expect("Printing to succeed")
                });
            },
        );
    }

    group.finish();
}
// Register and run the single formatter benchmark group.
criterion_group!(formatter, benchmark_formatter);
criterion_main!(formatter);
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/dunder_all.rs | crates/ty_python_semantic/src/dunder_all.rs | use rustc_hash::FxHashSet;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::name::Name;
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_ast::{self as ast};
use ty_module_resolver::{ModuleName, resolve_module};
use crate::Db;
use crate::semantic_index::{SemanticIndex, semantic_index};
use crate::types::{Truthiness, Type, TypeContext, infer_expression_types};
/// Initial value used by salsa when `dunder_all_names` participates in a query
/// cycle: treat the module as having no (valid) `__all__`.
fn dunder_all_names_cycle_initial(
    _db: &dyn Db,
    _id: salsa::Id,
    _file: File,
) -> Option<FxHashSet<Name>> {
    None
}
/// Returns a set of names in the `__all__` variable for `file`, [`None`] if it is not defined or
/// if it contains invalid elements.
///
/// Tracked salsa query; cycles fall back to [`dunder_all_names_cycle_initial`].
#[salsa::tracked(returns(as_ref), cycle_initial=dunder_all_names_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn dunder_all_names(db: &dyn Db, file: File) -> Option<FxHashSet<Name>> {
    let _span = tracing::trace_span!("dunder_all_names", file=?file.path(db)).entered();

    let module = parsed_module(db, file).load(db);
    let index = semantic_index(db, file);

    // Walk the module body, collecting names from every recognized `__all__` idiom.
    let mut collector = DunderAllNamesCollector::new(db, file, index);
    collector.visit_body(module.suite());
    collector.into_names()
}
/// A visitor that collects the names in the `__all__` variable of a module.
struct DunderAllNamesCollector<'db> {
    db: &'db dyn Db,

    /// The file whose `__all__` is being collected.
    file: File,

    /// The semantic index for the module.
    index: &'db SemanticIndex<'db>,

    /// The origin of the `__all__` variable in the current module, [`None`] if it is not defined.
    origin: Option<DunderAllOrigin>,

    /// A flag indicating whether the module uses unrecognized `__all__` idioms or there are any
    /// invalid elements in `__all__`.
    invalid: bool,

    /// A set of names found in `__all__` for the current module.
    names: FxHashSet<Name>,
}
impl<'db> DunderAllNamesCollector<'db> {
    /// Creates a collector with no origin, no names, and the valid flag set.
    fn new(db: &'db dyn Db, file: File, index: &'db SemanticIndex<'db>) -> Self {
        Self {
            db,
            file,
            index,
            origin: None,
            invalid: false,
            names: FxHashSet::default(),
        }
    }

    /// Updates the origin of `__all__` in the current module.
    ///
    /// This will clear existing names if the origin is changed to mimic the behavior of overriding
    /// `__all__` in the current module.
    fn update_origin(&mut self, origin: DunderAllOrigin) {
        if self.origin.is_some() {
            self.names.clear();
        }
        self.origin = Some(origin);
    }

    /// Extends the current set of names with the names from the given expression which can be
    /// either a list/tuple/set of string-literal names or a module's `__all__` variable.
    ///
    /// Returns `true` if the expression is a valid list/tuple/set or module `__all__`, `false` otherwise.
    fn extend(&mut self, expr: &ast::Expr) -> bool {
        match expr {
            // `__all__ += [...]`
            // `__all__.extend([...])`
            ast::Expr::List(ast::ExprList { elts, .. })
            | ast::Expr::Tuple(ast::ExprTuple { elts, .. })
            | ast::Expr::Set(ast::ExprSet { elts, .. }) => self.add_names(elts),

            // `__all__ += module.__all__`
            // `__all__.extend(module.__all__)`
            ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
                if attr != "__all__" {
                    return false;
                }
                // The attribute's object must resolve to a specific module literal.
                let Type::ModuleLiteral(module_literal) = self.standalone_expression_type(value)
                else {
                    return false;
                };
                let Some(module_dunder_all_names) = module_literal
                    .module(self.db)
                    .file(self.db)
                    .and_then(|file| dunder_all_names(self.db, file))
                else {
                    // The module either does not have a `__all__` variable or it is invalid.
                    return false;
                };
                self.names.extend(module_dunder_all_names.iter().cloned());
                true
            }

            _ => false,
        }
    }

    /// Processes a call idiom for `__all__` and updates the set of names accordingly.
    ///
    /// Returns `true` if the call idiom is recognized and valid, `false` otherwise.
    fn process_call_idiom(
        &mut self,
        function_name: &ast::Identifier,
        arguments: &ast::Arguments,
    ) -> bool {
        // All recognized idioms take exactly one positional argument.
        if arguments.len() != 1 {
            return false;
        }
        let Some(argument) = arguments.find_positional(0) else {
            return false;
        };

        match function_name.as_str() {
            // `__all__.extend([...])`
            // `__all__.extend(module.__all__)`
            "extend" => {
                if !self.extend(argument) {
                    return false;
                }
            }

            // `__all__.append(...)`
            "append" => {
                let Some(name) = create_name(argument) else {
                    return false;
                };
                self.names.insert(name);
            }

            // `__all__.remove(...)`
            "remove" => {
                let Some(name) = create_name(argument) else {
                    return false;
                };
                self.names.remove(&name);
            }

            _ => return false,
        }

        true
    }

    /// Returns the names in `__all__` from the module imported from the given `import_from`
    /// statement.
    ///
    /// Returns [`None`] if module resolution fails, invalid syntax, or if the module does not have
    /// a `__all__` variable.
    fn dunder_all_names_for_import_from(
        &self,
        import_from: &ast::StmtImportFrom,
    ) -> Option<&'db FxHashSet<Name>> {
        let module_name =
            ModuleName::from_import_statement(self.db, self.file, import_from).ok()?;
        let module = resolve_module(self.db, self.file, &module_name)?;
        dunder_all_names(self.db, module.file(self.db)?)
    }

    /// Infer the type of a standalone expression.
    ///
    /// # Panics
    ///
    /// This function panics if `expr` was not marked as a standalone expression during semantic indexing.
    fn standalone_expression_type(&self, expr: &ast::Expr) -> Type<'db> {
        infer_expression_types(self.db, self.index.expression(expr), TypeContext::default())
            .expression_type(expr)
    }

    /// Evaluate the given expression and return its truthiness.
    ///
    /// Returns [`None`] if the expression type doesn't implement `__bool__` correctly.
    fn evaluate_test_expr(&self, expr: &ast::Expr) -> Option<Truthiness> {
        self.standalone_expression_type(expr).try_bool(self.db).ok()
    }

    /// Add valid names to the set.
    ///
    /// Returns `false` if any of the names are invalid.
    fn add_names(&mut self, exprs: &[ast::Expr]) -> bool {
        for expr in exprs {
            let Some(name) = create_name(expr) else {
                return false;
            };
            self.names.insert(name);
        }
        true
    }

    /// Consumes `self` and returns the collected set of names.
    ///
    /// Returns [`None`] if `__all__` is not defined in the current module or if it contains
    /// invalid elements.
    fn into_names(self) -> Option<FxHashSet<Name>> {
        if self.origin.is_none() {
            None
        } else if self.invalid {
            tracing::debug!("Invalid `__all__` in `{}`", self.file.path(self.db));
            None
        } else {
            Some(self.names)
        }
    }
}
impl<'db> StatementVisitor<'db> for DunderAllNamesCollector<'db> {
    /// Dispatches on every statement form that can define or mutate `__all__`;
    /// anything unrecognized that touches `__all__` marks the result invalid.
    fn visit_stmt(&mut self, stmt: &'db ast::Stmt) {
        if self.invalid {
            // Once `__all__` is known to be invalid, further analysis is pointless.
            return;
        }

        match stmt {
            ast::Stmt::ImportFrom(import_from @ ast::StmtImportFrom { names, .. }) => {
                for ast::Alias { name, asname, .. } in names {
                    // `from module import *` where `module` is a module with a top-level `__all__`
                    // variable that contains the "__all__" element.
                    if name == "*" {
                        // Here, we need to use the `dunder_all_names` query instead of the
                        // `exported_names` query because a `*`-import does not import the
                        // `__all__` attribute unless it is explicitly included in the `__all__` of
                        // the module.
                        let Some(all_names) = self.dunder_all_names_for_import_from(import_from)
                        else {
                            self.invalid = true;
                            continue;
                        };
                        if all_names.contains(&Name::new_static("__all__")) {
                            self.update_origin(DunderAllOrigin::StarImport);
                            self.names.extend(all_names.iter().cloned());
                        }
                    } else {
                        // `from module import __all__`
                        // `from module import __all__ as __all__`
                        if name != "__all__"
                            || asname.as_ref().is_some_and(|asname| asname != "__all__")
                        {
                            continue;
                        }

                        // We could do the `__all__` lookup lazily in case it's not needed. This would
                        // happen if a `__all__` is imported from another module but then the module
                        // redefines it. For example:
                        //
                        // ```python
                        // from module import __all__ as __all__
                        //
                        // __all__ = ["a", "b"]
                        // ```
                        //
                        // I'm avoiding this for now because it doesn't seem likely to happen in
                        // practice.
                        let Some(all_names) = self.dunder_all_names_for_import_from(import_from)
                        else {
                            self.invalid = true;
                            continue;
                        };
                        self.update_origin(DunderAllOrigin::ExternalModule);
                        self.names.extend(all_names.iter().cloned());
                    }
                }
            }

            ast::Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
                // Only a single-target assignment to `__all__` is recognized.
                let [target] = targets.as_slice() else {
                    return;
                };
                if !is_dunder_all(target) {
                    return;
                }
                match &**value {
                    // `__all__ = [...]`
                    // `__all__ = (...)`
                    ast::Expr::List(ast::ExprList { elts, .. })
                    | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => {
                        self.update_origin(DunderAllOrigin::CurrentModule);
                        if !self.add_names(elts) {
                            self.invalid = true;
                        }
                    }
                    _ => {
                        self.invalid = true;
                    }
                }
            }

            ast::Stmt::AugAssign(ast::StmtAugAssign {
                target,
                op: ast::Operator::Add,
                value,
                ..
            }) => {
                if self.origin.is_none() {
                    // We can't update `__all__` if it doesn't already exist.
                    return;
                }
                if !is_dunder_all(target) {
                    return;
                }
                if !self.extend(value) {
                    self.invalid = true;
                }
            }

            ast::Stmt::AnnAssign(ast::StmtAnnAssign {
                target,
                value: Some(value),
                ..
            }) => {
                if !is_dunder_all(target) {
                    return;
                }
                match &**value {
                    // `__all__: list[str] = [...]`
                    // `__all__: tuple[str, ...] = (...)`
                    ast::Expr::List(ast::ExprList { elts, .. })
                    | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => {
                        self.update_origin(DunderAllOrigin::CurrentModule);
                        if !self.add_names(elts) {
                            self.invalid = true;
                        }
                    }
                    _ => {
                        self.invalid = true;
                    }
                }
            }

            ast::Stmt::Expr(ast::StmtExpr { value: expr, .. }) => {
                if self.origin.is_none() {
                    // We can't update `__all__` if it doesn't already exist.
                    return;
                }
                // Only method-call idioms (`__all__.extend(...)` etc.) are recognized here.
                let Some(ast::ExprCall {
                    func, arguments, ..
                }) = expr.as_call_expr()
                else {
                    return;
                };
                let Some(ast::ExprAttribute {
                    value,
                    attr,
                    ctx: ast::ExprContext::Load,
                    ..
                }) = func.as_attribute_expr()
                else {
                    return;
                };
                if !is_dunder_all(value) {
                    return;
                }
                if !self.process_call_idiom(attr, arguments) {
                    self.invalid = true;
                }
            }

            ast::Stmt::If(ast::StmtIf {
                test,
                body,
                elif_else_clauses,
                ..
            }) => match self.evaluate_test_expr(test) {
                // Only statically-decidable branches are followed.
                Some(Truthiness::AlwaysTrue) => self.visit_body(body),
                Some(Truthiness::AlwaysFalse) => {
                    for ast::ElifElseClause { test, body, .. } in elif_else_clauses {
                        if let Some(test) = test {
                            match self.evaluate_test_expr(test) {
                                Some(Truthiness::AlwaysTrue) => {
                                    self.visit_body(body);
                                    break;
                                }
                                Some(Truthiness::AlwaysFalse) => {}
                                Some(Truthiness::Ambiguous) | None => {
                                    break;
                                }
                            }
                        } else {
                            // `else` clause: always taken once every prior test was false.
                            self.visit_body(body);
                        }
                    }
                }
                Some(Truthiness::Ambiguous) | None => {}
            },

            ast::Stmt::For(..)
            | ast::Stmt::While(..)
            | ast::Stmt::With(..)
            | ast::Stmt::Match(..)
            | ast::Stmt::Try(..) => {
                // Recurse into compound statements; `__all__` mutations inside them count.
                walk_stmt(self, stmt);
            }

            ast::Stmt::FunctionDef(..) | ast::Stmt::ClassDef(..) => {
                // Avoid recursing into any nested scopes as `__all__` is only valid at the module
                // level.
            }

            ast::Stmt::AugAssign(..)
            | ast::Stmt::AnnAssign(..)
            | ast::Stmt::Delete(..)
            | ast::Stmt::Return(..)
            | ast::Stmt::Raise(..)
            | ast::Stmt::Assert(..)
            | ast::Stmt::Import(..)
            | ast::Stmt::Global(..)
            | ast::Stmt::Nonlocal(..)
            | ast::Stmt::TypeAlias(..)
            | ast::Stmt::Pass(..)
            | ast::Stmt::Break(..)
            | ast::Stmt::Continue(..)
            | ast::Stmt::IpyEscapeCommand(..) => {}
        }
    }
}
/// Where the value of the current module's `__all__` variable originated.
#[derive(Debug, Clone)]
enum DunderAllOrigin {
    /// The `__all__` variable is defined in the current module.
    CurrentModule,

    /// The `__all__` variable is imported from another module.
    ExternalModule,

    /// The `__all__` variable is imported from a module via a `*`-import.
    StarImport,
}
/// Checks if the given expression is a name expression for `__all__`.
fn is_dunder_all(expr: &ast::Expr) -> bool {
    match expr {
        ast::Expr::Name(ast::ExprName { id, .. }) => id == "__all__",
        _ => false,
    }
}
/// Create and return a [`Name`] from the given expression, [`None`] if it is
/// not a plain string literal (the only valid form for an `__all__` element).
fn create_name(expr: &ast::Expr) -> Option<Name> {
    let literal = expr.as_string_literal_expr()?;
    Some(Name::new(literal.value.to_str()))
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/db.rs | crates/ty_python_semantic/src/db.rs | use crate::AnalysisSettings;
use crate::lint::{LintRegistry, RuleSelection};
use ruff_db::files::File;
use ty_module_resolver::Db as ModuleResolverDb;
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: ModuleResolverDb {
    /// Returns `true` if the file should be checked.
    fn should_check_file(&self, file: File) -> bool;

    /// Resolves the rule selection for a given file.
    fn rule_selection(&self, file: File) -> &RuleSelection;

    /// Returns the registry of all lints known to this database.
    fn lint_registry(&self) -> &LintRegistry;

    /// Returns the analysis settings in effect for this database.
    fn analysis_settings(&self) -> &AnalysisSettings;

    /// Whether ty is running with logging verbosity INFO or higher (`-v` or more).
    fn verbose(&self) -> bool;
}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::{Arc, Mutex};
use crate::program::Program;
use crate::{
AnalysisSettings, ProgramSettings, PythonPlatform, PythonVersionSource,
PythonVersionWithSource, default_lint_registry,
};
use ty_module_resolver::SearchPathSettings;
use super::Db;
use crate::lint::{LintRegistry, RuleSelection};
use anyhow::Context;
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{
DbWithTestSystem, DbWithWritableSystem as _, System, SystemPath, SystemPathBuf, TestSystem,
};
use ruff_db::vendored::VendoredFileSystem;
use ruff_python_ast::PythonVersion;
use ty_module_resolver::Db as ModuleResolverDb;
use ty_module_resolver::SearchPaths;
type Events = Arc<Mutex<Vec<salsa::Event>>>;
#[salsa::db]
#[derive(Clone)]
pub(crate) struct TestDb {
    storage: salsa::Storage<Self>,
    files: Files,
    // In-memory system; tests never touch the real filesystem.
    system: TestSystem,
    vendored: VendoredFileSystem,
    // Shared log of salsa events, inspected by incremental-computation tests.
    events: Events,
    rule_selection: Arc<RuleSelection>,
    analysis_settings: Arc<AnalysisSettings>,
}
impl TestDb {
    /// Creates a database that records every salsa event and uses the default
    /// rule selection and analysis settings.
    pub(crate) fn new() -> Self {
        let events = Events::default();
        Self {
            storage: salsa::Storage::new(Some(Box::new({
                let events = events.clone();
                move |event| {
                    tracing::trace!("event: {event:?}");
                    let mut events = events.lock().unwrap();
                    events.push(event);
                }
            }))),
            system: TestSystem::default(),
            vendored: ty_vendored::file_system().clone(),
            events,
            files: Files::default(),
            rule_selection: Arc::new(RuleSelection::from_registry(default_lint_registry())),
            analysis_settings: AnalysisSettings::default().into(),
        }
    }

    /// Takes the salsa events.
    pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
        let mut events = self.events.lock().unwrap();
        std::mem::take(&mut *events)
    }

    /// Clears the salsa events.
    ///
    /// ## Panics
    /// If there are any pending salsa snapshots.
    pub(crate) fn clear_salsa_events(&mut self) {
        self.take_salsa_events();
    }
}
// The remaining trait impls simply forward to the fields above (or to the
// `Program` input for settings-derived values).

impl DbWithTestSystem for TestDb {
    fn test_system(&self) -> &TestSystem {
        &self.system
    }

    fn test_system_mut(&mut self) -> &mut TestSystem {
        &mut self.system
    }
}

#[salsa::db]
impl SourceDb for TestDb {
    fn vendored(&self) -> &VendoredFileSystem {
        &self.vendored
    }

    fn system(&self) -> &dyn System {
        &self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }

    fn python_version(&self) -> PythonVersion {
        Program::get(self).python_version(self)
    }
}

#[salsa::db]
impl Db for TestDb {
    fn should_check_file(&self, file: File) -> bool {
        // Vendored stubs are inputs, not files under test.
        !file.path(self).is_vendored_path()
    }

    fn rule_selection(&self, _file: File) -> &RuleSelection {
        &self.rule_selection
    }

    fn lint_registry(&self) -> &LintRegistry {
        default_lint_registry()
    }

    fn analysis_settings(&self) -> &AnalysisSettings {
        &self.analysis_settings
    }

    fn verbose(&self) -> bool {
        false
    }
}

#[salsa::db]
impl ModuleResolverDb for TestDb {
    fn search_paths(&self) -> &SearchPaths {
        Program::get(self).search_paths(self)
    }
}

#[salsa::db]
impl salsa::Database for TestDb {}
pub(crate) struct TestDbBuilder<'a> {
    /// Target Python version
    python_version: PythonVersion,
    /// Target Python platform
    python_platform: PythonPlatform,
    /// Path and content pairs for files that should be present
    files: Vec<(&'a str, &'a str)>,
}

impl<'a> TestDbBuilder<'a> {
    /// Starts a builder with default version/platform and no files.
    pub(crate) fn new() -> Self {
        Self {
            python_version: PythonVersion::default(),
            python_platform: PythonPlatform::default(),
            files: vec![],
        }
    }

    /// Overrides the target Python version.
    pub(crate) fn with_python_version(mut self, version: PythonVersion) -> Self {
        self.python_version = version;
        self
    }

    /// Registers a file (path + content) to write into the in-memory system.
    pub(crate) fn with_file(
        mut self,
        path: &'a (impl AsRef<SystemPath> + ?Sized),
        content: &'a str,
    ) -> Self {
        self.files.push((path.as_ref().as_str(), content));
        self
    }

    /// Writes the registered files under `/src` and initializes the `Program`
    /// input with the configured version, platform, and search paths.
    pub(crate) fn build(self) -> anyhow::Result<TestDb> {
        let mut db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system().create_directory_all(&src_root)?;

        db.write_files(self.files)
            .context("Failed to write test files")?;

        Program::from_settings(
            &db,
            ProgramSettings {
                python_version: PythonVersionWithSource {
                    version: self.python_version,
                    source: PythonVersionSource::default(),
                },
                python_platform: self.python_platform,
                search_paths: SearchPathSettings::new(vec![src_root])
                    .to_search_paths(db.system(), db.vendored())
                    .context("Invalid search path settings")?,
            },
        );

        Ok(db)
    }
}
/// Convenience: a `TestDb` with default settings and no files.
pub(crate) fn setup_db() -> TestDb {
    TestDbBuilder::new().build().expect("valid TestDb setup")
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/lib.rs | crates/ty_python_semantic/src/lib.rs | #![warn(
clippy::disallowed_methods,
reason = "Prefer System trait methods over std methods in ty crates"
)]
use std::hash::BuildHasherDefault;
use crate::lint::{LintRegistry, LintRegistryBuilder};
use crate::suppression::{
IGNORE_COMMENT_UNKNOWN_RULE, INVALID_IGNORE_COMMENT, UNUSED_IGNORE_COMMENT,
};
pub use db::Db;
pub use diagnostic::add_inferred_python_version_hint_to_diagnostic;
pub use program::{
Program, ProgramSettings, PythonVersionFileSource, PythonVersionSource, PythonVersionWithSource,
};
pub use python_platform::PythonPlatform;
use rustc_hash::FxHasher;
pub use semantic_model::{
Completion, HasDefinition, HasType, MemberDefinition, NameKind, SemanticModel,
};
pub use site_packages::{PythonEnvironment, SitePackagesPaths, SysPrefixPathOrigin};
pub use suppression::create_suppression_fix;
pub use ty_module_resolver::MisconfigurationMode;
pub use types::DisplaySettings;
pub use types::ide_support::{
ImportAliasResolution, ResolvedDefinition, definitions_for_attribute, definitions_for_bin_op,
definitions_for_imported_symbol, definitions_for_name, definitions_for_unary_op,
map_stub_definition,
};
pub mod ast_node_ref;
mod db;
mod dunder_all;
pub mod lint;
pub(crate) mod list;
mod node_key;
pub(crate) mod place;
mod program;
mod python_platform;
mod rank;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod subscript;
mod suppression;
pub mod types;
mod unpack;
mod diagnostic;
#[cfg(feature = "testing")]
pub mod pull_types;
// Collection aliases backed by `FxHasher`, a fast non-cryptographic hasher.
type FxOrderMap<K, V> = ordermap::map::OrderMap<K, V, BuildHasherDefault<FxHasher>>;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
type FxIndexMap<K, V> = indexmap::IndexMap<K, V, BuildHasherDefault<FxHasher>>;
type FxIndexSet<V> = indexmap::IndexSet<V, BuildHasherDefault<FxHasher>>;
/// Returns the default registry with all known semantic lints.
pub fn default_lint_registry() -> &'static LintRegistry {
    // Built once, on first use; subsequent calls return the cached registry.
    static REGISTRY: std::sync::OnceLock<LintRegistry> = std::sync::OnceLock::new();

    REGISTRY.get_or_init(|| {
        let mut builder = LintRegistryBuilder::default();
        register_lints(&mut builder);
        builder.build()
    })
}
/// Register all known semantic lints.
pub fn register_lints(registry: &mut LintRegistryBuilder) {
    // Type-checking lints first, then the suppression-comment lints.
    types::register_lints(registry);

    for suppression_lint in [
        &UNUSED_IGNORE_COMMENT,
        &IGNORE_COMMENT_UNKNOWN_RULE,
        &INVALID_IGNORE_COMMENT,
    ] {
        registry.register_lint(suppression_lint);
    }
}
/// User-configurable knobs controlling how a program is analyzed.
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub struct AnalysisSettings {
    /// Whether errors can be suppressed with `type: ignore` comments.
    ///
    /// If set to false, ty won't:
    ///
    /// * allow suppressing errors with `type: ignore` comments
    /// * report unused `type: ignore` comments
    /// * report invalid `type: ignore` comments
    pub respect_type_ignore_comments: bool,
}
impl Default for AnalysisSettings {
    fn default() -> Self {
        Self {
            // `type: ignore` comments are honored by default.
            respect_type_ignore_comments: true,
        }
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/place.rs | crates/ty_python_semantic/src/place.rs | use ruff_db::files::File;
use ruff_python_ast::PythonVersion;
use ty_module_resolver::{
KnownModule, Module, ModuleName, file_to_module, resolve_module_confident,
};
use crate::dunder_all::dunder_all_names;
use crate::semantic_index::definition::{Definition, DefinitionState};
use crate::semantic_index::place::{PlaceExprRef, ScopedPlaceId};
use crate::semantic_index::scope::ScopeId;
use crate::semantic_index::{
BindingWithConstraints, BindingWithConstraintsIterator, DeclarationsIterator, place_table,
};
use crate::semantic_index::{DeclarationWithConstraint, global_scope, use_def_map};
use crate::types::{
ApplyTypeMappingVisitor, DynamicType, KnownClass, MaterializationKind, MemberLookupPolicy,
Truthiness, Type, TypeAndQualifiers, TypeQualifiers, UnionBuilder, UnionType, binding_type,
declaration_type, todo_type,
};
use crate::{Db, FxOrderSet, Program};
pub(crate) use implicit_globals::{
module_type_implicit_global_declaration, module_type_implicit_global_symbol,
};
/// Whether a place is defined on every control-flow path that reaches the
/// lookup, or only on some of them.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, get_size2::GetSize)]
pub(crate) enum Definedness {
    /// The place is defined on all control-flow paths.
    AlwaysDefined,
    /// At least one control-flow path leaves the place undefined.
    PossiblyUndefined,
}
impl Definedness {
    /// Combine two definedness values; `AlwaysDefined` wins over
    /// `PossiblyUndefined`.
    pub(crate) const fn max(self, other: Self) -> Self {
        if matches!(self, Definedness::AlwaysDefined)
            || matches!(other, Definedness::AlwaysDefined)
        {
            Definedness::AlwaysDefined
        } else {
            Definedness::PossiblyUndefined
        }
    }
}
/// How a place's type was obtained: from an explicit declaration or inferred
/// from its bindings.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, get_size2::GetSize)]
pub(crate) enum TypeOrigin {
    /// The type comes from an explicit declaration.
    Declared,
    /// The type was inferred from bindings.
    Inferred,
}
impl TypeOrigin {
    /// Whether this is [`TypeOrigin::Declared`].
    pub(crate) const fn is_declared(self) -> bool {
        match self {
            TypeOrigin::Declared => true,
            TypeOrigin::Inferred => false,
        }
    }

    /// Combine two origins: the result is `Declared` only when both inputs are.
    pub(crate) const fn merge(self, other: Self) -> Self {
        if self.is_declared() && other.is_declared() {
            TypeOrigin::Declared
        } else {
            TypeOrigin::Inferred
        }
    }
}
/// Whether a place's type should be widened with `Unknown` when accessed publicly.
///
/// For undeclared public symbols (e.g., class attributes without type annotations),
/// the gradual typing guarantee requires that we consider them as potentially
/// modified externally, so their type is widened to a union with `Unknown`.
///
/// This enum tracks whether such widening should be applied, allowing callers
/// to access either the raw inferred type or the widened public type.
///
/// The default is [`Widening::None`].
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Default, get_size2::GetSize)]
pub(crate) enum Widening {
    /// The type should not be widened with `Unknown`.
    #[default]
    None,
    /// The type should be widened with `Unknown` when accessed publicly.
    WithUnknown,
}
impl Widening {
    /// Widen `ty` to `Unknown | ty` if widening was requested; otherwise return
    /// it unchanged.
    pub(crate) fn apply_if_needed<'db>(self, db: &'db dyn Db, ty: Type<'db>) -> Type<'db> {
        if let Self::WithUnknown = self {
            UnionType::from_elements(db, [Type::unknown(), ty])
        } else {
            ty
        }
    }
}
/// The result of a place lookup, which can either be a (possibly undefined) type
/// or a completely undefined place.
///
/// If a place has both a binding and a declaration, the result of the binding is used.
///
/// Consider this example:
/// ```py
/// bound = 1
/// declared: int
///
/// if flag:
///     possibly_unbound = 2
///     possibly_undeclared: int
///
/// if flag:
///     bound_or_declared = 1
/// else:
///     bound_or_declared: int
/// ```
///
/// If we look up places in this scope, we would get the following results:
/// ```rs
/// bound: Place::Defined(Literal[1], TypeOrigin::Inferred, Definedness::AlwaysDefined, _),
/// declared: Place::Defined(int, TypeOrigin::Declared, Definedness::AlwaysDefined, _),
/// possibly_unbound: Place::Defined(Literal[2], TypeOrigin::Inferred, Definedness::PossiblyUndefined, _),
/// possibly_undeclared: Place::Defined(int, TypeOrigin::Declared, Definedness::PossiblyUndefined, _),
/// bound_or_declared: Place::Defined(Literal[1], TypeOrigin::Inferred, Definedness::PossiblyUndefined, _),
/// non_existent: Place::Undefined,
/// ```
#[derive(Debug, Clone, Copy, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
pub(crate) enum Place<'db> {
    /// The place is defined on at least one control-flow path; carries its type,
    /// the type's origin, its definedness, and the widening mode.
    Defined(Type<'db>, TypeOrigin, Definedness, Widening),
    /// No binding or declaration for the place exists in the scope.
    Undefined,
}
impl<'db> Place<'db> {
    /// Constructor that creates a [`Place`] with type origin [`TypeOrigin::Inferred`] and definedness [`Definedness::AlwaysDefined`].
    pub(crate) fn bound(ty: impl Into<Type<'db>>) -> Self {
        Place::Defined(
            ty.into(),
            TypeOrigin::Inferred,
            Definedness::AlwaysDefined,
            Widening::None,
        )
    }
    /// Constructor that creates a [`Place`] with type origin [`TypeOrigin::Declared`] and definedness [`Definedness::AlwaysDefined`].
    pub(crate) fn declared(ty: impl Into<Type<'db>>) -> Self {
        Place::Defined(
            ty.into(),
            TypeOrigin::Declared,
            Definedness::AlwaysDefined,
            Widening::None,
        )
    }
    /// Constructor that creates a [`Place`] with a [`crate::types::TodoType`] type
    /// and definedness [`Definedness::AlwaysDefined`].
    #[allow(unused_variables)] // Only unused in release builds
    pub(crate) fn todo(message: &'static str) -> Self {
        Place::Defined(
            todo_type!(message),
            TypeOrigin::Inferred,
            Definedness::AlwaysDefined,
            Widening::None,
        )
    }
    /// Returns `true` if the place is *definitely* undefined.
    pub(crate) fn is_undefined(&self) -> bool {
        matches!(self, Place::Undefined)
    }
    /// Returns the type of the place, ignoring possible undefinedness.
    ///
    /// If the place is *definitely* undefined, this function will return `None`. Otherwise,
    /// if there is at least one control-flow path where the place is defined, return the type.
    pub(crate) fn ignore_possibly_undefined(&self) -> Option<Type<'db>> {
        match self {
            Place::Defined(ty, _, _, _) => Some(*ty),
            Place::Undefined => None,
        }
    }
    /// Returns the type of the place without widening applied.
    ///
    /// The stored type is always the unwidened type. Widening (union with `Unknown`)
    /// is applied lazily when converting to `LookupResult`.
    pub(crate) fn unwidened_type(&self) -> Option<Type<'db>> {
        match self {
            Place::Defined(ty, _, _, _) => Some(*ty),
            Place::Undefined => None,
        }
    }
    /// Test-only convenience: unwrap the type, panicking on an undefined place.
    #[cfg(test)]
    #[track_caller]
    pub(crate) fn expect_type(self) -> Type<'db> {
        self.ignore_possibly_undefined()
            .expect("Expected a (possibly undefined) type, not an undefined place")
    }
    /// Transform the type of a defined place with `f`; `Undefined` passes through unchanged.
    #[must_use]
    pub(crate) fn map_type(self, f: impl FnOnce(Type<'db>) -> Type<'db>) -> Place<'db> {
        match self {
            Place::Defined(ty, origin, definedness, widening) => {
                Place::Defined(f(ty), origin, definedness, widening)
            }
            Place::Undefined => Place::Undefined,
        }
    }
    /// Set the widening mode for this place.
    #[must_use]
    pub(crate) fn with_widening(self, widening: Widening) -> Place<'db> {
        match self {
            Place::Defined(ty, origin, definedness, _) => {
                Place::Defined(ty, origin, definedness, widening)
            }
            Place::Undefined => Place::Undefined,
        }
    }
    /// Pair this place with a set of type qualifiers.
    #[must_use]
    pub(crate) fn with_qualifiers(self, qualifiers: TypeQualifiers) -> PlaceAndQualifiers<'db> {
        PlaceAndQualifiers {
            place: self,
            qualifiers,
        }
    }
    /// Try to call `__get__(None, owner)` on the type of this place (not on the meta type).
    /// If it succeeds, return the `__get__` return type. Otherwise, returns the original place.
    /// This is used to resolve (potential) descriptor attributes.
    pub(crate) fn try_call_dunder_get(self, db: &'db dyn Db, owner: Type<'db>) -> Place<'db> {
        match self {
            // Distribute the descriptor lookup element-wise over unions…
            Place::Defined(Type::Union(union), origin, definedness, widening) => union
                .map_with_boundness(db, |elem| {
                    Place::Defined(*elem, origin, definedness, widening)
                        .try_call_dunder_get(db, owner)
                }),
            // …and intersections.
            Place::Defined(Type::Intersection(intersection), origin, definedness, widening) => {
                intersection.map_with_boundness(db, |elem| {
                    Place::Defined(*elem, origin, definedness, widening)
                        .try_call_dunder_get(db, owner)
                })
            }
            Place::Defined(self_ty, origin, definedness, widening) => {
                if let Some((dunder_get_return_ty, _)) =
                    self_ty.try_call_dunder_get(db, Type::none(db), owner)
                {
                    Place::Defined(dunder_get_return_ty, origin, definedness, widening)
                } else {
                    self
                }
            }
            Place::Undefined => Place::Undefined,
        }
    }
    /// Returns `true` if the place is defined on all control-flow paths.
    pub(crate) const fn is_definitely_bound(&self) -> bool {
        matches!(self, Place::Defined(_, _, Definedness::AlwaysDefined, _))
    }
}
impl<'db> From<LookupResult<'db>> for PlaceAndQualifiers<'db> {
    /// Convert a lookup result back into a place + qualifiers.
    ///
    /// NOTE(review): both defined arms below reconstruct the place with
    /// `TypeOrigin::Inferred` (via `Place::bound` or explicitly), discarding the
    /// origin stored in the `TypeAndQualifiers` — confirm this is intentional.
    fn from(value: LookupResult<'db>) -> Self {
        match value {
            Ok(type_and_qualifiers) => Place::bound(type_and_qualifiers.inner_type())
                .with_qualifiers(type_and_qualifiers.qualifiers()),
            Err(LookupError::Undefined(qualifiers)) => Place::Undefined.with_qualifiers(qualifiers),
            Err(LookupError::PossiblyUndefined(type_and_qualifiers)) => Place::Defined(
                type_and_qualifiers.inner_type(),
                TypeOrigin::Inferred,
                Definedness::PossiblyUndefined,
                Widening::None,
            )
            .with_qualifiers(type_and_qualifiers.qualifiers()),
        }
    }
}
/// Possible ways in which a place lookup can (possibly or definitely) fail.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub(crate) enum LookupError<'db> {
    /// The place is undefined on every control-flow path.
    Undefined(TypeQualifiers),
    /// The place is defined on some, but not all, control-flow paths.
    PossiblyUndefined(TypeAndQualifiers<'db>),
}
impl<'db> LookupError<'db> {
    /// Fallback (wholly or partially) to `fallback` to create a new [`LookupResult`].
    pub(crate) fn or_fall_back_to(
        self,
        db: &'db dyn Db,
        fallback: PlaceAndQualifiers<'db>,
    ) -> LookupResult<'db> {
        let fallback = fallback.into_lookup_result(db);
        match (&self, &fallback) {
            // `self` is definitely undefined: the fallback fully replaces it.
            (LookupError::Undefined(_), _) => fallback,
            // The fallback is definitely undefined: it contributes nothing.
            (LookupError::PossiblyUndefined { .. }, Err(LookupError::Undefined(_))) => Err(self),
            // `self` is possibly undefined, the fallback is definitely defined:
            // the union of both is definitely defined.
            (LookupError::PossiblyUndefined(ty), Ok(ty2)) => Ok(TypeAndQualifiers::new(
                UnionType::from_elements(db, [ty.inner_type(), ty2.inner_type()]),
                ty.origin().merge(ty2.origin()),
                ty.qualifiers().union(ty2.qualifiers()),
            )),
            // Both are possibly undefined: the union of both is still possibly undefined.
            (LookupError::PossiblyUndefined(ty), Err(LookupError::PossiblyUndefined(ty2))) => {
                Err(LookupError::PossiblyUndefined(TypeAndQualifiers::new(
                    UnionType::from_elements(db, [ty.inner_type(), ty2.inner_type()]),
                    ty.origin().merge(ty2.origin()),
                    ty.qualifiers().union(ty2.qualifiers()),
                )))
            }
        }
    }
}
/// A [`Result`] type in which the `Ok` variant represents a definitely defined place
/// and the `Err` variant represents a place that is either definitely or possibly undefined.
///
/// Note that this type is exactly isomorphic to [`Place`].
/// In the future, we could possibly consider removing `Place` and using this type everywhere instead.
pub(crate) type LookupResult<'db> = Result<TypeAndQualifiers<'db>, LookupError<'db>>;
/// Infer the public type of a symbol (its type as seen from outside its scope) in the given
/// `scope`.
///
/// Thin wrapper around `symbol_impl` that does not require explicit re-exports.
#[allow(unused)]
pub(crate) fn symbol<'db>(
    db: &'db dyn Db,
    scope: ScopeId<'db>,
    name: &str,
    considered_definitions: ConsideredDefinitions,
) -> PlaceAndQualifiers<'db> {
    symbol_impl(
        db,
        scope,
        name,
        RequiresExplicitReExport::No,
        considered_definitions,
    )
}
/// Infer the public type of a place (its type as seen from outside its scope) in the given
/// `scope`.
///
/// Thin wrapper around `place_impl` that does not require explicit re-exports.
pub(crate) fn place<'db>(
    db: &'db dyn Db,
    scope: ScopeId<'db>,
    member: PlaceExprRef,
    considered_definitions: ConsideredDefinitions,
) -> PlaceAndQualifiers<'db> {
    place_impl(
        db,
        scope,
        member,
        RequiresExplicitReExport::No,
        considered_definitions,
    )
}
/// Infers the public type of an explicit module-global symbol as seen from within the same file.
///
/// Note that all global scopes also include various "implicit globals" such as `__name__`,
/// `__doc__` and `__file__`. This function **does not** consider those symbols; it will return
/// `Place::Undefined` for them. Use the (currently test-only) `global_symbol` query to also include
/// those additional symbols.
///
/// All reachable definitions are considered (`ConsideredDefinitions::AllReachable`),
/// not just those live at the end of the scope.
///
/// Use [`imported_symbol`] to perform the lookup as seen from outside the file (e.g. via imports).
pub(crate) fn explicit_global_symbol<'db>(
    db: &'db dyn Db,
    file: File,
    name: &str,
) -> PlaceAndQualifiers<'db> {
    symbol_impl(
        db,
        global_scope(db, file),
        name,
        RequiresExplicitReExport::No,
        ConsideredDefinitions::AllReachable,
    )
}
/// Infers the public type of an explicit module-global symbol as seen from within the same file.
///
/// Unlike [`explicit_global_symbol`], this function also considers various "implicit globals"
/// such as `__name__`, `__doc__` and `__file__`. These are looked up as attributes on `types.ModuleType`
/// rather than being looked up as symbols explicitly defined/declared in the global scope.
///
/// The `types.ModuleType` fallback is evaluated lazily: it only runs when the explicit
/// lookup is (possibly) undefined.
///
/// Use [`imported_symbol`] to perform the lookup as seen from outside the file (e.g. via imports).
#[allow(unused)]
pub(crate) fn global_symbol<'db>(
    db: &'db dyn Db,
    file: File,
    name: &str,
) -> PlaceAndQualifiers<'db> {
    explicit_global_symbol(db, file, name)
        .or_fall_back_to(db, || module_type_implicit_global_symbol(db, name))
}
/// Infers the public type of an imported symbol.
///
/// If `requires_explicit_reexport` is [`None`], it will be inferred from the file's source type.
/// For stub files, explicit re-export will be required, while for non-stub files, it will not.
pub(crate) fn imported_symbol<'db>(
db: &'db dyn Db,
file: File,
name: &str,
requires_explicit_reexport: Option<RequiresExplicitReExport>,
) -> PlaceAndQualifiers<'db> {
let requires_explicit_reexport = requires_explicit_reexport.unwrap_or_else(|| {
if file.is_stub(db) {
RequiresExplicitReExport::Yes
} else {
RequiresExplicitReExport::No
}
});
// If it's not found in the global scope, check if it's present as an instance on
// `types.ModuleType` or `builtins.object`.
//
// We do a more limited version of this in `module_type_implicit_global_symbol`,
// but there are two crucial differences here:
// - If a member is looked up as an attribute, `__init__` is also available on the module, but
// it isn't available as a global from inside the module
// - If a member is looked up as an attribute, members on `builtins.object` are also available
// (because `types.ModuleType` inherits from `object`); these attributes are also not
// available as globals from inside the module.
//
// The same way as in `module_type_implicit_global_symbol`, however, we need to be careful to
// ignore `__getattr__`. Typeshed has a fake `__getattr__` on `types.ModuleType` to help out with
// dynamic imports; we shouldn't use it for `ModuleLiteral` types where we know exactly which
// module we're dealing with.
symbol_impl(
db,
global_scope(db, file),
name,
requires_explicit_reexport,
ConsideredDefinitions::EndOfScope,
)
.or_fall_back_to(db, || {
if name == "__getattr__" {
Place::Undefined.into()
} else if name == "__builtins__" {
Place::bound(Type::any()).into()
} else {
KnownClass::ModuleType
.to_instance(db)
.member_lookup_with_policy(db, name.into(), MemberLookupPolicy::NO_GETATTR_LOOKUP)
}
})
}
/// Lookup the type of `symbol` in the builtins namespace.
///
/// Returns `Place::Undefined` if the `builtins` module isn't available for some reason.
///
/// Note that this function is only intended for use in the context of the builtins *namespace*
/// and should not be used when a symbol is being explicitly imported from the `builtins` module
/// (e.g. `from builtins import int`).
pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> PlaceAndQualifiers<'db> {
    // Look up `symbol` in the global scope of `module`; yields `None` when the
    // module has no file or the symbol is definitely undefined there.
    let resolver = |module: Module<'_>| {
        let file = module.file(db)?;
        let found_symbol = symbol_impl(
            db,
            global_scope(db, file),
            symbol,
            RequiresExplicitReExport::Yes,
            ConsideredDefinitions::EndOfScope,
        )
        .or_fall_back_to(db, || {
            // We're looking up in the builtins namespace and not the module, so we should
            // do the normal lookup in `types.ModuleType` and not the special one as in
            // `imported_symbol`.
            module_type_implicit_global_symbol(db, symbol)
        });
        // If this symbol is not present in project-level builtins, search in the default ones.
        found_symbol
            .ignore_possibly_undefined()
            .map(|_| found_symbol)
    };
    // A project-level `__builtins__` module takes precedence over the standard
    // `builtins` module.
    resolve_module_confident(db, &ModuleName::new_static("__builtins__").unwrap())
        .and_then(&resolver)
        .or_else(|| resolve_module_confident(db, &KnownModule::Builtins.name()).and_then(resolver))
        .unwrap_or_default()
}
/// Lookup the type of `symbol` in a given known module.
///
/// Returns `Place::Undefined` if the given known module cannot be resolved for some reason.
pub(crate) fn known_module_symbol<'db>(
db: &'db dyn Db,
known_module: KnownModule,
symbol: &str,
) -> PlaceAndQualifiers<'db> {
resolve_module_confident(db, &known_module.name())
.and_then(|module| {
let file = module.file(db)?;
Some(imported_symbol(db, file, symbol, None))
})
.unwrap_or_default()
}
/// Lookup the type of `symbol` in the `typing` module namespace.
///
/// Returns `Place::Undefined` if the `typing` module isn't available for some reason.
///
/// Only available in test builds (`#[cfg(test)]`).
#[inline]
#[cfg(test)]
pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> PlaceAndQualifiers<'db> {
    known_module_symbol(db, KnownModule::Typing, symbol)
}
/// Lookup the type of `symbol` in the `typing_extensions` module namespace.
///
/// Returns `Place::Undefined` if the `typing_extensions` module isn't available for some reason.
///
/// Delegates to [`known_module_symbol`].
#[inline]
pub(crate) fn typing_extensions_symbol<'db>(
    db: &'db dyn Db,
    symbol: &str,
) -> PlaceAndQualifiers<'db> {
    known_module_symbol(db, KnownModule::TypingExtensions, symbol)
}
/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
///
/// Delegates to [`core_module_scope`] with [`KnownModule::Builtins`].
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    core_module_scope(db, KnownModule::Builtins)
}
/// Resolve the global scope of a core stdlib module.
///
/// Yields `None` if a custom typeshed is used that is missing the core module
/// in question.
fn core_module_scope(db: &dyn Db, core_module: KnownModule) -> Option<ScopeId<'_>> {
    resolve_module_confident(db, &core_module.name())
        .and_then(|module| module.file(db))
        .map(|file| global_scope(db, file))
}
/// Infer the combined type from an iterator of bindings, and return it
/// together with boundness information in a [`Place`].
///
/// The type will be a union if there are multiple bindings with different types.
///
/// Explicit re-exports are not required here (`RequiresExplicitReExport::No`).
pub(super) fn place_from_bindings<'db>(
    db: &'db dyn Db,
    bindings_with_constraints: BindingWithConstraintsIterator<'_, 'db>,
) -> PlaceWithDefinition<'db> {
    place_from_bindings_impl(db, bindings_with_constraints, RequiresExplicitReExport::No)
}
/// Build a declared type from a [`DeclarationsIterator`].
///
/// If there is only one declaration, or all declarations declare the same type, the
/// result carries no conflicting types. If there are conflicting declarations, the
/// result additionally records the set of conflicting types (see
/// [`PlaceFromDeclarationsResult`]).
///
/// This function also returns declaredness information (see [`Place`]) and a set of
/// [`TypeQualifiers`] that have been specified on the declaration(s).
pub(crate) fn place_from_declarations<'db>(
    db: &'db dyn Db,
    declarations: DeclarationsIterator<'_, 'db>,
) -> PlaceFromDeclarationsResult<'db> {
    place_from_declarations_impl(db, declarations, RequiresExplicitReExport::No)
}
/// A declared type together with the set of conflicting declared types, if any.
type DeclaredTypeAndConflictingTypes<'db> = (
    TypeAndQualifiers<'db>,
    Option<Box<indexmap::set::Slice<Type<'db>>>>,
);
/// The result of looking up a declared type from declarations; see [`place_from_declarations`].
pub(crate) struct PlaceFromDeclarationsResult<'db> {
    // The declared place (type, definedness, …) and its type qualifiers.
    place_and_quals: PlaceAndQualifiers<'db>,
    // The set of conflicting declared types, if the declarations disagree.
    conflicting_types: Option<Box<indexmap::set::Slice<Type<'db>>>>,
    /// Contains the first reachable declaration for this place, if any.
    /// This field is used for backreferences in diagnostics.
    pub(crate) first_declaration: Option<Definition<'db>>,
}
impl<'db> PlaceFromDeclarationsResult<'db> {
    /// Build a result that records a set of conflicting declared types.
    fn conflict(
        place_and_quals: PlaceAndQualifiers<'db>,
        conflicting_types: Box<indexmap::set::Slice<Type<'db>>>,
        first_declaration: Option<Definition<'db>>,
    ) -> Self {
        Self {
            place_and_quals,
            conflicting_types: Some(conflicting_types),
            first_declaration,
        }
    }

    /// Return the place and qualifiers, discarding any conflict information.
    pub(crate) fn ignore_conflicting_declarations(self) -> PlaceAndQualifiers<'db> {
        self.place_and_quals
    }

    /// Split the result into the place/qualifiers and the conflicting types (if any).
    pub(crate) fn into_place_and_conflicting_declarations(
        self,
    ) -> (
        PlaceAndQualifiers<'db>,
        Option<Box<indexmap::set::Slice<Type<'db>>>>,
    ) {
        let Self {
            place_and_quals,
            conflicting_types,
            ..
        } = self;
        (place_and_quals, conflicting_types)
    }
}
/// A type with declaredness information, and a set of type qualifiers.
///
/// This is used to represent the result of looking up the declared type. Consider this
/// example:
/// ```py
/// class C:
///     if flag:
///         variable: ClassVar[int]
/// ```
/// If we look up the declared type of `variable` in the scope of class `C`, we will get
/// the type `int`, a "declaredness" of [`Definedness::PossiblyUndefined`], and the information
/// that this comes with a [`CLASS_VAR`] type qualifier.
///
/// [`CLASS_VAR`]: crate::types::TypeQualifiers::CLASS_VAR
#[derive(Debug, Clone, Copy, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
pub(crate) struct PlaceAndQualifiers<'db> {
    /// The (possibly undefined) place.
    pub(crate) place: Place<'db>,
    /// The type qualifiers (`ClassVar`, `Final`, …) attached to the declaration(s).
    pub(crate) qualifiers: TypeQualifiers,
}
impl Default for PlaceAndQualifiers<'_> {
    /// An undefined place with an empty qualifier set.
    fn default() -> Self {
        Self {
            qualifiers: TypeQualifiers::empty(),
            place: Place::Undefined,
        }
    }
}
impl<'db> PlaceAndQualifiers<'db> {
    /// Constructor that creates a [`PlaceAndQualifiers`] instance with a [`TodoType`] type
    /// and no qualifiers.
    ///
    /// [`TodoType`]: crate::types::TodoType
    pub(crate) fn todo(message: &'static str) -> Self {
        Self {
            place: Place::todo(message),
            qualifiers: TypeQualifiers::empty(),
        }
    }
    /// An undefined place with no qualifiers (same as `Self::default()`).
    pub(crate) fn unbound() -> Self {
        PlaceAndQualifiers {
            place: Place::Undefined,
            qualifiers: TypeQualifiers::empty(),
        }
    }
    /// Returns `true` if the place is definitely undefined.
    pub(crate) fn is_undefined(&self) -> bool {
        self.place.is_undefined()
    }
    /// Returns the type of the place, ignoring possible undefinedness
    /// (see [`Place::ignore_possibly_undefined`]).
    pub(crate) fn ignore_possibly_undefined(&self) -> Option<Type<'db>> {
        self.place.ignore_possibly_undefined()
    }
    /// Returns `true` if the place has a `ClassVar` type qualifier.
    pub(crate) fn is_class_var(&self) -> bool {
        self.qualifiers.contains(TypeQualifiers::CLASS_VAR)
    }
    /// Returns `true` if the place has a `InitVar` type qualifier.
    pub(crate) fn is_init_var(&self) -> bool {
        self.qualifiers.contains(TypeQualifiers::INIT_VAR)
    }
    /// Returns `true` if the place has a `Required` type qualifier.
    pub(crate) fn is_required(&self) -> bool {
        self.qualifiers.contains(TypeQualifiers::REQUIRED)
    }
    /// Returns `true` if the place has a `NotRequired` type qualifier.
    pub(crate) fn is_not_required(&self) -> bool {
        self.qualifiers.contains(TypeQualifiers::NOT_REQUIRED)
    }
    /// Returns `true` if the place has a `ReadOnly` type qualifier.
    pub(crate) fn is_read_only(&self) -> bool {
        self.qualifiers.contains(TypeQualifiers::READ_ONLY)
    }
    /// Returns `Some(…)` if the place is qualified with `typing.Final` without a specified type.
    ///
    /// A "bare" `Final` is recognized by the combination of the `FINAL` qualifier
    /// and an `Unknown` (i.e. unspecified) type.
    pub(crate) fn is_bare_final(&self) -> Option<TypeQualifiers> {
        match self {
            PlaceAndQualifiers { place, qualifiers }
                if (qualifiers.contains(TypeQualifiers::FINAL)
                    && place
                        .ignore_possibly_undefined()
                        .is_some_and(|ty| ty.is_unknown())) =>
            {
                Some(*qualifiers)
            }
            _ => None,
        }
    }
    /// Transform the place's type with `f`, keeping the qualifiers unchanged.
    #[must_use]
    pub(crate) fn map_type(
        self,
        f: impl FnOnce(Type<'db>) -> Type<'db>,
    ) -> PlaceAndQualifiers<'db> {
        PlaceAndQualifiers {
            place: self.place.map_type(f),
            qualifiers: self.qualifiers,
        }
    }
    /// Apply the given materialization to the place's type, keeping the qualifiers.
    pub(crate) fn materialize(
        self,
        db: &'db dyn Db,
        materialization_kind: MaterializationKind,
        visitor: &ApplyTypeMappingVisitor<'db>,
    ) -> PlaceAndQualifiers<'db> {
        self.map_type(|ty| ty.materialize(db, materialization_kind, visitor))
    }
    /// Transform place and qualifiers into a [`LookupResult`],
    /// a [`Result`] type in which the `Ok` variant represents a definitely defined place
    /// and the `Err` variant represents a place that is either definitely or possibly undefined.
    ///
    /// For places marked with `Widening::WithUnknown`, this applies the gradual typing guarantee
    /// by creating a union with `Unknown`.
    pub(crate) fn into_lookup_result(self, db: &'db dyn Db) -> LookupResult<'db> {
        match self {
            PlaceAndQualifiers {
                place: Place::Defined(ty, origin, Definedness::AlwaysDefined, widening),
                qualifiers,
            } => {
                let ty = widening.apply_if_needed(db, ty);
                Ok(TypeAndQualifiers::new(ty, origin, qualifiers))
            }
            PlaceAndQualifiers {
                place: Place::Defined(ty, origin, Definedness::PossiblyUndefined, widening),
                qualifiers,
            } => {
                let ty = widening.apply_if_needed(db, ty);
                Err(LookupError::PossiblyUndefined(TypeAndQualifiers::new(
                    ty, origin, qualifiers,
                )))
            }
            PlaceAndQualifiers {
                place: Place::Undefined,
                qualifiers,
            } => Err(LookupError::Undefined(qualifiers)),
        }
    }
    /// Safely unwrap the place and the qualifiers into a [`TypeAndQualifiers`].
    ///
    /// If the place is definitely unbound or possibly unbound, it will be transformed into a
    /// [`LookupError`] and `diagnostic_fn` will be applied to the error value before returning
    /// the result of `diagnostic_fn` (which will be a [`TypeAndQualifiers`]). This allows the caller
    /// to ensure that a diagnostic is emitted if the place is possibly or definitely unbound.
    pub(crate) fn unwrap_with_diagnostic(
        self,
        db: &'db dyn Db,
        diagnostic_fn: impl FnOnce(LookupError<'db>) -> TypeAndQualifiers<'db>,
    ) -> TypeAndQualifiers<'db> {
        self.into_lookup_result(db).unwrap_or_else(diagnostic_fn)
    }
    /// Fallback (partially or fully) to another place if `self` is partially or fully unbound.
    ///
    /// 1. If `self` is definitely bound, return `self` without evaluating `fallback_fn()`.
    /// 2. Else, evaluate `fallback_fn()`:
    ///    1. If `self` is definitely unbound, return the result of `fallback_fn()`.
    ///    2. Else, if `fallback` is definitely unbound, return `self`.
    ///    3. Else, if `self` is possibly unbound and `fallback` is definitely bound,
    ///       return `Place(<union of self-type and fallback-type>, Definedness::AlwaysDefined)`
    ///    4. Else, if `self` is possibly unbound and `fallback` is possibly unbound,
    ///       return `Place(<union of self-type and fallback-type>, Definedness::PossiblyUndefined)`
    #[must_use]
    pub(crate) fn or_fall_back_to(
        self,
        db: &'db dyn Db,
        fallback_fn: impl FnOnce() -> PlaceAndQualifiers<'db>,
    ) -> Self {
        self.into_lookup_result(db)
            .or_else(|lookup_error| lookup_error.or_fall_back_to(db, fallback_fn()))
            .into()
    }
    /// Normalize the result of one fixed-point (cycle) iteration against the
    /// value from the previous iteration; see the inline comments for the
    /// monotonicity argument. The qualifiers of the *current* iteration are kept.
    pub(crate) fn cycle_normalized(
        self,
        db: &'db dyn Db,
        previous_place: Self,
        cycle: &salsa::Cycle,
    ) -> Self {
        let place = match (previous_place.place, self.place) {
            // In fixed-point iteration of type inference, the member type must be monotonically widened and not "oscillate".
            // Here, monotonicity is guaranteed by pre-unioning the type of the previous iteration into the current result.
            (
                Place::Defined(prev_ty, _, _, _),
                Place::Defined(ty, origin, definedness, widening),
            ) => Place::Defined(
                ty.cycle_normalized(db, prev_ty, cycle),
                origin,
                definedness,
                widening,
            ),
            // If a `Place` in the current cycle is `Defined` but `Undefined` in the previous cycle,
            // that means that its definedness depends on the truthiness of the previous cycle value.
            // In this case, the definedness of the current cycle `Place` is set to `PossiblyUndefined`.
            // Actually, this branch is unreachable. We evaluate the truthiness of non-definitely-bound places as Ambiguous (see #19579),
            // so convergence is guaranteed without resorting to this handling.
            // However, the handling described above may reduce the exactness of reachability analysis,
            // so it may be better to remove it. In that case, this branch is necessary.
            (Place::Undefined, Place::Defined(ty, origin, _definedness, widening)) => {
                Place::Defined(
                    ty.recursive_type_normalized(db, cycle),
                    origin,
                    Definedness::PossiblyUndefined,
                    widening,
                )
            }
            // If a `Place` that was `Defined(Divergent)` in the previous cycle is actually found to be unreachable in the current cycle,
            // it is set to `Undefined` (because the cycle initial value does not include meaningful reachability information).
            (Place::Defined(ty, origin, _definedness, widening), Place::Undefined) => {
                if cycle.head_ids().any(|id| ty == Type::divergent(id)) {
                    Place::Undefined
                } else {
                    Place::Defined(
                        ty.recursive_type_normalized(db, cycle),
                        origin,
                        Definedness::PossiblyUndefined,
                        widening,
                    )
                }
            }
            (Place::Undefined, Place::Undefined) => Place::Undefined,
        };
        PlaceAndQualifiers {
            place,
            qualifiers: self.qualifiers,
        }
    }
}
impl<'db> From<Place<'db>> for PlaceAndQualifiers<'db> {
fn from(place: Place<'db>) -> Self {
place.with_qualifiers(TypeQualifiers::empty())
}
}
fn place_cycle_initial<'db>(
_db: &'db dyn Db,
id: salsa::Id,
_scope: ScopeId<'db>,
_place_id: ScopedPlaceId,
_requires_explicit_reexport: RequiresExplicitReExport,
_considered_definitions: ConsideredDefinitions,
) -> PlaceAndQualifiers<'db> {
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/program.rs | crates/ty_python_semantic/src/program.rs | use std::sync::Arc;
use crate::Db;
use crate::python_platform::PythonPlatform;
use ruff_db::diagnostic::Span;
use ruff_db::files::system_path_to_file;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_python_ast::PythonVersion;
use ruff_text_size::TextRange;
use salsa::Durability;
use salsa::Setter;
use ty_module_resolver::SearchPaths;
/// The singleton salsa input holding the resolved program-wide settings:
/// Python version, target platform, and module-resolution search paths.
#[salsa::input(singleton, heap_size=ruff_memory_usage::heap_size)]
pub struct Program {
    /// The target Python version, together with where it was configured.
    #[returns(ref)]
    pub python_version_with_source: PythonVersionWithSource,
    /// The target Python platform.
    #[returns(ref)]
    pub python_platform: PythonPlatform,
    /// The search paths used for module resolution.
    #[returns(ref)]
    pub search_paths: SearchPaths,
}
impl Program {
    /// Update the existing `Program` singleton from `settings`, or create it if
    /// none exists yet.
    pub fn init_or_update(db: &mut dyn Db, settings: ProgramSettings) -> Self {
        match Self::try_get(db) {
            Some(program) => {
                program.update_from_settings(db, settings);
                program
            }
            None => Self::from_settings(db, settings),
        }
    }
    /// Create the `Program` singleton from `settings`.
    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> Self {
        let ProgramSettings {
            python_version,
            python_platform,
            search_paths,
        } = settings;
        search_paths.try_register_static_roots(db);
        // Program settings change rarely, so the input is created with high durability.
        Program::builder(python_version, python_platform, search_paths)
            .durability(Durability::HIGH)
            .new(db)
    }
    /// The resolved target Python version.
    pub fn python_version(self, db: &dyn Db) -> PythonVersion {
        self.python_version_with_source(db).version
    }
    /// Apply `settings`, only writing the salsa inputs whose value actually
    /// changed (avoids unnecessary invalidation of dependent queries).
    pub fn update_from_settings(self, db: &mut dyn Db, settings: ProgramSettings) {
        let ProgramSettings {
            python_version,
            python_platform,
            search_paths,
        } = settings;
        if self.search_paths(db) != &search_paths {
            tracing::debug!("Updating search paths");
            search_paths.try_register_static_roots(db);
            self.set_search_paths(db).to(search_paths);
        }
        if &python_platform != self.python_platform(db) {
            tracing::debug!("Updating python platform: `{python_platform:?}`");
            self.set_python_platform(db).to(python_platform);
        }
        if &python_version != self.python_version_with_source(db) {
            tracing::debug!(
                "Updating python version: Python {version}",
                version = python_version.version
            );
            self.set_python_version_with_source(db).to(python_version);
        }
    }
    /// The custom typeshed's stdlib path, if one is configured.
    pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
        self.search_paths(db).custom_stdlib()
    }
}
/// The settings from which a [`Program`] is created or updated.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    /// The target Python version and where it was configured.
    pub python_version: PythonVersionWithSource,
    /// The target Python platform.
    pub python_platform: PythonPlatform,
    /// The module-resolution search paths.
    pub search_paths: SearchPaths,
}
/// Where the resolved Python version came from.
#[derive(Clone, Debug, Eq, PartialEq, Default, get_size2::GetSize)]
pub enum PythonVersionSource {
    /// Value loaded from a project's configuration file.
    ConfigFile(PythonVersionFileSource),
    /// Value loaded from the `pyvenv.cfg` file of the virtual environment.
    /// The virtual environment might have been configured, activated or inferred.
    PyvenvCfgFile(PythonVersionFileSource),
    /// Value inferred from the layout of the Python installation.
    ///
    /// This only ever applies on Unix. On Unix, the `site-packages` directory
    /// will always be at `sys.prefix/lib/pythonX.Y/site-packages`,
    /// so we can infer the Python version from the parent directory of `site-packages`.
    InstallationDirectoryLayout { site_packages_parent_dir: Box<str> },
    /// The value comes from a CLI argument, while it's left open if specified using a short argument,
    /// a long argument or `--config key=value`.
    Cli,
    /// The value comes from the user's editor,
    /// while it's left open if specified as a setting
    /// or if the value was auto-discovered by the editor
    /// (e.g., the Python environment)
    Editor,
    /// We fell back to a default value because the value was not specified via the CLI or a config file.
    #[default]
    Default,
}
/// Information regarding the file and [`TextRange`] of the configuration
/// from which we inferred the Python version.
#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
pub struct PythonVersionFileSource {
    // The configuration file the version was read from.
    path: Arc<SystemPathBuf>,
    // The range of the version setting within that file, if known.
    range: Option<TextRange>,
}
impl PythonVersionFileSource {
    /// Create a new source from the configuration file `path` and the optional
    /// `range` of the setting within that file.
    pub fn new(path: Arc<SystemPathBuf>, range: Option<TextRange>) -> Self {
        Self { path, range }
    }
    /// Attempt to resolve a [`Span`] that corresponds to the location of
    /// the configuration setting that specified the Python version.
    ///
    /// Useful for subdiagnostics when informing the user
    /// what the inferred Python version of their project is.
    ///
    /// Returns `None` if the configuration file can no longer be loaded.
    pub(crate) fn span(&self, db: &dyn Db) -> Option<Span> {
        let file = system_path_to_file(db, &*self.path).ok()?;
        Some(Span::from(file).with_optional_range(self.range))
    }
}
/// A Python version together with the [`PythonVersionSource`] it was derived from.
#[derive(Eq, PartialEq, Debug, Clone, get_size2::GetSize)]
pub struct PythonVersionWithSource {
    /// The resolved Python version.
    pub version: PythonVersion,
    /// Where the version was configured or inferred from.
    pub source: PythonVersionSource,
}
impl Default for PythonVersionWithSource {
    /// Falls back to the latest Python version supported by ty and marks the
    /// value as coming from the default (`PythonVersionSource::Default`).
    fn default() -> Self {
        let version = PythonVersion::latest_ty();
        let source = PythonVersionSource::Default;
        Self { version, source }
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/suppression.rs | crates/ty_python_semantic/src/suppression.rs | mod add_ignore;
mod parser;
mod unused;
use smallvec::SmallVec;
use std::fmt;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticId, IntoDiagnosticMessage, Severity, Span,
};
use ruff_db::{files::File, parsed::parsed_module, source::source_text};
use ruff_python_ast::token::TokenKind;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::diagnostic::DiagnosticGuard;
use crate::lint::{GetLintError, Level, LintMetadata, LintRegistry, LintStatus};
pub use crate::suppression::add_ignore::create_suppression_fix;
use crate::suppression::parser::{
ParseError, ParseErrorKind, SuppressionComment, SuppressionParser,
};
use crate::suppression::unused::check_unused_suppressions;
use crate::types::TypeCheckDiagnostics;
use crate::{Db, declare_lint, lint::LintId};
// Lint: a `type: ignore`/`ty: ignore` comment that no longer suppresses anything.
declare_lint! {
    /// ## What it does
    /// Checks for `type: ignore` or `ty: ignore` directives that are no longer applicable.
    ///
    /// ## Why is this bad?
    /// A `type: ignore` directive that no longer matches any diagnostic violations is likely
    /// included by mistake, and should be removed to avoid confusion.
    ///
    /// ## Examples
    /// ```py
    /// a = 20 / 2 # ty: ignore[division-by-zero]
    /// ```
    ///
    /// Use instead:
    ///
    /// ```py
    /// a = 20 / 2
    /// ```
    pub(crate) static UNUSED_IGNORE_COMMENT = {
        summary: "detects unused `type: ignore` comments",
        status: LintStatus::stable("0.0.1-alpha.1"),
        default_level: Level::Ignore,
    }
}

// Lint: a `ty: ignore[code]` whose `code` is not a known rule name.
declare_lint! {
    /// ## What it does
    /// Checks for `ty: ignore[code]` where `code` isn't a known lint rule.
    ///
    /// ## Why is this bad?
    /// A `ty: ignore[code]` directive with a `code` that doesn't match
    /// any known rule will not suppress any type errors, and is probably a mistake.
    ///
    /// ## Examples
    /// ```py
    /// a = 20 / 0 # ty: ignore[division-by-zer]
    /// ```
    ///
    /// Use instead:
    ///
    /// ```py
    /// a = 20 / 0 # ty: ignore[division-by-zero]
    /// ```
    pub(crate) static IGNORE_COMMENT_UNKNOWN_RULE = {
        summary: "detects `ty: ignore` comments that reference unknown rules",
        status: LintStatus::stable("0.0.1-alpha.1"),
        default_level: Level::Warn,
    }
}

// Lint: an ignore comment that is syntactically malformed.
declare_lint! {
    /// ## What it does
    /// Checks for `type: ignore` and `ty: ignore` comments that are syntactically incorrect.
    ///
    /// ## Why is this bad?
    /// A syntactically incorrect ignore comment is probably a mistake and is useless.
    ///
    /// ## Examples
    /// ```py
    /// a = 20 / 0 # type: ignoree
    /// ```
    ///
    /// Use instead:
    ///
    /// ```py
    /// a = 20 / 0 # type: ignore
    /// ```
    pub(crate) static INVALID_IGNORE_COMMENT = {
        summary: "detects ignore comments that use invalid syntax",
        status: LintStatus::stable("0.0.1-alpha.1"),
        default_level: Level::Warn,
    }
}
/// Extracts all suppression comments (`type: ignore` / `ty: ignore`) from `file`.
///
/// Walks the token stream, handing every comment token to the
/// [`SuppressionParser`] and accumulating the results in a
/// [`SuppressionsBuilder`]. `line_start` tracks the offset of the current
/// line so that a suppression can be associated with the whole line it
/// appears on.
#[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions {
    let parsed = parsed_module(db, file).load(db);
    let source = source_text(db, file);
    // When disabled via settings, `type: ignore` comments are skipped entirely
    // (both well-formed and malformed ones); `ty: ignore` is always honored.
    let respect_type_ignore = db.analysis_settings().respect_type_ignore_comments;
    let mut builder = SuppressionsBuilder::new(&source, db.lint_registry());
    let mut line_start = TextSize::default();
    for token in parsed.tokens() {
        // The builder needs to know whether any real code has been seen yet,
        // because leading `type: ignore` comments suppress the whole file.
        if !token.kind().is_trivia() {
            builder.set_seen_non_trivia_token();
        }
        match token.kind() {
            TokenKind::Comment => {
                let parser = SuppressionParser::new(&source, token.range());
                for comment in parser {
                    match comment {
                        Ok(comment) => {
                            if comment.kind().is_type_ignore() && !respect_type_ignore {
                                continue;
                            }
                            // The suppression covers everything from the start
                            // of the line through the end of the comment.
                            builder.add_comment(comment, TextRange::new(line_start, token.end()));
                        }
                        Err(error) => match error.kind {
                            ParseErrorKind::NotASuppression
                            | ParseErrorKind::CommentWithoutHash => {
                                // Skip non suppression comments and comments that miss a hash (shouldn't ever happen)
                            }
                            ParseErrorKind::NoWhitespaceAfterIgnore(kind)
                            | ParseErrorKind::CodesMissingComma(kind)
                            | ParseErrorKind::InvalidCode(kind)
                            | ParseErrorKind::CodesMissingClosingBracket(kind) => {
                                if kind.is_type_ignore() && !respect_type_ignore {
                                    continue;
                                }
                                builder.add_invalid_comment(kind, error);
                            }
                        },
                    }
                }
            }
            TokenKind::Newline | TokenKind::NonLogicalNewline => {
                // A new line begins right after the newline token.
                line_start = token.end();
            }
            _ => {}
        }
    }
    builder.finish()
}
/// Runs all suppression-related checks for `file` and returns the final
/// diagnostics, including any diagnostics about the suppressions themselves.
pub(crate) fn check_suppressions(
    db: &dyn Db,
    file: File,
    diagnostics: TypeCheckDiagnostics,
) -> Vec<Diagnostic> {
    let mut ctx = CheckSuppressionsContext::new(db, file, diagnostics);

    // Run the individual checks; unused-suppression detection runs after the
    // other checks (which may mark suppressions as used).
    check_unknown_rule(&mut ctx);
    check_invalid_suppression(&mut ctx);
    check_unused_suppressions(&mut ctx);

    ctx.diagnostics.into_inner().into_diagnostics()
}
/// Checks for `ty: ignore` comments that reference unknown rules.
fn check_unknown_rule(context: &mut CheckSuppressionsContext) {
    if context.is_lint_disabled(&IGNORE_COMMENT_UNKNOWN_RULE) {
        return;
    }

    for unknown in &context.suppressions.unknown {
        // `report_lint` returns `None` when the report itself is suppressed.
        let Some(builder) = context.report_lint(&IGNORE_COMMENT_UNKNOWN_RULE, unknown.range)
        else {
            continue;
        };
        builder.into_diagnostic(&unknown.reason);
    }
}
fn check_invalid_suppression(context: &mut CheckSuppressionsContext) {
if context.is_lint_disabled(&INVALID_IGNORE_COMMENT) {
return;
}
for invalid in &context.suppressions.invalid {
if let Some(diag) = context.report_lint(&INVALID_IGNORE_COMMENT, invalid.error.range) {
diag.into_diagnostic(format_args!(
"Invalid `{kind}` comment: {reason}",
kind = invalid.kind,
reason = &invalid.error
));
}
}
}
/// Shared state for the suppression checks run by `check_suppressions`.
struct CheckSuppressionsContext<'a> {
    db: &'a dyn Db,
    file: File,
    /// The suppressions extracted from `file` (via the salsa `suppressions` query).
    suppressions: &'a Suppressions,
    /// `RefCell` so that diagnostics can be added and suppressions marked as
    /// used through a shared `&self` borrow.
    diagnostics: std::cell::RefCell<TypeCheckDiagnostics>,
}
impl<'a> CheckSuppressionsContext<'a> {
    /// Creates a context for `file`, loading its suppressions from the database.
    fn new(db: &'a dyn Db, file: File, diagnostics: TypeCheckDiagnostics) -> Self {
        let suppressions = suppressions(db, file);
        Self {
            db,
            file,
            suppressions,
            diagnostics: diagnostics.into(),
        }
    }

    /// Returns `true` if `lint` is not enabled for this file's rule selection.
    fn is_lint_disabled(&self, lint: &'static LintMetadata) -> bool {
        !self
            .db
            .rule_selection(self.file)
            .is_enabled(LintId::of(lint))
    }

    /// Returns `true` if the suppression with `id` has been marked as used.
    fn is_suppression_used(&self, id: FileSuppressionId) -> bool {
        self.diagnostics.borrow().is_used(id)
    }

    /// Starts a diagnostic for `lint` at `range`, honoring suppressions:
    /// if a matching suppression exists, it is marked as used and `None`
    /// is returned (no diagnostic is emitted).
    fn report_lint<'ctx>(
        &'ctx self,
        lint: &'static LintMetadata,
        range: TextRange,
    ) -> Option<SuppressionDiagnosticGuardBuilder<'ctx, 'a>> {
        if let Some(suppression) = self.suppressions.find_suppression(range, LintId::of(lint)) {
            self.diagnostics.borrow_mut().mark_used(suppression.id());
            return None;
        }

        self.report_unchecked(lint, range)
    }

    /// Reports a diagnostic without checking if the lint at the given range is suppressed or marking
    /// the suppression as used.
    fn report_unchecked<'ctx>(
        &'ctx self,
        lint: &'static LintMetadata,
        range: TextRange,
    ) -> Option<SuppressionDiagnosticGuardBuilder<'ctx, 'a>> {
        SuppressionDiagnosticGuardBuilder::new(self, lint, range)
    }
}
/// A builder for constructing a diagnostic guard.
///
/// This type exists to separate the phases of "check if a diagnostic should
/// be reported" and "build the actual diagnostic."
pub(crate) struct SuppressionDiagnosticGuardBuilder<'ctx, 'db> {
    ctx: &'ctx CheckSuppressionsContext<'db>,
    /// The diagnostic ID derived from the lint's name.
    id: DiagnosticId,
    /// The primary range the diagnostic will be annotated with.
    range: TextRange,
    /// Severity resolved from the file's rule selection.
    severity: Severity,
}
impl<'ctx, 'db> SuppressionDiagnosticGuardBuilder<'ctx, 'db> {
    /// Creates a builder for `lint` at `range`.
    ///
    /// Returns `None` when the rule selection yields no severity for the
    /// lint (i.e. the rule is disabled for this file).
    fn new(
        ctx: &'ctx CheckSuppressionsContext<'db>,
        lint: &'static LintMetadata,
        range: TextRange,
    ) -> Option<Self> {
        let severity = ctx.db.rule_selection(ctx.file).severity(LintId::of(lint))?;
        Some(Self {
            ctx,
            id: DiagnosticId::Lint(lint.name()),
            severity,
            range,
        })
    }

    /// Create a new guard.
    ///
    /// This initializes a new diagnostic using the given message along with
    /// the ID and severity used to create this builder.
    ///
    /// The diagnostic can be further mutated on the guard via its `DerefMut`
    /// impl to `Diagnostic`.
    pub(crate) fn into_diagnostic(
        self,
        message: impl IntoDiagnosticMessage,
    ) -> DiagnosticGuard<'ctx> {
        let mut diag = Diagnostic::new(self.id, self.severity, message);
        // The primary annotation points at the range this builder was created for.
        let primary_span = Span::from(self.ctx.file).with_range(self.range);
        diag.annotate(Annotation::primary(primary_span));
        DiagnosticGuard::new(self.ctx.file, &self.ctx.diagnostics, diag)
    }
}
/// The suppressions of a single file.
#[derive(Debug, Eq, PartialEq, get_size2::GetSize)]
pub(crate) struct Suppressions {
    /// Suppressions that apply to the entire file.
    ///
    /// The suppressions are sorted by [`Suppression::comment_range`] and the [`Suppression::suppressed_range`]
    /// spans the entire file.
    ///
    /// For now, this is limited to `type: ignore` comments.
    file: SmallVec<[Suppression; 1]>,

    /// Suppressions that apply to a specific line (or lines).
    ///
    /// Comments with multiple codes create multiple [`Suppression`]s that all share the same [`Suppression::comment_range`].
    ///
    /// The suppressions are sorted by [`Suppression::range`] (which implies [`Suppression::comment_range`]).
    line: Vec<Suppression>,

    /// Suppressions with lint codes that are unknown.
    unknown: Vec<UnknownSuppression>,

    /// Suppressions that are syntactically invalid.
    invalid: Vec<InvalidSuppression>,
}
impl Suppressions {
    /// Returns the first suppression (file-level before line-level) that
    /// applies to `range` and matches the lint `id`, or `None`.
    pub(crate) fn find_suppression(&self, range: TextRange, id: LintId) -> Option<&Suppression> {
        self.lint_suppressions(range, id).next()
    }

    /// Returns all suppressions for the given lint
    fn lint_suppressions(
        &self,
        range: TextRange,
        id: LintId,
    ) -> impl Iterator<Item = &Suppression> + '_ {
        self.file
            .iter()
            .chain(self.line_suppressions(range))
            .filter(move |suppression| suppression.matches(id))
    }

    /// Returns the line-level suppressions that apply for `range`.
    ///
    /// A suppression applies for the given range if it contains the range's
    /// start or end offset. This means the suppression is on the same line
    /// as the diagnostic's start or end.
    fn line_suppressions(&self, range: TextRange) -> impl Iterator<Item = &Suppression> + '_ {
        // First find the index of the suppression comment that ends right before the range
        // starts. This allows us to skip suppressions that are not relevant for the range.
        let end_offset = self
            .line
            .binary_search_by_key(&range.start(), |suppression| {
                suppression.suppressed_range.end()
            })
            .unwrap_or_else(|index| index);

        // From here, search the remaining suppression comments for one that
        // contains the range's start or end offset. Stop the search
        // as soon as the suppression's range and the range no longer overlap.
        self.line[end_offset..]
            .iter()
            // Stop searching if the suppression starts after the range we're looking for.
            .take_while(move |suppression| range.end() >= suppression.suppressed_range.start())
            .filter(move |suppression| {
                // Don't use intersect to avoid that suppressions on inner-expression
                // ignore errors for outer expressions
                suppression.suppressed_range.contains(range.start())
                    || suppression.suppressed_range.contains(range.end())
            })
    }

    /// Iterates over all suppressions: file-level first, then line-level.
    fn iter(&self) -> SuppressionsIter<'_> {
        self.file.iter().chain(&self.line)
    }
}
/// Iterator over every suppression of a file: the file-level suppressions
/// followed by the line-level ones.
pub(crate) type SuppressionsIter<'a> =
    std::iter::Chain<std::slice::Iter<'a, Suppression>, std::slice::Iter<'a, Suppression>>;

impl<'a> IntoIterator for &'a Suppressions {
    type Item = &'a Suppression;
    type IntoIter = SuppressionsIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// A `type: ignore` or `ty: ignore` suppression.
///
/// Suppression comments that suppress multiple codes
/// create multiple suppressions: one for every code.
/// They all share the same `comment_range`.
#[derive(Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
pub(crate) struct Suppression {
    /// What this suppression silences: everything, a single lint, or nothing.
    target: SuppressionTarget,

    /// Whether this came from a `type: ignore` or a `ty: ignore` comment.
    kind: SuppressionKind,

    /// The range of the code in this suppression.
    ///
    /// This is the same as the `comment_range` for the
    /// targets [`SuppressionTarget::All`] and [`SuppressionTarget::Empty`].
    range: TextRange,

    /// The range of the suppression comment.
    ///
    /// This isn't the range of the entire comment if this is a nested comment:
    ///
    /// ```py
    /// a # ty: ignore # fmt: off
    /// ^^^^^^^^^^^^^
    /// ```
    ///
    /// It doesn't include the range of the nested `# fmt: off` comment.
    comment_range: TextRange,

    /// The range for which this suppression applies.
    /// Most of the time, this is the range of the comment's line.
    /// However, there are few cases where the range gets expanded to
    /// cover multiple lines:
    /// * multiline strings: `expr + """multiline\nstring""" # type: ignore`
    /// * line continuations: `expr \ + "test" # type: ignore`
    suppressed_range: TextRange,
}
impl Suppression {
    /// Returns `true` if this suppression silences the lint `tested_id`.
    fn matches(&self, tested_id: LintId) -> bool {
        match self.target {
            SuppressionTarget::Lint(suppressed_id) => suppressed_id == tested_id,
            SuppressionTarget::All => true,
            SuppressionTarget::Empty => false,
        }
    }

    /// A file-unique identifier for this suppression, derived from its
    /// exact source range.
    pub(crate) fn id(&self) -> FileSuppressionId {
        FileSuppressionId(self.range)
    }
}
/// The flavor of an ignore comment.
#[derive(Copy, Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
enum SuppressionKind {
    /// A `type: ignore` comment.
    TypeIgnore,
    /// A `ty: ignore` comment.
    Ty,
}
impl SuppressionKind {
const fn is_type_ignore(self) -> bool {
matches!(self, SuppressionKind::TypeIgnore)
}
fn len_utf8(self) -> usize {
match self {
SuppressionKind::TypeIgnore => "type".len(),
SuppressionKind::Ty => "ty".len(),
}
}
}
impl fmt::Display for SuppressionKind {
    /// Writes the user-facing directive name (`type: ignore` / `ty: ignore`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let directive = match self {
            SuppressionKind::TypeIgnore => "type: ignore",
            SuppressionKind::Ty => "ty: ignore",
        };
        f.write_str(directive)
    }
}
/// Unique ID for a suppression in a file.
///
/// ## Implementation
/// The wrapped `TextRange` is the suppression's range.
/// This is unique enough because it is its exact
/// location in the source.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
pub(crate) struct FileSuppressionId(TextRange);
#[derive(Copy, Clone, Debug, Eq, PartialEq, get_size2::GetSize)]
enum SuppressionTarget {
    /// Suppress all lints
    All,

    /// Suppress the lint with the given id
    Lint(LintId),

    /// Suppresses no lint, e.g. `ty: ignore[]`
    Empty,
}

impl SuppressionTarget {
    /// Returns `true` if this target names exactly one specific lint.
    const fn is_lint(self) -> bool {
        matches!(self, SuppressionTarget::Lint(_))
    }
}
/// Accumulates the suppressions of a file while its tokens are traversed.
struct SuppressionsBuilder<'a> {
    /// Registry used to resolve `ty: ignore[code]` codes to lints.
    lint_registry: &'a LintRegistry,
    /// The file's source text (used to slice out the code names).
    source: &'a str,

    /// `type: ignore` comments at the top of the file before any non-trivia code apply to the entire file.
    /// This boolean tracks if there has been any non trivia token.
    seen_non_trivia_token: bool,

    // Per-line suppressions, unknown codes, and malformed comments collected
    // so far; moved into `Suppressions` by `finish`.
    line: Vec<Suppression>,
    file: SmallVec<[Suppression; 1]>,
    unknown: Vec<UnknownSuppression>,
    invalid: Vec<InvalidSuppression>,
}
impl<'a> SuppressionsBuilder<'a> {
    /// Creates an empty builder over `source`.
    fn new(source: &'a str, lint_registry: &'a LintRegistry) -> Self {
        Self {
            source,
            lint_registry,
            seen_non_trivia_token: false,
            line: Vec::new(),
            file: SmallVec::new_const(),
            unknown: Vec::new(),
            invalid: Vec::new(),
        }
    }

    /// Records that real code (a non-trivia token) has been seen; after this,
    /// `type: ignore` comments no longer suppress the whole file.
    fn set_seen_non_trivia_token(&mut self) {
        self.seen_non_trivia_token = true;
    }

    /// Consumes the builder, shrinking the collections to their final size.
    fn finish(mut self) -> Suppressions {
        self.line.shrink_to_fit();
        self.file.shrink_to_fit();
        self.unknown.shrink_to_fit();
        self.invalid.shrink_to_fit();

        Suppressions {
            file: self.file,
            line: self.line,
            unknown: self.unknown,
            invalid: self.invalid,
        }
    }

    /// Adds a successfully parsed suppression comment.
    ///
    /// `line_range` is the range from the start of the comment's line to the
    /// end of the comment token; it becomes the suppressed range for
    /// line-level suppressions.
    #[expect(clippy::needless_pass_by_value)]
    fn add_comment(&mut self, comment: SuppressionComment, line_range: TextRange) {
        // `type: ignore` comments at the start of the file apply to the entire range.
        // > A # type: ignore comment on a line by itself at the top of a file, before any docstrings,
        // > imports, or other executable code, silences all errors in the file.
        // > Blank lines and other comments, such as shebang lines and coding cookies,
        // > may precede the # type: ignore comment.
        // > https://typing.python.org/en/latest/spec/directives.html#type-ignore-comments
        let is_file_suppression = comment.kind().is_type_ignore() && !self.seen_non_trivia_token;

        let suppressed_range = if is_file_suppression {
            TextRange::new(0.into(), self.source.text_len())
        } else {
            line_range
        };

        // Routes a suppression to the file-level or line-level collection.
        let mut push_type_ignore_suppression = |suppression: Suppression| {
            if is_file_suppression {
                self.file.push(suppression);
            } else {
                self.line.push(suppression);
            }
        };

        match comment.codes() {
            // `type: ignore`
            None => {
                push_type_ignore_suppression(Suppression {
                    target: SuppressionTarget::All,
                    kind: comment.kind(),
                    comment_range: comment.range(),
                    range: comment.range(),
                    suppressed_range,
                });
            }

            // `type: ignore[..]`
            // The suppression applies to all lints if it is a `type: ignore`
            // comment. `type: ignore` apply to all lints for better mypy compatibility.
            Some(_) if comment.kind().is_type_ignore() => {
                push_type_ignore_suppression(Suppression {
                    target: SuppressionTarget::All,
                    kind: comment.kind(),
                    comment_range: comment.range(),
                    range: comment.range(),
                    suppressed_range,
                });
            }

            // `ty: ignore[]`
            Some([]) => {
                self.line.push(Suppression {
                    target: SuppressionTarget::Empty,
                    kind: comment.kind(),
                    range: comment.range(),
                    comment_range: comment.range(),
                    suppressed_range,
                });
            }

            // `ty: ignore[a, b]`
            Some(codes) => {
                for &code_range in codes {
                    let code = &self.source[code_range];
                    match self.lint_registry.get(code) {
                        Ok(lint) => {
                            self.line.push(Suppression {
                                target: SuppressionTarget::Lint(lint),
                                kind: comment.kind(),
                                range: code_range,
                                comment_range: comment.range(),
                                suppressed_range,
                            });
                        }
                        // Unknown code: remember it so `check_unknown_rule`
                        // can report it later.
                        Err(error) => self.unknown.push(UnknownSuppression {
                            range: code_range,
                            comment_range: comment.range(),
                            reason: error,
                        }),
                    }
                }
            }
        }
    }

    /// Records a comment that looked like a suppression but failed to parse.
    fn add_invalid_comment(&mut self, kind: SuppressionKind, error: ParseError) {
        self.invalid.push(InvalidSuppression { kind, error });
    }
}
/// Suppression for an unknown lint rule.
#[derive(Debug, PartialEq, Eq, get_size2::GetSize)]
struct UnknownSuppression {
    /// The range of the code.
    range: TextRange,

    /// The range of the suppression comment
    comment_range: TextRange,

    /// Why the code couldn't be resolved to a lint.
    reason: GetLintError,
}

/// A comment that looked like a suppression but failed to parse.
#[derive(Debug, PartialEq, Eq, get_size2::GetSize)]
struct InvalidSuppression {
    /// Which directive flavor the comment attempted to use.
    kind: SuppressionKind,
    /// The parse error describing what's wrong with the comment.
    error: ParseError,
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/list.rs | crates/ty_python_semantic/src/list.rs | //! Sorted, arena-allocated association lists
//!
//! An [_association list_][alist], which is a linked list of key/value pairs. We additionally
//! guarantee that the elements of an association list are sorted (by their keys), and that they do
//! not contain any entries with duplicate keys.
//!
//! Association lists have fallen out of favor in recent decades, since you often need operations
//! that are inefficient on them. In particular, looking up a random element by index is O(n), just
//! like a linked list; and looking up an element by key is also O(n), since you must do a linear
//! scan of the list to find the matching element. The typical implementation also suffers from
//! poor cache locality and high memory allocation overhead, since individual list cells are
//! typically allocated separately from the heap. We solve that last problem by storing the cells
//! of an association list in an [`IndexVec`] arena.
//!
//! We exploit structural sharing where possible, reusing cells across multiple lists when we can.
//! That said, we don't guarantee that lists are canonical — it's entirely possible for two lists
//! with identical contents to use different list cells and have different identifiers.
//!
//! Given all of this, association lists have the following benefits:
//!
//! - Lists can be represented by a single 32-bit integer (the index into the arena of the head of
//! the list).
//! - Lists can be cloned in constant time, since the underlying cells are immutable.
//! - Lists can be combined quickly (for both intersection and union), especially when you already
//! have to zip through both input lists to combine each key's values in some way.
//!
//! There is one remaining caveat:
//!
//! - You should construct lists in key order; doing this lets you insert each value in constant time.
//! Inserting entries in reverse order results in _quadratic_ overall time to construct the list.
//!
//! Lists are created using a [`ListBuilder`], and once created are accessed via a [`ListStorage`].
//!
//! ## Tests
//!
//! This module contains quickcheck-based property tests.
//!
//! These tests are disabled by default, as they are non-deterministic and slow. You can run them
//! explicitly using:
//!
//! ```sh
//! cargo test -p ruff_index -- --ignored list::property_tests
//! ```
//!
//! The number of tests (default: 100) can be controlled by setting the `QUICKCHECK_TESTS`
//! environment variable. For example:
//!
//! ```sh
//! QUICKCHECK_TESTS=10000 cargo test …
//! ```
//!
//! If you want to run these tests for a longer period of time, it's advisable to run them in
//! release mode. As some tests are slower than others, it's advisable to run them in a loop until
//! they fail:
//!
//! ```sh
//! export QUICKCHECK_TESTS=100000
//! while cargo test --release -p ruff_index -- \
//! --ignored list::property_tests; do :; done
//! ```
//!
//! [alist]: https://en.wikipedia.org/wiki/Association_list
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::ops::Deref;
use ruff_index::{IndexVec, newtype_index};
/// A handle to an association list. Use [`ListStorage`] to access its elements, and
/// [`ListBuilder`] to construct other lists based on this one.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, get_size2::GetSize)]
pub(crate) struct List<K, V = ()> {
    /// Arena index of the last cell of the (snoc) list; `None` for the empty list.
    last: Option<ListCellId>,
    /// Ties the list to its key/value types without storing any data.
    _phantom: PhantomData<(K, V)>,
}
impl<K, V> List<K, V> {
pub(crate) const fn empty() -> List<K, V> {
List::new(None)
}
const fn new(last: Option<ListCellId>) -> List<K, V> {
List {
last,
_phantom: PhantomData,
}
}
}
impl<K, V> Default for List<K, V> {
fn default() -> Self {
List::empty()
}
}
/// Arena index of a [`ListCell`] within a [`ListStorage`].
#[newtype_index]
#[derive(PartialOrd, Ord, get_size2::GetSize)]
struct ListCellId;
/// Stores one or more association lists. This type provides read-only access to the lists. Use a
/// [`ListBuilder`] to create lists.
#[derive(Debug, Eq, PartialEq, get_size2::GetSize)]
pub(crate) struct ListStorage<K, V = ()> {
    /// The arena holding every cell of every list in this storage.
    cells: IndexVec<ListCellId, ListCell<K, V>>,
}
/// Each association list is represented by a sequence of snoc cells. A snoc cell is like the more
/// familiar cons cell `(a : (b : (c : nil)))`, but in reverse `(((nil : a) : b) : c)`.
///
/// **Terminology**: The elements of a cons cell are usually called `head` and `tail` (assuming
/// you're not in Lisp-land, where they're called `car` and `cdr`). The elements of a snoc cell
/// are usually called `rest` and `last`.
#[derive(Debug, Eq, PartialEq, get_size2::GetSize)]
struct ListCell<K, V> {
    /// All preceding entries of the list, or `None` if this is the first cell.
    rest: Option<ListCellId>,
    key: K,
    value: V,
}
/// Constructs one or more association lists.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ListBuilder<K, V = ()> {
    /// The arena the built lists live in; extracted via [`ListBuilder::build`].
    storage: ListStorage<K, V>,

    /// Scratch space that lets us implement our list operations iteratively instead of
    /// recursively.
    ///
    /// The snoc-list representation that we use for alists is very common in functional
    /// programming, and the simplest implementations of most of the operations are defined
    /// recursively on that data structure. However, they are not _tail_ recursive, which means
    /// that the call stack grows linearly with the size of the input, which can be a problem for
    /// large lists.
    ///
    /// You can often rework those recursive implementations into iterative ones using an
    /// _accumulator_, but that comes at the cost of reversing the list. If we didn't care about
    /// ordering, that wouldn't be a problem. Since we want our lists to be sorted, we can't rely
    /// on that on its own.
    ///
    /// The next standard trick is to use an accumulator, and use a fix-up step at the end to
    /// reverse the (reversed) result in the accumulator, restoring the correct order.
    ///
    /// So, that's what we do! However, as one last optimization, we don't build up alist cells in
    /// our accumulator, since that would add wasteful cruft to our list storage. Instead, we use a
    /// normal Vec as our accumulator, holding the key/value pairs that should be stitched onto the
    /// end of whatever result list we are creating. For our fix-up step, we can consume a Vec in
    /// reverse order by `pop`ping the elements off one by one.
    scratch: Vec<(K, V)>,
}
impl<K, V> Default for ListBuilder<K, V> {
    /// A fresh builder with empty storage and an empty scratch buffer.
    fn default() -> Self {
        let storage = ListStorage {
            cells: IndexVec::default(),
        };
        ListBuilder {
            storage,
            scratch: Vec::new(),
        }
    }
}

impl<K, V> Deref for ListBuilder<K, V> {
    type Target = ListStorage<K, V>;

    /// Lets a builder be used wherever read-only access to the underlying
    /// storage is needed.
    fn deref(&self) -> &ListStorage<K, V> {
        &self.storage
    }
}
impl<K, V> ListBuilder<K, V> {
    /// Finalizes a `ListBuilder`. After calling this, you cannot create any new lists managed by
    /// this storage.
    pub(crate) fn build(mut self) -> ListStorage<K, V> {
        self.storage.cells.shrink_to_fit();
        self.storage
    }

    /// Adds a new cell to the list.
    ///
    /// Adding an element always returns a non-empty list, which means we could technically use `I`
    /// as our return type, since we never return `None`. However, for consistency with our other
    /// methods, we always use `Option<I>` as the return type for any method that can return a
    /// list.
    #[expect(clippy::unnecessary_wraps)]
    fn add_cell(&mut self, rest: Option<ListCellId>, key: K, value: V) -> Option<ListCellId> {
        Some(self.storage.cells.push(ListCell { rest, key, value }))
    }

    /// Returns an entry pointing at where `key` would be inserted into a list.
    ///
    /// Note that when we add a new element to a list, we might have to clone the keys and values
    /// of some existing elements. This is because list cells are immutable once created, since
    /// they might be shared across multiple lists. We must therefore create new cells for every
    /// element that appears after the new element.
    ///
    /// That means that you should construct lists in key order, since that means that there are no
    /// entries to duplicate for each insertion. If you construct the list in reverse order, we
    /// will have to duplicate O(n) entries for each insertion, making it _quadratic_ to construct
    /// the entire list.
    pub(crate) fn entry(&mut self, list: List<K, V>, key: K) -> ListEntry<'_, K, V>
    where
        K: Clone + Ord,
        V: Clone,
    {
        self.scratch.clear();

        // Iterate through the input list, looking for the position where the key should be
        // inserted. We will need to create new list cells for any elements that appear after the
        // new key. Stash those away in our scratch accumulator as we step through the input. The
        // result of the loop is that "rest" of the result list, which we will stitch the new key
        // (and any succeeding keys) onto.
        //
        // Note: because this is a snoc list, we traverse from the *largest* key
        // towards the smallest.
        let mut curr = list.last;
        while let Some(curr_id) = curr {
            let cell = &self.storage.cells[curr_id];
            match key.cmp(&cell.key) {
                // We found an existing entry in the input list with the desired key.
                Ordering::Equal => {
                    return ListEntry {
                        builder: self,
                        list,
                        key,
                        rest: ListTail::Occupied(curr_id),
                    };
                }
                // The input list does not already contain this key, and this is where we should
                // add it.
                Ordering::Greater => {
                    return ListEntry {
                        builder: self,
                        list,
                        key,
                        rest: ListTail::Vacant(curr_id),
                    };
                }
                // If this key is in the list, it's further along. We'll need to create a new cell
                // for this entry in the result list, so add its contents to the scratch
                // accumulator.
                Ordering::Less => {
                    let new_key = cell.key.clone();
                    let new_value = cell.value.clone();
                    self.scratch.push((new_key, new_value));
                    curr = cell.rest;
                }
            }
        }

        // We made it all the way through the list without finding the desired key, so it belongs
        // at the beginning. (And we will unfortunately have to duplicate every existing cell if
        // the caller proceeds with inserting the new key!)
        ListEntry {
            builder: self,
            list,
            key,
            rest: ListTail::Beginning,
        }
    }
}
/// A view into a list, indicating where a key would be inserted.
pub(crate) struct ListEntry<'a, K, V = ()> {
    builder: &'a mut ListBuilder<K, V>,
    /// The list this entry was created from.
    list: List<K, V>,
    /// The key being looked up / inserted.
    key: K,
    /// Points at the element that already contains `key`, if there is one, or the element
    /// immediately before where it would go, if not.
    rest: ListTail<ListCellId>,
}

enum ListTail<I> {
    /// The list does not already contain `key`, and it would go at the beginning of the list.
    Beginning,
    /// The list already contains `key`
    Occupied(I),
    /// The list does not already contain key, and it would go immediately after the given element
    Vacant(I),
}
impl<K, V> ListEntry<'_, K, V>
where
    K: Clone,
    V: Clone,
{
    /// Builds the result list: appends a cell for (`self.key`, `value`) onto
    /// `rest`, then re-appends (in reverse `pop` order, restoring the sort
    /// order) every entry stashed in the scratch accumulator by `entry`.
    fn stitch_up(self, rest: Option<ListCellId>, value: V) -> List<K, V> {
        let mut last = rest;
        last = self.builder.add_cell(last, self.key, value);
        while let Some((key, value)) = self.builder.scratch.pop() {
            last = self.builder.add_cell(last, key, value);
        }
        List::new(last)
    }

    /// Inserts a new key/value into the list if the key is not already present. If the list
    /// already contains `key`, we return the original list as-is, and do not invoke your closure.
    pub(crate) fn or_insert_with<F>(self, f: F) -> List<K, V>
    where
        F: FnOnce() -> V,
    {
        let rest = match self.rest {
            // If the list already contains `key`, we don't need to replace anything, and can
            // return the original list unmodified.
            ListTail::Occupied(_) => return self.list,

            // Otherwise we have to create a new entry and stitch it onto the list.
            ListTail::Beginning => None,
            ListTail::Vacant(index) => Some(index),
        };
        self.stitch_up(rest, f())
    }

    /// Inserts a new key and the default value into the list if the key is not already present. If
    /// the list already contains `key`, we return the original list as-is.
    pub(crate) fn or_insert_default(self) -> List<K, V>
    where
        V: Default,
    {
        self.or_insert_with(V::default)
    }
}
impl<K, V> ListBuilder<K, V> {
    /// Returns the intersection of two lists. The result will contain an entry for any key that
    /// appears in both lists. The corresponding values will be combined using the `combine`
    /// function that you provide.
    #[expect(clippy::needless_pass_by_value)]
    pub(crate) fn intersect_with<F>(
        &mut self,
        a: List<K, V>,
        b: List<K, V>,
        mut combine: F,
    ) -> List<K, V>
    where
        K: Clone + Ord,
        V: Clone,
        F: FnMut(&V, &V) -> V,
    {
        self.scratch.clear();

        // Zip through the lists, building up the keys/values of the new entries into our scratch
        // vector. Continue until we run out of elements in either list. (Any remaining elements in
        // the other list cannot possibly be in the intersection.)
        //
        // Note: snoc lists are traversed from the largest key downward, which
        // is why `Greater`/`Less` advance the list with the larger current key.
        let mut a = a.last;
        let mut b = b.last;
        while let (Some(a_id), Some(b_id)) = (a, b) {
            let a_cell = &self.storage.cells[a_id];
            let b_cell = &self.storage.cells[b_id];
            match a_cell.key.cmp(&b_cell.key) {
                // Both lists contain this key; combine their values
                Ordering::Equal => {
                    let new_key = a_cell.key.clone();
                    let new_value = combine(&a_cell.value, &b_cell.value);
                    self.scratch.push((new_key, new_value));
                    a = a_cell.rest;
                    b = b_cell.rest;
                }
                // a's key is only present in a, so it's not included in the result.
                Ordering::Greater => a = a_cell.rest,
                // b's key is only present in b, so it's not included in the result.
                Ordering::Less => b = b_cell.rest,
            }
        }

        // Once the iteration loop terminates, we stitch the new entries back together into proper
        // alist cells.
        let mut last = None;
        while let Some((key, value)) = self.scratch.pop() {
            last = self.add_cell(last, key, value);
        }
        List::new(last)
    }
}
// ----
// Sets
impl<K> ListStorage<K, ()> {
    /// Iterates through the elements in a set _in reverse order_.
    ///
    /// (Reverse because snoc cells link each element to its predecessor.)
    #[expect(clippy::needless_pass_by_value)]
    pub(crate) fn iter_set_reverse(&self, set: List<K, ()>) -> ListSetReverseIterator<'_, K> {
        ListSetReverseIterator {
            storage: self,
            curr: set.last,
        }
    }
}
/// Iterator over a set's elements in reverse (largest-key-first) order.
pub(crate) struct ListSetReverseIterator<'a, K> {
    storage: &'a ListStorage<K, ()>,
    /// The next cell to yield; `None` once the iterator is exhausted.
    curr: Option<ListCellId>,
}
impl<'a, K> Iterator for ListSetReverseIterator<'a, K> {
    type Item = &'a K;

    /// Yields the current cell's key and steps to its predecessor.
    fn next(&mut self) -> Option<Self::Item> {
        let id = self.curr?;
        let cell = &self.storage.cells[id];
        self.curr = cell.rest;
        Some(&cell.key)
    }
}
impl<K> ListBuilder<K, ()> {
    /// Adds an element to a set.
    ///
    /// (Sets are lists with `()` values; duplicates are ignored.)
    pub(crate) fn insert(&mut self, set: List<K, ()>, element: K) -> List<K, ()>
    where
        K: Clone + Ord,
    {
        self.entry(set, element).or_insert_default()
    }

    /// Returns the intersection of two sets. The result will contain any value that appears in
    /// both sets.
    pub(crate) fn intersect(&mut self, a: List<K, ()>, b: List<K, ()>) -> List<K, ()>
    where
        K: Clone + Ord,
    {
        self.intersect_with(a, b, |(), ()| ())
    }
}
// -----
// Tests
#[cfg(test)]
mod tests {
    use super::*;
    use std::fmt::Display;
    use std::fmt::Write;
    // ----
    // Sets
    impl<K> ListStorage<K>
    where
        K: Display,
    {
        /// Renders a set as `[a, b, c]`. The reverse iterator's output is
        /// collected and re-reversed so elements print in forward order.
        fn display_set(&self, list: List<K, ()>) -> String {
            let elements: Vec<_> = self.iter_set_reverse(list).collect();
            let mut result = String::new();
            result.push('[');
            for element in elements.into_iter().rev() {
                // `result` only holds "[" before the first element, so this adds
                // a separator before every element except the first.
                if result.len() > 1 {
                    result.push_str(", ");
                }
                write!(&mut result, "{element}").unwrap();
            }
            result.push(']');
            result
        }
    }
    #[test]
    fn can_insert_into_set() {
        let mut builder = ListBuilder::<u16>::default();
        // Build up the set in order
        let empty = List::empty();
        let set1 = builder.insert(empty, 1);
        let set12 = builder.insert(set1, 2);
        let set123 = builder.insert(set12, 3);
        // Inserting a duplicate element yields a set with the same contents.
        let set1232 = builder.insert(set123, 2);
        assert_eq!(builder.display_set(empty), "[]");
        assert_eq!(builder.display_set(set1), "[1]");
        assert_eq!(builder.display_set(set12), "[1, 2]");
        assert_eq!(builder.display_set(set123), "[1, 2, 3]");
        assert_eq!(builder.display_set(set1232), "[1, 2, 3]");
        // And in reverse order
        let set3 = builder.insert(empty, 3);
        let set32 = builder.insert(set3, 2);
        let set321 = builder.insert(set32, 1);
        let set3212 = builder.insert(set321, 2);
        assert_eq!(builder.display_set(empty), "[]");
        assert_eq!(builder.display_set(set3), "[3]");
        assert_eq!(builder.display_set(set32), "[2, 3]");
        assert_eq!(builder.display_set(set321), "[1, 2, 3]");
        assert_eq!(builder.display_set(set3212), "[1, 2, 3]");
    }
    #[test]
    fn can_intersect_sets() {
        let mut builder = ListBuilder::<u16>::default();
        let empty = List::empty();
        let set1 = builder.insert(empty, 1);
        let set12 = builder.insert(set1, 2);
        let set123 = builder.insert(set12, 3);
        let set1234 = builder.insert(set123, 4);
        let set2 = builder.insert(empty, 2);
        let set24 = builder.insert(set2, 4);
        let set245 = builder.insert(set24, 5);
        let set2457 = builder.insert(set245, 7);
        let intersection = builder.intersect(empty, empty);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(empty, set1234);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(empty, set2457);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(set1, set1234);
        assert_eq!(builder.display_set(intersection), "[1]");
        let intersection = builder.intersect(set1, set2457);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(set2, set1234);
        assert_eq!(builder.display_set(intersection), "[2]");
        let intersection = builder.intersect(set2, set2457);
        assert_eq!(builder.display_set(intersection), "[2]");
        let intersection = builder.intersect(set1234, set2457);
        assert_eq!(builder.display_set(intersection), "[2, 4]");
    }
    // ----
    // Maps
    impl<K, V> ListStorage<K, V> {
        /// Iterates through the entries in a list _in reverse order by key_.
        #[expect(clippy::needless_pass_by_value)]
        pub(crate) fn iter_reverse(&self, list: List<K, V>) -> ListReverseIterator<'_, K, V> {
            ListReverseIterator {
                storage: self,
                curr: list.last,
            }
        }
    }
    /// Test-only iterator over a map's `(key, value)` pairs in reverse order.
    pub(crate) struct ListReverseIterator<'a, K, V> {
        storage: &'a ListStorage<K, V>,
        curr: Option<ListCellId>,
    }
    impl<'a, K, V> Iterator for ListReverseIterator<'a, K, V> {
        type Item = (&'a K, &'a V);
        fn next(&mut self) -> Option<Self::Item> {
            let cell = &self.storage.cells[self.curr?];
            self.curr = cell.rest;
            Some((&cell.key, &cell.value))
        }
    }
    impl<K, V> ListStorage<K, V>
    where
        K: Display,
        V: Display,
    {
        /// Renders a map as `[k1:v1, k2:v2]` in forward order by key.
        fn display(&self, list: List<K, V>) -> String {
            let entries: Vec<_> = self.iter_reverse(list).collect();
            let mut result = String::new();
            result.push('[');
            for (key, value) in entries.into_iter().rev() {
                if result.len() > 1 {
                    result.push_str(", ");
                }
                write!(&mut result, "{key}:{value}").unwrap();
            }
            result.push(']');
            result
        }
    }
    #[test]
    fn can_insert_into_map() {
        let mut builder = ListBuilder::<u16, u16>::default();
        // Build up the map in order
        let empty = List::empty();
        let map1 = builder.entry(empty, 1).or_insert_with(|| 1);
        let map12 = builder.entry(map1, 2).or_insert_with(|| 2);
        let map123 = builder.entry(map12, 3).or_insert_with(|| 3);
        // Re-inserting key 2 keeps the existing value (2), not the new one (4).
        let map1232 = builder.entry(map123, 2).or_insert_with(|| 4);
        assert_eq!(builder.display(empty), "[]");
        assert_eq!(builder.display(map1), "[1:1]");
        assert_eq!(builder.display(map12), "[1:1, 2:2]");
        assert_eq!(builder.display(map123), "[1:1, 2:2, 3:3]");
        assert_eq!(builder.display(map1232), "[1:1, 2:2, 3:3]");
        // And in reverse order
        let map3 = builder.entry(empty, 3).or_insert_with(|| 3);
        let map32 = builder.entry(map3, 2).or_insert_with(|| 2);
        let map321 = builder.entry(map32, 1).or_insert_with(|| 1);
        let map3212 = builder.entry(map321, 2).or_insert_with(|| 4);
        assert_eq!(builder.display(empty), "[]");
        assert_eq!(builder.display(map3), "[3:3]");
        assert_eq!(builder.display(map32), "[2:2, 3:3]");
        assert_eq!(builder.display(map321), "[1:1, 2:2, 3:3]");
        assert_eq!(builder.display(map3212), "[1:1, 2:2, 3:3]");
    }
    #[test]
    fn can_intersect_maps() {
        let mut builder = ListBuilder::<u16, u16>::default();
        let empty = List::empty();
        let map1 = builder.entry(empty, 1).or_insert_with(|| 1);
        let map12 = builder.entry(map1, 2).or_insert_with(|| 2);
        let map123 = builder.entry(map12, 3).or_insert_with(|| 3);
        let map1234 = builder.entry(map123, 4).or_insert_with(|| 4);
        let map2 = builder.entry(empty, 2).or_insert_with(|| 20);
        let map24 = builder.entry(map2, 4).or_insert_with(|| 40);
        let map245 = builder.entry(map24, 5).or_insert_with(|| 50);
        let map2457 = builder.entry(map245, 7).or_insert_with(|| 70);
        // Values of keys in both maps are combined with `+`.
        let intersection = builder.intersect_with(empty, empty, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(empty, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(empty, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(map1, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[1:2]");
        let intersection = builder.intersect_with(map1, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(map2, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:22]");
        let intersection = builder.intersect_with(map2, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:40]");
        let intersection = builder.intersect_with(map1234, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:22, 4:44]");
    }
}
// --------------
// Property tests
#[cfg(test)]
mod property_tests {
    use super::*;
    use std::collections::{BTreeMap, BTreeSet};
    impl<K> ListBuilder<K>
    where
        K: Clone + Ord,
    {
        /// Builds a set by inserting each element in iteration order.
        fn set_from_elements<'a>(&mut self, elements: impl IntoIterator<Item = &'a K>) -> List<K>
        where
            K: 'a,
        {
            let mut set = List::empty();
            for element in elements {
                set = self.insert(set, element.clone());
            }
            set
        }
    }
    // For most of the tests below, we use a vec as our input, instead of a HashSet or BTreeSet,
    // since we want to test the behavior of adding duplicate elements to the set.
    // NOTE: these quickcheck tests are `#[ignore]`d in normal test runs; they only
    // execute when ignored tests are requested explicitly.
    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[expect(clippy::needless_pass_by_value)]
    fn roundtrip_set_from_vec(elements: Vec<u16>) -> bool {
        let mut builder = ListBuilder::default();
        let set = builder.set_from_elements(&elements);
        // A `BTreeSet` models the expected deduplicated, sorted contents.
        let expected: BTreeSet<_> = elements.iter().copied().collect();
        let actual = builder.iter_set_reverse(set).copied();
        actual.eq(expected.into_iter().rev())
    }
    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[expect(clippy::needless_pass_by_value)]
    fn roundtrip_set_intersection(a_elements: Vec<u16>, b_elements: Vec<u16>) -> bool {
        let mut builder = ListBuilder::default();
        let a = builder.set_from_elements(&a_elements);
        let b = builder.set_from_elements(&b_elements);
        let intersection = builder.intersect(a, b);
        let a_set: BTreeSet<_> = a_elements.iter().copied().collect();
        let b_set: BTreeSet<_> = b_elements.iter().copied().collect();
        let expected: Vec<_> = a_set.intersection(&b_set).copied().collect();
        let actual = builder.iter_set_reverse(intersection).copied();
        actual.eq(expected.into_iter().rev())
    }
    impl<K, V> ListBuilder<K, V>
    where
        K: Clone + Ord,
        V: Clone + Eq,
    {
        /// Builds a map from `(key, value)` pairs. Pairs are inserted in reverse so
        /// that, for duplicate keys, the pair appearing first in `pairs` wins
        /// (matching `BTreeMap`-from-iterator semantics used in the tests? —
        /// NOTE(review): `or_insert_with` keeps the existing value, so the *last*
        /// inserted duplicate, i.e. the first pair in `pairs`... confirm against
        /// `entry` semantics).
        fn set_from_pairs<'a, I>(&mut self, pairs: I) -> List<K, V>
        where
            K: 'a,
            V: 'a,
            I: IntoIterator<Item = &'a (K, V)>,
            I::IntoIter: DoubleEndedIterator,
        {
            let mut list = List::empty();
            for (key, value) in pairs.into_iter().rev() {
                list = self
                    .entry(list, key.clone())
                    .or_insert_with(|| value.clone());
            }
            list
        }
    }
    /// Full outer join of two maps: every key present in either map is mapped to
    /// the pair of its (optional) values from `a` and `b`.
    fn join<K, V>(a: &BTreeMap<K, V>, b: &BTreeMap<K, V>) -> BTreeMap<K, (Option<V>, Option<V>)>
    where
        K: Clone + Ord,
        V: Clone + Ord,
    {
        let mut joined: BTreeMap<K, (Option<V>, Option<V>)> = BTreeMap::new();
        for (k, v) in a {
            joined.entry(k.clone()).or_default().0 = Some(v.clone());
        }
        for (k, v) in b {
            joined.entry(k.clone()).or_default().1 = Some(v.clone());
        }
        joined
    }
    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[expect(clippy::needless_pass_by_value)]
    fn roundtrip_list_from_vec(pairs: Vec<(u16, u16)>) -> bool {
        let mut builder = ListBuilder::default();
        let list = builder.set_from_pairs(&pairs);
        let expected: BTreeMap<_, _> = pairs.iter().copied().collect();
        let actual = builder.iter_reverse(list).map(|(k, v)| (*k, *v));
        actual.eq(expected.into_iter().rev())
    }
    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[expect(clippy::needless_pass_by_value)]
    fn roundtrip_list_intersection(
        a_elements: Vec<(u16, u16)>,
        b_elements: Vec<(u16, u16)>,
    ) -> bool {
        let mut builder = ListBuilder::default();
        let a = builder.set_from_pairs(&a_elements);
        let b = builder.set_from_pairs(&b_elements);
        let intersection = builder.intersect_with(a, b, |a, b| a + b);
        let a_map: BTreeMap<_, _> = a_elements.iter().copied().collect();
        let b_map: BTreeMap<_, _> = b_elements.iter().copied().collect();
        let intersection_map = join(&a_map, &b_map);
        // Keep only keys present in both maps (`?` drops half-joined entries).
        let expected: Vec<_> = intersection_map
            .into_iter()
            .filter_map(|(k, (v1, v2))| Some((k, v1? + v2?)))
            .collect();
        let actual = builder.iter_reverse(intersection).map(|(k, v)| (*k, *v));
        actual.eq(expected.into_iter().rev())
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/lint.rs | crates/ty_python_semantic/src/lint.rs | use crate::diagnostic::did_you_mean;
use core::fmt;
use itertools::Itertools;
use ruff_db::diagnostic::{DiagnosticId, LintName, Severity};
use rustc_hash::FxHashMap;
use std::error::Error;
use std::fmt::Formatter;
use std::hash::Hasher;
/// Metadata describing a single lint rule: its name, documentation, default
/// level, stability status, and the source location of its declaration.
#[derive(Debug, Clone)]
pub struct LintMetadata {
    /// The unique identifier for the lint.
    pub name: LintName,
    /// A one-sentence summary of what the lint catches.
    pub summary: &'static str,
    /// An in depth explanation of the lint in markdown. Covers what the lint does, why it's bad and possible fixes.
    ///
    /// The documentation may require post-processing to be rendered correctly. For example, lines
    /// might have leading or trailing whitespace that should be removed.
    pub raw_documentation: &'static str,
    /// The default level of the lint if the user doesn't specify one.
    pub default_level: Level,
    /// The stability status of the lint (preview, stable, deprecated, or removed).
    pub status: LintStatus,
    /// The source file in which the lint is declared.
    pub file: &'static str,
    /// The 1-based line number in the source `file` where the lint is declared.
    pub line: u32,
}
// NOTE(review): the doc comments on `Level` and its variants presumably feed the
// schemars-generated JSON schema, so their wording is left untouched here.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(
    feature = "serde",
    derive(serde::Serialize, serde::Deserialize),
    serde(rename_all = "kebab-case")
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub enum Level {
    /// # Ignore
    ///
    /// The lint is disabled and should not run.
    Ignore,
    /// # Warn
    ///
    /// The lint is enabled and diagnostic should have a warning severity.
    Warn,
    /// # Error
    ///
    /// The lint is enabled and diagnostics have an error severity.
    Error,
}
impl Level {
pub const fn is_error(self) -> bool {
matches!(self, Level::Error)
}
pub const fn is_warn(self) -> bool {
matches!(self, Level::Warn)
}
pub const fn is_ignore(self) -> bool {
matches!(self, Level::Ignore)
}
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Level::Ignore => f.write_str("ignore"),
Level::Warn => f.write_str("warn"),
Level::Error => f.write_str("error"),
}
}
}
impl TryFrom<Level> for Severity {
type Error = ();
fn try_from(level: Level) -> Result<Self, ()> {
match level {
Level::Ignore => Err(()),
Level::Warn => Ok(Severity::Warning),
Level::Error => Ok(Severity::Error),
}
}
}
impl LintMetadata {
    /// The unique name of this lint.
    pub fn name(&self) -> LintName {
        self.name
    }
    /// One-sentence summary of what this lint catches.
    pub fn summary(&self) -> &str {
        self.summary
    }
    /// Returns the documentation line by line with one leading space and all trailing whitespace removed.
    pub fn documentation_lines(&self) -> impl Iterator<Item = &str> {
        self.raw_documentation.lines().map(|line| {
            // `strip_prefix(char::is_whitespace)` removes at most one leading
            // whitespace character; lines without one pass through unchanged.
            line.strip_prefix(char::is_whitespace)
                .unwrap_or(line)
                .trim_end()
        })
    }
    /// Returns the documentation as a single string.
    pub fn documentation(&self) -> String {
        self.documentation_lines().join("\n")
    }
    /// URL of this lint's online documentation.
    pub fn documentation_url(&self) -> String {
        lint_documentation_url(self.name())
    }
    /// The level applied when the user doesn't configure one.
    pub fn default_level(&self) -> Level {
        self.default_level
    }
    /// The stability status of the lint.
    pub fn status(&self) -> &LintStatus {
        &self.status
    }
    /// Source file in which the lint is declared.
    pub fn file(&self) -> &str {
        self.file
    }
    /// 1-based line in [`Self::file`] where the lint is declared.
    pub fn line(&self) -> u32 {
        self.line
    }
}
/// Returns the URL of the documentation page for the given lint.
pub fn lint_documentation_url(lint_name: LintName) -> String {
    format!("https://ty.dev/rules#{lint_name}")
}
/// Default field values used by the `declare_lint!` macro's struct-update spread.
///
/// Not intended for direct use; hence `#[doc(hidden)]`.
#[doc(hidden)]
pub const fn lint_metadata_defaults() -> LintMetadata {
    LintMetadata {
        name: LintName::of(""),
        summary: "",
        raw_documentation: "",
        default_level: Level::Error,
        status: LintStatus::preview("0.0.0"),
        file: "",
        line: 1,
    }
}
#[derive(Copy, Clone, Debug)]
pub enum LintStatus {
    /// The lint has been added to the linter, but is not yet stable.
    Preview {
        /// The version in which the lint was added.
        since: &'static str,
    },
    /// The lint is stable.
    Stable {
        /// The version in which the lint was stabilized.
        since: &'static str,
    },
    /// The lint is deprecated and no longer recommended for use.
    Deprecated {
        /// The version in which the lint was deprecated.
        since: &'static str,
        /// The reason why the lint has been deprecated.
        ///
        /// This should explain why the lint has been deprecated and if there's a replacement lint that users
        /// can use instead.
        reason: &'static str,
    },
    /// The lint has been removed and can no longer be used.
    Removed {
        /// The version in which the lint was removed.
        since: &'static str,
        /// The reason why the lint has been removed.
        reason: &'static str,
    },
}
impl LintStatus {
    /// Creates a [`LintStatus::Preview`] status.
    pub const fn preview(since: &'static str) -> Self {
        LintStatus::Preview { since }
    }
    /// Creates a [`LintStatus::Stable`] status.
    pub const fn stable(since: &'static str) -> Self {
        LintStatus::Stable { since }
    }
    /// Creates a [`LintStatus::Deprecated`] status.
    pub const fn deprecated(since: &'static str, reason: &'static str) -> Self {
        LintStatus::Deprecated { since, reason }
    }
    /// Creates a [`LintStatus::Removed`] status.
    pub const fn removed(since: &'static str, reason: &'static str) -> Self {
        LintStatus::Removed { since, reason }
    }
    /// Returns `true` if the lint has been removed.
    pub const fn is_removed(&self) -> bool {
        matches!(self, LintStatus::Removed { .. })
    }
    /// Returns `true` if the lint is deprecated.
    pub const fn is_deprecated(&self) -> bool {
        matches!(self, LintStatus::Deprecated { .. })
    }
}
/// Declares a lint rule with the given metadata.
///
/// ```rust
/// use ty_python_semantic::declare_lint;
/// use ty_python_semantic::lint::{LintStatus, Level};
///
/// declare_lint! {
///     /// ## What it does
///     /// Checks for references to names that are not defined.
///     ///
///     /// ## Why is this bad?
///     /// Using an undefined variable will raise a `NameError` at runtime.
///     ///
///     /// ## Example
///     ///
///     /// ```python
///     /// print(x)  # NameError: name 'x' is not defined
///     /// ```
///     pub(crate) static UNRESOLVED_REFERENCE = {
///         summary: "detects references to names that are not defined",
///         status: LintStatus::preview("1.0.0"),
///         default_level: Level::Warn,
///     }
/// }
/// ```
#[macro_export]
macro_rules! declare_lint {
    (
        $(#[doc = $doc:literal])+
        $vis: vis static $name: ident = {
            summary: $summary: literal,
            status: $status: expr,
            // Optional properties
            $( $key:ident: $value:expr, )*
        }
    ) => {
        $( #[doc = $doc] )+
        // `needless_update` fires when the optional properties cover every
        // remaining field; expected so the `..defaults` spread can always stay.
        #[expect(clippy::needless_update)]
        $vis static $name: $crate::lint::LintMetadata = $crate::lint::LintMetadata {
            name: ruff_db::diagnostic::LintName::of(ruff_macros::kebab_case!($name)),
            summary: $summary,
            // The lint's rendered documentation is the concatenation of its doc comments.
            raw_documentation: concat!($($doc, '\n',)+),
            status: $status,
            file: file!(),
            line: line!(),
            $( $key: $value, )*
            ..$crate::lint::lint_metadata_defaults()
        };
    };
}
/// A unique identifier for a lint rule.
///
/// Implements `PartialEq`, `Eq`, and `Hash` based on the `LintMetadata` pointer
/// for fast comparison and lookup.
#[derive(Debug, Clone, Copy, get_size2::GetSize)]
pub struct LintId {
    definition: &'static LintMetadata,
}
impl LintId {
    /// Creates an id referring to the given static lint metadata.
    pub const fn of(definition: &'static LintMetadata) -> Self {
        LintId { definition }
    }
}
impl PartialEq for LintId {
    fn eq(&self, other: &Self) -> bool {
        // Pointer identity: two ids are equal iff they refer to the same static metadata.
        std::ptr::eq(self.definition, other.definition)
    }
}
impl Eq for LintId {}
impl std::hash::Hash for LintId {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the pointer so hashing is consistent with the pointer-based `PartialEq`.
        std::ptr::hash(self.definition, state);
    }
}
impl std::ops::Deref for LintId {
    type Target = LintMetadata;
    fn deref(&self) -> &Self::Target {
        self.definition
    }
}
/// Builder for a [`LintRegistry`].
#[derive(Default, Debug)]
pub struct LintRegistryBuilder {
    /// Registered lints that haven't been removed.
    lints: Vec<LintId>,
    /// Lints indexed by name, including aliases and removed rules.
    by_name: FxHashMap<&'static str, LintEntry>,
}
impl LintRegistryBuilder {
    /// Registers a lint.
    ///
    /// # Panics
    /// If a lint with the same name has already been registered.
    #[track_caller]
    pub fn register_lint(&mut self, lint: &'static LintMetadata) {
        assert_eq!(
            self.by_name.insert(&*lint.name, lint.into()),
            None,
            "duplicate lint registration for '{name}'",
            name = lint.name
        );
        // Removed lints stay resolvable by name (for error reporting) but are
        // excluded from the list of active lints.
        if !lint.status.is_removed() {
            self.lints.push(LintId::of(lint));
        }
    }
    /// Registers `from` as an alias for the already-registered lint `to`.
    ///
    /// # Panics
    /// If `to` isn't registered, if `to` resolves to another alias, or if the
    /// name `from` is already taken.
    #[track_caller]
    pub fn register_alias(&mut self, from: LintName, to: &'static LintMetadata) {
        let target = match self.by_name.get(to.name.as_str()) {
            Some(LintEntry::Lint(target) | LintEntry::Removed(target)) => target,
            Some(LintEntry::Alias(target)) => {
                panic!(
                    "lint alias {from} -> {to:?} points to another alias {target:?}",
                    target = target.name()
                )
            }
            None => panic!(
                "lint alias {from} -> {to} points to non-registered lint",
                to = to.name
            ),
        };
        assert_eq!(
            self.by_name
                .insert(from.as_str(), LintEntry::Alias(*target)),
            None,
            "duplicate lint registration for '{from}'",
        );
    }
    /// Finalizes the builder into a registry.
    pub fn build(self) -> LintRegistry {
        LintRegistry {
            lints: self.lints,
            by_name: self.by_name,
        }
    }
}
/// Registry of all known lints, resolvable by name (including aliases and removed lints).
#[derive(Default, Debug, Clone)]
pub struct LintRegistry {
    lints: Vec<LintId>,
    by_name: FxHashMap<&'static str, LintEntry>,
}
impl LintRegistry {
    /// Looks up a lint by its name.
    pub fn get(&self, code: &str) -> Result<LintId, GetLintError> {
        match self.by_name.get(code) {
            Some(LintEntry::Lint(metadata)) => Ok(*metadata),
            Some(LintEntry::Alias(lint)) => {
                // An alias to a removed lint is reported just like the removed lint itself.
                if lint.status.is_removed() {
                    Err(GetLintError::Removed(lint.name()))
                } else {
                    Ok(*lint)
                }
            }
            Some(LintEntry::Removed(lint)) => Err(GetLintError::Removed(lint.name())),
            None => {
                // The user may have written the fully qualified diagnostic id
                // (e.g. `lint:<rule>`); suggest the bare rule name if stripping
                // the category prefix yields a known name.
                if let Some(without_prefix) = DiagnosticId::strip_category(code) {
                    if let Some(entry) = self.by_name.get(without_prefix) {
                        return Err(GetLintError::PrefixedWithCategory {
                            prefixed: code.to_string(),
                            suggestion: entry.id().name.to_string(),
                        });
                    }
                }
                // Otherwise, offer the closest known name (if any) as a suggestion.
                let suggestion = did_you_mean(self.by_name.keys(), code);
                Err(GetLintError::Unknown {
                    code: code.to_string(),
                    suggestion,
                })
            }
        }
    }
    /// Returns all registered, non-removed lints.
    pub fn lints(&self) -> &[LintId] {
        &self.lints
    }
    /// Returns an iterator over all known aliases and to their target lints.
    ///
    /// This iterator includes aliases that point to removed lints.
    pub fn aliases(&self) -> impl Iterator<Item = (LintName, LintId)> + '_ {
        self.by_name.iter().filter_map(|(key, value)| {
            if let LintEntry::Alias(alias) = value {
                Some((LintName::of(key), *alias))
            } else {
                None
            }
        })
    }
    /// Iterates over all removed lints.
    pub fn removed(&self) -> impl Iterator<Item = LintId> + '_ {
        self.by_name.iter().filter_map(|(_, value)| {
            if let LintEntry::Removed(metadata) = value {
                Some(*metadata)
            } else {
                None
            }
        })
    }
}
/// Error returned by [`LintRegistry::get`] when a name doesn't resolve to an active lint.
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
pub enum GetLintError {
    /// The name maps to this removed lint.
    Removed(LintName),
    /// No lint with the given name is known.
    Unknown {
        code: String,
        suggestion: Option<String>,
    },
    /// The name uses the full qualified diagnostic id `lint:<rule>` instead of just `rule`.
    /// `prefixed` is the name as given; `suggestion` is the rule name without the
    /// `lint:` category prefix.
    PrefixedWithCategory {
        prefixed: String,
        suggestion: String,
    },
}
// `GetLintError` carries no underlying source error, so the default trait
// methods are sufficient.
impl Error for GetLintError {}
impl std::fmt::Display for GetLintError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // The nested `suggestion` match is flattened into two `Unknown` arms.
        match self {
            GetLintError::Removed(code) => write!(f, "Removed rule `{code}`"),
            GetLintError::Unknown {
                code,
                suggestion: None,
            } => write!(f, "Unknown rule `{code}`"),
            GetLintError::Unknown {
                code,
                suggestion: Some(suggestion),
            } => write!(f, "Unknown rule `{code}`. Did you mean `{suggestion}`?"),
            GetLintError::PrefixedWithCategory {
                prefixed,
                suggestion,
            } => write!(f, "Unknown rule `{prefixed}`. Did you mean `{suggestion}`?"),
        }
    }
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LintEntry {
    /// An existing lint rule. Can be in preview, stable or deprecated.
    Lint(LintId),
    /// A lint rule that has been removed.
    Removed(LintId),
    Alias(LintId),
}
impl LintEntry {
    /// Returns the [`LintId`] carried by any variant.
    fn id(self) -> LintId {
        // Every variant wraps a `LintId`, so an irrefutable or-pattern extracts it.
        let (LintEntry::Lint(id) | LintEntry::Removed(id) | LintEntry::Alias(id)) = self;
        id
    }
}
impl From<&'static LintMetadata> for LintEntry {
    fn from(metadata: &'static LintMetadata) -> Self {
        let id = LintId::of(metadata);
        if metadata.status.is_removed() {
            LintEntry::Removed(id)
        } else {
            LintEntry::Lint(id)
        }
    }
}
/// The set of enabled lint rules with their configured severities and sources.
#[derive(Clone, Default, PartialEq, Eq, get_size2::GetSize)]
pub struct RuleSelection {
    /// Map with the severity for each enabled lint rule.
    ///
    /// If a rule isn't present in this map, then it should be considered disabled.
    lints: FxHashMap<LintId, (Severity, LintSource)>,
}
impl RuleSelection {
    /// Creates a new rule selection from all known lints in the registry that are enabled
    /// according to their default severity.
    pub fn from_registry(registry: &LintRegistry) -> Self {
        Self::from_registry_with_default(registry, None)
    }
    /// Creates a new rule selection from all known lints in the registry, including lints that are disabled by default.
    /// Lints that are disabled by default use the `default_severity`.
    pub fn all(registry: &LintRegistry, default_severity: Severity) -> Self {
        Self::from_registry_with_default(registry, Some(default_severity))
    }
    /// Shared constructor: enables every registry lint whose default level maps to a
    /// severity, falling back to `default_severity` (if provided) for lints whose
    /// default level is `Ignore`.
    fn from_registry_with_default(
        registry: &LintRegistry,
        default_severity: Option<Severity>,
    ) -> Self {
        let lints = registry
            .lints()
            .iter()
            .filter_map(|lint| {
                // `Severity::try_from` only fails for `Level::Ignore`.
                Severity::try_from(lint.default_level())
                    .ok()
                    .or(default_severity)
                    .map(|severity| (*lint, (severity, LintSource::Default)))
            })
            .collect();
        RuleSelection { lints }
    }
    /// Returns an iterator over all enabled lints.
    pub fn enabled(&self) -> impl Iterator<Item = LintId> + '_ {
        self.lints.keys().copied()
    }
    /// Returns an iterator over all enabled lints and their severity.
    pub fn iter(&self) -> impl ExactSizeIterator<Item = (LintId, Severity)> + '_ {
        self.lints
            .iter()
            .map(|(&lint, &(severity, _))| (lint, severity))
    }
    /// Returns the configured severity for the lint with the given id or `None` if the lint is disabled.
    pub fn severity(&self, lint: LintId) -> Option<Severity> {
        self.lints.get(&lint).map(|(severity, _)| *severity)
    }
    /// Returns the severity and configuration source for `lint`, or `None` if disabled.
    pub fn get(&self, lint: LintId) -> Option<(Severity, LintSource)> {
        self.lints.get(&lint).copied()
    }
    /// Returns `true` if the `lint` is enabled.
    pub fn is_enabled(&self, lint: LintId) -> bool {
        self.severity(lint).is_some()
    }
    /// Enables `lint` and configures with the given `severity`.
    ///
    /// Overrides any previous configuration for the lint.
    pub fn enable(&mut self, lint: LintId, severity: Severity, source: LintSource) {
        self.lints.insert(lint, (severity, source));
    }
    /// Disables `lint` if it was previously enabled.
    pub fn disable(&mut self, lint: LintId) {
        self.lints.remove(&lint);
    }
}
// The default `LintId` debug implementation prints the entire lint metadata.
// This is way too verbose.
impl fmt::Debug for RuleSelection {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Sort by lint name for deterministic output (the map is unordered).
        let lints = self.lints.iter().sorted_by_key(|(lint, _)| lint.name);
        if f.alternate() {
            // `{:#?}`: a map of name -> "severity (source)".
            let mut f = f.debug_map();
            for (lint, (severity, source)) in lints {
                f.entry(
                    &lint.name().as_str(),
                    &format_args!("{severity:?} ({source:?})"),
                );
            }
            f.finish()
        } else {
            // `{:?}`: just the set of enabled lint names.
            let mut f = f.debug_set();
            for (lint, _) in lints {
                f.entry(&lint.name());
            }
            f.finish()
        }
    }
}
/// Tracks where a rule's configuration (enablement/severity) came from.
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, get_size2::GetSize)]
pub enum LintSource {
    /// The user didn't enable the rule explicitly, instead it's enabled by default.
    #[default]
    Default,
    /// The rule was enabled by using a CLI argument
    Cli,
    /// The rule was enabled in a configuration file.
    File,
    /// The rule was enabled from the configuration in the editor.
    Editor,
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types.rs | crates/ty_python_semantic/src/types.rs | use compact_str::CompactString;
use infer::nearest_enclosing_class;
use itertools::{Either, Itertools};
use ruff_diagnostics::{Edit, Fix};
use std::borrow::Cow;
use std::cell::RefCell;
use std::time::Duration;
use bitflags::bitflags;
use call::{CallDunderError, CallError, CallErrorKind};
use context::InferContext;
use diagnostic::{INVALID_CONTEXT_MANAGER, NOT_ITERABLE, POSSIBLY_MISSING_IMPLICIT_CALL};
use ruff_db::Instant;
use ruff_db::diagnostic::{Annotation, Diagnostic, Span, SubDiagnostic, SubDiagnosticSeverity};
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_text_size::{Ranged, TextRange};
use smallvec::{SmallVec, smallvec};
use ty_module_resolver::{KnownModule, Module, ModuleName, resolve_module};
use type_ordering::union_or_intersection_elements_ordering;
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub use self::cyclic::CycleDetector;
pub(crate) use self::cyclic::{PairVisitor, TypeTransformer};
pub(crate) use self::diagnostic::register_lints;
pub use self::diagnostic::{TypeCheckDiagnostics, UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
pub(crate) use self::infer::{
TypeContext, infer_deferred_types, infer_definition_types, infer_expression_type,
infer_expression_types, infer_scope_types, static_expression_truthiness,
};
pub use self::signatures::ParameterKind;
pub(crate) use self::signatures::{CallableSignature, Signature};
pub(crate) use self::subclass_of::{SubclassOfInner, SubclassOfType};
pub use crate::diagnostic::add_inferred_python_version_hint_to_diagnostic;
use crate::place::{
Definedness, Place, PlaceAndQualifiers, TypeOrigin, Widening, builtins_module_scope,
imported_symbol, known_module_symbol,
};
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::semantic_index::place::ScopedPlaceId;
use crate::semantic_index::scope::ScopeId;
use crate::semantic_index::{imported_modules, place_table, semantic_index};
use crate::suppression::check_suppressions;
use crate::types::bound_super::BoundSuperType;
use crate::types::builder::RecursivelyDefined;
use crate::types::call::{Binding, Bindings, CallArguments, CallableBinding};
pub(crate) use crate::types::class_base::ClassBase;
use crate::types::constraints::{
ConstraintSet, IteratorConstraintsExtension, OptionConstraintsExtension,
};
use crate::types::context::{LintDiagnosticGuard, LintDiagnosticGuardBuilder};
use crate::types::diagnostic::{INVALID_AWAIT, INVALID_TYPE_FORM, UNSUPPORTED_BOOL_CONVERSION};
pub use crate::types::display::{DisplaySettings, TypeDetail, TypeDisplayDetails};
use crate::types::enums::{enum_metadata, is_single_member_enum};
use crate::types::function::{
DataclassTransformerFlags, DataclassTransformerParams, FunctionDecorators, FunctionSpans,
FunctionType, KnownFunction,
};
pub(crate) use crate::types::generics::GenericContext;
use crate::types::generics::{
InferableTypeVars, PartialSpecialization, Specialization, SpecializationBuilder, bind_typevar,
typing_self, walk_generic_context,
};
use crate::types::mro::{Mro, MroError, MroIterator};
pub(crate) use crate::types::narrow::{NarrowingConstraint, infer_narrowing_constraint};
use crate::types::newtype::NewType;
pub(crate) use crate::types::signatures::{Parameter, Parameters};
use crate::types::signatures::{ParameterForm, walk_signature};
use crate::types::tuple::{Tuple, TupleSpec, TupleSpecBuilder};
pub(crate) use crate::types::typed_dict::{TypedDictParams, TypedDictType, walk_typed_dict_type};
pub use crate::types::variance::TypeVarVariance;
use crate::types::variance::VarianceInferable;
use crate::types::visitor::any_over_type;
use crate::unpack::EvaluationMode;
use crate::{Db, FxOrderSet, Program};
pub use class::KnownClass;
pub(crate) use class::{ClassLiteral, ClassType, GenericAlias};
use instance::Protocol;
pub use instance::{NominalInstanceType, ProtocolInstanceType};
pub use special_form::SpecialFormType;
mod bound_super;
mod builder;
mod call;
mod class;
mod class_base;
mod constraints;
mod context;
mod cyclic;
mod diagnostic;
mod display;
mod enums;
mod function;
mod generics;
pub mod ide_support;
mod infer;
mod instance;
pub mod list_members;
mod member;
mod mro;
mod narrow;
mod newtype;
mod overrides;
mod protocol_class;
mod signatures;
mod special_form;
mod string_annotation;
mod subclass_of;
mod tuple;
mod type_ordering;
mod typed_dict;
mod unpacker;
mod variance;
mod visitor;
mod definition;
#[cfg(test)]
mod property_tests;
/// Type-checks `file`, returning all diagnostics produced for it.
///
/// Runs type inference over every scope in the file, collects the resulting
/// diagnostics together with any semantic syntax errors, and filters the
/// combined set through the suppression machinery.
pub fn check_types(db: &dyn Db, file: File) -> Vec<Diagnostic> {
    let _span = tracing::trace_span!("check_types", ?file).entered();
    tracing::debug!("Checking file '{path}'", path = file.path(db));
    // Time the check so unusually slow files can be reported below.
    let start = Instant::now();
    let index = semantic_index(db, file);
    let mut diagnostics = TypeCheckDiagnostics::default();
    for scope_id in index.scope_ids() {
        let result = infer_scope_types(db, scope_id);
        if let Some(scope_diagnostics) = result.diagnostics() {
            diagnostics.extend(scope_diagnostics);
        }
    }
    diagnostics.extend_diagnostics(
        index
            .semantic_syntax_errors()
            .iter()
            .map(|error| Diagnostic::invalid_syntax(file, error, error)),
    );
    // Apply suppressions before reporting.
    let diagnostics = check_suppressions(db, file, diagnostics);
    let elapsed = start.elapsed();
    if elapsed >= Duration::from_millis(100) {
        tracing::info!(
            "Checking file `{path}` took more than 100ms ({elapsed:?})",
            path = file.path(db)
        );
    }
    diagnostics
}
/// Infer the type of a binding.
pub(crate) fn binding_type<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
    infer_definition_types(db, definition).binding_type(definition)
}
/// Infer the type of a declaration.
pub(crate) fn declaration_type<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
) -> TypeAndQualifiers<'db> {
    infer_definition_types(db, definition).declaration_type(definition)
}
/// Infer the type of a (possibly deferred) sub-expression of a [`Definition`].
///
/// Supports expressions that are evaluated within a type-params sub-scope.
///
/// ## Panics
/// If the given expression is not a sub-expression of the given [`Definition`].
fn definition_expression_type<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
    expression: &ast::Expr,
) -> Type<'db> {
    let file = definition.file(db);
    let index = semantic_index(db, file);
    let scope = index.expression_scope_id(expression).to_scope_id(db, file);
    if scope == definition.scope(db) {
        // The expression belongs to the definition's own scope: prefer the
        // definition-level inference, falling back to deferred inference.
        infer_definition_types(db, definition)
            .try_expression_type(expression)
            .unwrap_or_else(|| infer_deferred_types(db, definition).expression_type(expression))
    } else {
        // Otherwise the expression lives in a type-params sub-scope.
        infer_scope_types(db, scope).expression_type(expression)
    }
}
/// A [`TypeTransformer`] that is used in `apply_type_mapping` methods.
pub(crate) type ApplyTypeMappingVisitor<'db> = TypeTransformer<'db, TypeMapping<'db, 'db>>;
/// A [`PairVisitor`] that is used in `has_relation_to` methods.
pub(crate) type HasRelationToVisitor<'db> =
CycleDetector<TypeRelation<'db>, (Type<'db>, Type<'db>, TypeRelation<'db>), ConstraintSet<'db>>;
impl Default for HasRelationToVisitor<'_> {
fn default() -> Self {
HasRelationToVisitor::new(ConstraintSet::from(true))
}
}
/// A [`PairVisitor`] that is used in `is_disjoint_from` methods.
pub(crate) type IsDisjointVisitor<'db> = PairVisitor<'db, IsDisjoint, ConstraintSet<'db>>;
#[derive(Debug)]
pub(crate) struct IsDisjoint;
impl Default for IsDisjointVisitor<'_> {
fn default() -> Self {
IsDisjointVisitor::new(ConstraintSet::from(false))
}
}
/// A [`PairVisitor`] that is used in `is_equivalent` methods.
pub(crate) type IsEquivalentVisitor<'db> = PairVisitor<'db, IsEquivalent, ConstraintSet<'db>>;
#[derive(Debug)]
pub(crate) struct IsEquivalent;
impl Default for IsEquivalentVisitor<'_> {
fn default() -> Self {
IsEquivalentVisitor::new(ConstraintSet::from(true))
}
}
/// A [`CycleDetector`] that is used in `find_legacy_typevars` methods.
pub(crate) type FindLegacyTypeVarsVisitor<'db> = CycleDetector<FindLegacyTypeVars, Type<'db>, ()>;
#[derive(Debug)]
pub(crate) struct FindLegacyTypeVars;
/// A [`CycleDetector`] that is used in `try_bool` methods.
pub(crate) type TryBoolVisitor<'db> =
CycleDetector<TryBool, Type<'db>, Result<Truthiness, BoolError<'db>>>;
pub(crate) struct TryBool;
/// A [`CycleDetector`] that is used in `visit_specialization` methods.
pub(crate) type SpecializationVisitor<'db> = CycleDetector<VisitSpecialization, Type<'db>, ()>;
pub(crate) struct VisitSpecialization;
/// A [`TypeTransformer`] that is used in `normalized` methods.
pub(crate) type NormalizedVisitor<'db> = TypeTransformer<'db, Normalized>;
#[derive(Debug)]
pub(crate) struct Normalized;
/// How a generic type has been specialized.
///
/// This matters only if there is at least one invariant type parameter.
/// For example, we represent `Top[list[Any]]` as a `GenericAlias` with
/// `MaterializationKind` set to Top, which we denote as `Top[list[Any]]`.
/// A type `Top[list[T]]` includes all fully static list types `list[U]` where `U` is
/// a supertype of `Bottom[T]` and a subtype of `Top[T]`.
///
/// Similarly, there is `Bottom[list[Any]]`.
/// This type is harder to make sense of in a set-theoretic framework, but
/// it is a subtype of all materializations of `list[Any]`.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
pub enum MaterializationKind {
Top,
Bottom,
}
impl MaterializationKind {
/// Flip the materialization type: `Top` becomes `Bottom` and vice versa.
#[must_use]
pub const fn flip(self) -> Self {
match self {
Self::Top => Self::Bottom,
Self::Bottom => Self::Top,
}
}
}
/// The descriptor protocol distinguishes two kinds of descriptors. Non-data descriptors
/// define a `__get__` method, while data descriptors additionally define a `__set__`
/// method or a `__delete__` method. This enum is used to categorize attributes into two
/// groups: (1) data descriptors and (2) normal attributes or non-data descriptors.
#[derive(Clone, Debug, Copy, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]
pub(crate) enum AttributeKind {
DataDescriptor,
NormalOrNonDataDescriptor,
}
impl AttributeKind {
const fn is_data(self) -> bool {
matches!(self, Self::DataDescriptor)
}
}
/// This enum is used to control the behavior of the descriptor protocol implementation.
/// When invoked on a class object, the fallback type (a class attribute) can shadow a
/// non-data descriptor of the meta-type (the class's metaclass). However, this is not
/// true for instances. When invoked on an instance, the fallback type (an attribute on
/// the instance) cannot completely shadow a non-data descriptor of the meta-type (the
/// class), because we do not currently attempt to statically infer if an instance
/// attribute is definitely defined (i.e. to check whether a particular method has been
/// called).
#[derive(Clone, Debug, Copy, PartialEq)]
enum InstanceFallbackShadowsNonDataDescriptor {
Yes,
No,
}
bitflags! {
#[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)]
pub(crate) struct MemberLookupPolicy: u8 {
/// Dunder methods are looked up on the meta-type of a type without potentially falling
/// back on attributes on the type itself. For example, when implicitly invoked on an
/// instance, dunder methods are not looked up as instance attributes. And when invoked
/// on a class, dunder methods are only looked up on the metaclass, not the class itself.
///
/// All other attributes use the `WithInstanceFallback` policy.
///
/// If this flag is set - look up the attribute on the meta-type only.
const NO_INSTANCE_FALLBACK = 1 << 0;
/// When looking up an attribute on a class, we sometimes need to avoid
/// looking up attributes defined on the `object` class. Usually because
/// typeshed doesn't properly encode runtime behavior (e.g. see how `__new__` & `__init__`
/// are handled during class creation).
///
/// If this flag is set - exclude attributes defined on `object` when looking up attributes.
const MRO_NO_OBJECT_FALLBACK = 1 << 1;
/// When looking up an attribute on a class, we sometimes need to avoid
/// looking up attributes defined on `type` if this is the metaclass of the class.
///
/// This is similar to no object fallback above
const META_CLASS_NO_TYPE_FALLBACK = 1 << 2;
/// Skip looking up attributes on the builtin `int` and `str` classes.
const MRO_NO_INT_OR_STR_LOOKUP = 1 << 3;
/// Do not call `__getattr__` during member lookup.
const NO_GETATTR_LOOKUP = 1 << 4;
}
}
impl MemberLookupPolicy {
/// Only look up the attribute on the meta-type.
///
/// If false - Look up the attribute on the meta-type, but fall back to attributes on the instance
/// if the meta-type attribute is not found or if the meta-type attribute is not a data
/// descriptor.
pub(crate) const fn no_instance_fallback(self) -> bool {
self.contains(Self::NO_INSTANCE_FALLBACK)
}
/// Exclude attributes defined on `object` when looking up attributes.
pub(crate) const fn mro_no_object_fallback(self) -> bool {
self.contains(Self::MRO_NO_OBJECT_FALLBACK)
}
/// Exclude attributes defined on `type` when looking up meta-class-attributes.
pub(crate) const fn meta_class_no_type_fallback(self) -> bool {
self.contains(Self::META_CLASS_NO_TYPE_FALLBACK)
}
/// Exclude attributes defined on `int` or `str` when looking up attributes.
pub(crate) const fn mro_no_int_or_str_fallback(self) -> bool {
self.contains(Self::MRO_NO_INT_OR_STR_LOOKUP)
}
/// Do not call `__getattr__` during member lookup.
pub(crate) const fn no_getattr_lookup(self) -> bool {
self.contains(Self::NO_GETATTR_LOOKUP)
}
}
impl Default for MemberLookupPolicy {
fn default() -> Self {
Self::empty()
}
}
fn member_lookup_cycle_initial<'db>(
_db: &'db dyn Db,
id: salsa::Id,
_self: Type<'db>,
_name: Name,
_policy: MemberLookupPolicy,
) -> PlaceAndQualifiers<'db> {
Place::bound(Type::divergent(id)).into()
}
fn member_lookup_cycle_recover<'db>(
db: &'db dyn Db,
cycle: &salsa::Cycle,
previous_member: &PlaceAndQualifiers<'db>,
member: PlaceAndQualifiers<'db>,
_self_type: Type<'db>,
_name: Name,
_policy: MemberLookupPolicy,
) -> PlaceAndQualifiers<'db> {
member.cycle_normalized(db, *previous_member, cycle)
}
fn class_lookup_cycle_initial<'db>(
_db: &'db dyn Db,
id: salsa::Id,
_self: Type<'db>,
_name: Name,
_policy: MemberLookupPolicy,
) -> PlaceAndQualifiers<'db> {
Place::bound(Type::divergent(id)).into()
}
fn class_lookup_cycle_recover<'db>(
db: &'db dyn Db,
cycle: &salsa::Cycle,
previous_member: &PlaceAndQualifiers<'db>,
member: PlaceAndQualifiers<'db>,
_self_type: Type<'db>,
_name: Name,
_policy: MemberLookupPolicy,
) -> PlaceAndQualifiers<'db> {
member.cycle_normalized(db, *previous_member, cycle)
}
fn variance_cycle_initial<'db, T>(
_db: &'db dyn Db,
_id: salsa::Id,
_self: T,
_typevar: BoundTypeVarInstance<'db>,
) -> TypeVarVariance {
TypeVarVariance::Bivariant
}
/// Meta data for `Type::Todo`, which represents a known limitation in ty.
#[cfg(debug_assertions)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
pub struct TodoType(pub &'static str);
#[cfg(debug_assertions)]
impl std::fmt::Display for TodoType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "({msg})", msg = self.0)
}
}
#[cfg(not(debug_assertions))]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
pub struct TodoType;
#[cfg(not(debug_assertions))]
impl std::fmt::Display for TodoType {
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Ok(())
}
}
/// Create a `Type::Todo` variant to represent a known limitation in the type system.
///
/// It can be created by specifying a custom message: `todo_type!("PEP 604 not supported")`.
#[cfg(debug_assertions)]
macro_rules! todo_type {
($message:literal) => {{
const _: () = {
let s = $message;
if !s.is_ascii() {
panic!("todo_type! message must be ASCII");
}
let bytes = s.as_bytes();
let mut i = 0;
while i < bytes.len() {
// Check each byte for '(' or ')'
let ch = bytes[i];
assert!(
!40u8.eq_ignore_ascii_case(&ch) && !41u8.eq_ignore_ascii_case(&ch),
"todo_type! message must not contain parentheses",
);
i += 1;
}
};
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo($crate::types::TodoType(
$message,
)))
}};
($message:ident) => {
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo($crate::types::TodoType(
$message,
)))
};
}
#[cfg(not(debug_assertions))]
macro_rules! todo_type {
() => {
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(crate::types::TodoType))
};
($message:literal) => {
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(crate::types::TodoType))
};
($message:ident) => {
$crate::types::Type::Dynamic($crate::types::DynamicType::Todo(crate::types::TodoType))
};
}
pub use crate::types::definition::TypeDefinition;
pub(crate) use todo_type;
/// Represents an instance of `builtins.property`.
///
/// # Ordering
/// Ordering is based on the property instance's salsa-assigned id and not on its values.
/// The id may change between runs, or when the property instance was garbage collected and recreated.
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
#[derive(PartialOrd, Ord)]
pub struct PropertyInstanceType<'db> {
getter: Option<Type<'db>>,
setter: Option<Type<'db>>,
}
fn walk_property_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
db: &'db dyn Db,
property: PropertyInstanceType<'db>,
visitor: &V,
) {
if let Some(getter) = property.getter(db) {
visitor.visit_type(db, getter);
}
if let Some(setter) = property.setter(db) {
visitor.visit_type(db, setter);
}
}
// The Salsa heap is tracked separately.
impl get_size2::GetSize for PropertyInstanceType<'_> {}
impl<'db> PropertyInstanceType<'db> {
fn apply_type_mapping_impl<'a>(
self,
db: &'db dyn Db,
type_mapping: &TypeMapping<'a, 'db>,
tcx: TypeContext<'db>,
visitor: &ApplyTypeMappingVisitor<'db>,
) -> Self {
let getter = self
.getter(db)
.map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor));
let setter = self
.setter(db)
.map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor));
Self::new(db, getter, setter)
}
fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
Self::new(
db,
self.getter(db).map(|ty| ty.normalized_impl(db, visitor)),
self.setter(db).map(|ty| ty.normalized_impl(db, visitor)),
)
}
fn recursive_type_normalized_impl(
self,
db: &'db dyn Db,
div: Type<'db>,
nested: bool,
) -> Option<Self> {
let getter = match self.getter(db) {
Some(ty) if nested => Some(ty.recursive_type_normalized_impl(db, div, true)?),
Some(ty) => Some(
ty.recursive_type_normalized_impl(db, div, true)
.unwrap_or(div),
),
None => None,
};
let setter = match self.setter(db) {
Some(ty) if nested => Some(ty.recursive_type_normalized_impl(db, div, true)?),
Some(ty) => Some(
ty.recursive_type_normalized_impl(db, div, true)
.unwrap_or(div),
),
None => None,
};
Some(Self::new(db, getter, setter))
}
fn find_legacy_typevars_impl(
self,
db: &'db dyn Db,
binding_context: Option<Definition<'db>>,
typevars: &mut FxOrderSet<BoundTypeVarInstance<'db>>,
visitor: &FindLegacyTypeVarsVisitor<'db>,
) {
if let Some(ty) = self.getter(db) {
ty.find_legacy_typevars_impl(db, binding_context, typevars, visitor);
}
if let Some(ty) = self.setter(db) {
ty.find_legacy_typevars_impl(db, binding_context, typevars, visitor);
}
}
fn when_equivalent_to(
self,
db: &'db dyn Db,
other: Self,
inferable: InferableTypeVars<'_, 'db>,
) -> ConstraintSet<'db> {
self.is_equivalent_to_impl(db, other, inferable, &IsEquivalentVisitor::default())
}
fn is_equivalent_to_impl(
self,
db: &'db dyn Db,
other: Self,
inferable: InferableTypeVars<'_, 'db>,
visitor: &IsEquivalentVisitor<'db>,
) -> ConstraintSet<'db> {
let getter_equivalence = if let Some(getter) = self.getter(db) {
let Some(other_getter) = other.getter(db) else {
return ConstraintSet::from(false);
};
getter.is_equivalent_to_impl(db, other_getter, inferable, visitor)
} else {
if other.getter(db).is_some() {
return ConstraintSet::from(false);
}
ConstraintSet::from(true)
};
let setter_equivalence = || {
if let Some(setter) = self.setter(db) {
let Some(other_setter) = other.setter(db) else {
return ConstraintSet::from(false);
};
setter.is_equivalent_to_impl(db, other_setter, inferable, visitor)
} else {
if other.setter(db).is_some() {
return ConstraintSet::from(false);
}
ConstraintSet::from(true)
}
};
getter_equivalence.and(db, setter_equivalence)
}
}
bitflags! {
/// Used to store metadata about a dataclass or dataclass-like class.
/// For the precise meaning of the fields, see [1].
///
/// [1]: https://docs.python.org/3/library/dataclasses.html
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct DataclassFlags: u16 {
const INIT = 1 << 0;
const REPR = 1 << 1;
const EQ = 1 << 2;
const ORDER = 1 << 3;
const UNSAFE_HASH = 1 << 4;
const FROZEN = 1 << 5;
const MATCH_ARGS = 1 << 6;
const KW_ONLY = 1 << 7;
const SLOTS = 1 << 8 ;
const WEAKREF_SLOT = 1 << 9;
}
}
impl DataclassFlags {
pub(crate) const fn is_frozen(self) -> bool {
self.contains(Self::FROZEN)
}
}
pub(crate) const DATACLASS_FLAGS: &[(&str, DataclassFlags)] = &[
("init", DataclassFlags::INIT),
("repr", DataclassFlags::REPR),
("eq", DataclassFlags::EQ),
("order", DataclassFlags::ORDER),
("unsafe_hash", DataclassFlags::UNSAFE_HASH),
("frozen", DataclassFlags::FROZEN),
("match_args", DataclassFlags::MATCH_ARGS),
("kw_only", DataclassFlags::KW_ONLY),
("slots", DataclassFlags::SLOTS),
("weakref_slot", DataclassFlags::WEAKREF_SLOT),
];
impl get_size2::GetSize for DataclassFlags {}
impl Default for DataclassFlags {
fn default() -> Self {
Self::INIT | Self::REPR | Self::EQ | Self::MATCH_ARGS
}
}
impl From<DataclassTransformerFlags> for DataclassFlags {
fn from(params: DataclassTransformerFlags) -> Self {
let mut result = Self::default();
result.set(
Self::EQ,
params.contains(DataclassTransformerFlags::EQ_DEFAULT),
);
result.set(
Self::ORDER,
params.contains(DataclassTransformerFlags::ORDER_DEFAULT),
);
result.set(
Self::KW_ONLY,
params.contains(DataclassTransformerFlags::KW_ONLY_DEFAULT),
);
result.set(
Self::FROZEN,
params.contains(DataclassTransformerFlags::FROZEN_DEFAULT),
);
result
}
}
/// Metadata for a dataclass. Stored inside a `Type::DataclassDecorator(…)`
/// instance that we use as the return type of a `dataclasses.dataclass` and
/// dataclass-transformer decorator calls.
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
#[derive(PartialOrd, Ord)]
pub struct DataclassParams<'db> {
flags: DataclassFlags,
#[returns(deref)]
field_specifiers: Box<[Type<'db>]>,
}
impl get_size2::GetSize for DataclassParams<'_> {}
impl<'db> DataclassParams<'db> {
fn default_params(db: &'db dyn Db) -> Self {
Self::from_flags(db, DataclassFlags::default())
}
fn from_flags(db: &'db dyn Db, flags: DataclassFlags) -> Self {
let dataclasses_field = known_module_symbol(db, KnownModule::Dataclasses, "field")
.place
.ignore_possibly_undefined()
.unwrap_or_else(Type::unknown);
Self::new(db, flags, vec![dataclasses_field].into_boxed_slice())
}
fn from_transformer_params(db: &'db dyn Db, params: DataclassTransformerParams<'db>) -> Self {
Self::new(
db,
DataclassFlags::from(params.flags(db)),
params.field_specifiers(db),
)
}
}
/// Representation of a type: a set of possible values at runtime.
///
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]
pub enum Type<'db> {
/// The dynamic type: a statically unknown set of values
Dynamic(DynamicType<'db>),
/// The empty set of values
Never,
/// A specific function object
FunctionLiteral(FunctionType<'db>),
/// Represents a callable `instance.method` where `instance` is an instance of a class
/// and `method` is a method (of that class).
///
/// See [`BoundMethodType`] for more information.
///
/// TODO: consider replacing this with `Callable & Instance(MethodType)`?
/// I.e. if we have a method `def f(self, x: int) -> str`, and see it being called as
/// `instance.f`, we could partially apply (and check) the `instance` argument against
/// the `self` parameter, and return a `MethodType & Callable[[int], str]`.
/// One drawback would be that we could not show the bound instance when that type is displayed.
BoundMethod(BoundMethodType<'db>),
/// Represents a specific instance of a bound method type for a builtin class.
///
/// TODO: consider replacing this with `Callable & types.MethodWrapperType` type?
/// The `Callable` type would need to be overloaded -- e.g. `types.FunctionType.__get__` has
/// this behaviour when a method is accessed on a class vs an instance:
///
/// ```txt
/// * (None, type) -> Literal[function_on_which_it_was_called]
/// * (object, type | None) -> BoundMethod[instance, function_on_which_it_was_called]
/// ```
KnownBoundMethod(KnownBoundMethodType<'db>),
/// Represents a specific instance of `types.WrapperDescriptorType`.
///
/// TODO: Similar to above, this could eventually be replaced by a generic `Callable`
/// type.
WrapperDescriptor(WrapperDescriptorKind),
/// A special callable that is returned by a `dataclass(…)` call. It is usually
/// used as a decorator. Note that this is only used as a return type for actual
/// `dataclass` calls, not for the argumentless `@dataclass` decorator.
DataclassDecorator(DataclassParams<'db>),
/// A special callable that is returned by a `dataclass_transform(…)` call.
DataclassTransformer(DataclassTransformerParams<'db>),
/// The type of an arbitrary callable object with a certain specified signature.
Callable(CallableType<'db>),
/// A specific module object
ModuleLiteral(ModuleLiteralType<'db>),
/// A specific class object
ClassLiteral(ClassLiteral<'db>),
/// A specialization of a generic class
GenericAlias(GenericAlias<'db>),
/// The set of all class objects that are subclasses of the given class (C), spelled `type[C]`.
SubclassOf(SubclassOfType<'db>),
/// The set of Python objects with the given class in their __class__'s method resolution order.
/// Construct this variant using the `Type::instance` constructor function.
NominalInstance(NominalInstanceType<'db>),
/// The set of Python objects that conform to the interface described by a given protocol.
/// Construct this variant using the `Type::instance` constructor function.
ProtocolInstance(ProtocolInstanceType<'db>),
/// A single Python object that requires special treatment in the type system,
/// and which exists at a location that can be known prior to any analysis by ty.
SpecialForm(SpecialFormType),
/// Singleton types that are heavily special-cased by ty, and which are usually
/// created as a result of some runtime operation (e.g. a type-alias statement,
/// a typevar definition, or `Generic[T]` in a class's bases list).
KnownInstance(KnownInstanceType<'db>),
/// An instance of `builtins.property`
PropertyInstance(PropertyInstanceType<'db>),
/// The set of objects in any of the types in the union
Union(UnionType<'db>),
/// The set of objects in all of the types in the intersection
Intersection(IntersectionType<'db>),
/// Represents objects whose `__bool__` method is deterministic:
/// - `AlwaysTruthy`: `__bool__` always returns `True`
/// - `AlwaysFalsy`: `__bool__` always returns `False`
AlwaysTruthy,
AlwaysFalsy,
/// An integer literal
IntLiteral(i64),
/// A boolean literal, either `True` or `False`.
BooleanLiteral(bool),
/// A string literal whose value is known
StringLiteral(StringLiteralType<'db>),
/// A singleton type that represents a specific enum member
EnumLiteral(EnumLiteralType<'db>),
/// A string known to originate only from literal values, but whose value is not known (unlike
/// `StringLiteral` above).
LiteralString,
/// A bytes literal
BytesLiteral(BytesLiteralType<'db>),
/// An instance of a typevar. When the generic class or function binding this typevar is
/// specialized, we will replace the typevar with its specialization.
TypeVar(BoundTypeVarInstance<'db>),
/// A bound super object like `super()` or `super(A, A())`
/// This type doesn't handle an unbound super object like `super(A)`; for that we just use
/// a `Type::NominalInstance` of `builtins.super`.
BoundSuper(BoundSuperType<'db>),
/// A subtype of `bool` that allows narrowing in both positive and negative cases.
TypeIs(TypeIsType<'db>),
/// A subtype of `bool` that allows narrowing in only the positive case.
TypeGuard(TypeGuardType<'db>),
/// A type that represents an inhabitant of a `TypedDict`.
TypedDict(TypedDictType<'db>),
/// An aliased type (lazily not-yet-unpacked to its value type).
TypeAlias(TypeAliasType<'db>),
/// The set of Python objects that belong to a `typing.NewType` subtype. Note that
/// `typing.NewType` itself is a `Type::ClassLiteral` with `KnownClass::NewType`, and the
/// identity callables it returns (which behave like subtypes in type expressions) are of
/// `Type::KnownInstance` with `KnownInstanceType::NewType`. This `Type` refers to the objects
/// wrapped/returned by a specific one of those identity callables, or by another that inherits
/// from it.
NewTypeInstance(NewType<'db>),
}
/// Helper for `recursive_type_normalized_impl` for `TypeGuardLike` types.
fn recursive_type_normalize_type_guard_like<'db, T: TypeGuardLike<'db>>(
db: &'db dyn Db,
guard: T,
div: Type<'db>,
nested: bool,
) -> Option<Type<'db>> {
let ty = if nested {
guard
.return_type(db)
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/ast_node_ref.rs | crates/ty_python_semantic/src/ast_node_ref.rs | use std::fmt::Debug;
use std::marker::PhantomData;
#[cfg(debug_assertions)]
use ruff_db::files::File;
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast::{AnyNodeRef, NodeIndex};
use ruff_python_ast::{AnyRootNodeRef, HasNodeIndex};
use ruff_text_size::Ranged;
/// Reference to an AST node.
///
/// This type acts as a reference to an AST node within a given module that remains
/// stable regardless of whether the AST is garbage collected. As such, accessing a
/// node through the [`AstNodeRef`] requires a reference to the current [`ParsedModuleRef`]
/// for the module containing the node.
///
/// ## Usage in salsa tracked structs
/// It's important that [`AstNodeRef`] fields in salsa tracked structs are tracked fields
/// (attributed with `#[tracked`]). It prevents that the tracked struct gets a new ID
/// every time the AST changes, which in turn, invalidates the result of any query
/// that takes said tracked struct as a query argument or returns the tracked struct as part of its result.
///
/// For example, marking the [`AstNodeRef`] as tracked on `Expression`
/// has the effect that salsa will consider the expression as "unchanged" for as long as it:
///
/// * belongs to the same file
/// * belongs to the same scope
/// * has the same kind
/// * was created in the same order
///
/// This means that changes to expressions in other scopes don't invalidate the expression's id, giving
/// us some form of scope-stable identity for expressions. Only queries accessing the node field
/// run on every AST change. All other queries only run when the expression's identity changes.
#[derive(Clone)]
pub struct AstNodeRef<T> {
/// The index of the node in the AST.
index: NodeIndex,
/// Debug information.
#[cfg(debug_assertions)]
kind: ruff_python_ast::NodeKind,
#[cfg(debug_assertions)]
range: ruff_text_size::TextRange,
// Note that because the module address is not stored in release builds, `AstNodeRef`
// cannot implement `Eq`, as indices are only unique within a given instance of the
// AST.
#[cfg(debug_assertions)]
file: File,
_node: PhantomData<T>,
}
impl<T> AstNodeRef<T> {
pub(crate) fn index(&self) -> NodeIndex {
self.index
}
}
impl<T> AstNodeRef<T>
where
T: HasNodeIndex + Ranged + PartialEq + Debug,
for<'ast> AnyNodeRef<'ast>: From<&'ast T>,
for<'ast> &'ast T: TryFrom<AnyRootNodeRef<'ast>>,
{
/// Creates a new `AstNodeRef` that references `node`.
///
/// This method may panic or produce unspecified results if the provided module is from a
/// different file or Salsa revision than the module to which the node belongs.
pub(super) fn new(module_ref: &ParsedModuleRef, node: &T) -> Self {
let index = node.node_index().load();
debug_assert_eq!(module_ref.get_by_index(index).try_into().ok(), Some(node));
Self {
index,
#[cfg(debug_assertions)]
file: module_ref.module().file(),
#[cfg(debug_assertions)]
kind: AnyNodeRef::from(node).kind(),
#[cfg(debug_assertions)]
range: node.range(),
_node: PhantomData,
}
}
/// Returns a reference to the wrapped node.
///
/// This method may panic or produce unspecified results if the provided module is from a
/// different file or Salsa revision than the module to which the node belongs.
#[track_caller]
pub fn node<'ast>(&self, module_ref: &'ast ParsedModuleRef) -> &'ast T {
#[cfg(debug_assertions)]
assert_eq!(module_ref.module().file(), self.file);
// The user guarantees that the module is from the same file and Salsa
// revision, so the file contents cannot have changed.
module_ref
.get_by_index(self.index)
.try_into()
.ok()
.expect("AST indices should never change within the same revision")
}
}
#[expect(unsafe_code)]
unsafe impl<T> salsa::Update for AstNodeRef<T> {
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
let old_ref = unsafe { &mut (*old_pointer) };
// The equality of an `AstNodeRef` depends on both the module address and the node index,
// but the former is not stored in release builds to save memory. As such, AST nodes
// are always considered change when the AST is reparsed, which is acceptable because
// any change to the AST is likely to invalidate most node indices anyways.
*old_ref = new_value;
true
}
}
impl<T> get_size2::GetSize for AstNodeRef<T> {}
#[allow(clippy::missing_fields_in_debug)]
impl<T> Debug for AstNodeRef<T>
where
T: Debug,
for<'ast> &'ast T: TryFrom<AnyRootNodeRef<'ast>>,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
#[cfg(debug_assertions)]
{
f.debug_struct("AstNodeRef")
.field("kind", &self.kind)
.field("range", &self.range)
.finish()
}
#[cfg(not(debug_assertions))]
{
// Unfortunately we have no access to the AST here.
f.debug_tuple("AstNodeRef").finish_non_exhaustive()
}
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/python_platform.rs | crates/ty_python_semantic/src/python_platform.rs | use std::fmt::{Display, Formatter};
use ty_combine::Combine;
/// The target platform to assume when resolving types.
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
#[cfg_attr(
feature = "serde",
derive(serde::Serialize, serde::Deserialize, ruff_macros::RustDoc),
serde(rename_all = "kebab-case")
)]
pub enum PythonPlatform {
/// Do not make any assumptions about the target platform.
All,
/// Assume a specific target platform like `linux`, `darwin` or `win32`.
///
/// We use a string (instead of individual enum variants), as the set of possible platforms
/// may change over time. See <https://docs.python.org/3/library/sys.html#sys.platform> for
/// some known platform identifiers.
#[cfg_attr(feature = "serde", serde(untagged))]
Identifier(String),
}
impl From<String> for PythonPlatform {
fn from(platform: String) -> Self {
match platform.as_str() {
"all" => PythonPlatform::All,
_ => PythonPlatform::Identifier(platform.clone()),
}
}
}
impl Display for PythonPlatform {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
PythonPlatform::All => f.write_str("all"),
PythonPlatform::Identifier(name) => f.write_str(name),
}
}
}
impl Default for PythonPlatform {
fn default() -> Self {
if cfg!(target_os = "windows") {
PythonPlatform::Identifier("win32".to_string())
} else if cfg!(target_os = "macos") {
PythonPlatform::Identifier("darwin".to_string())
} else if cfg!(target_os = "android") {
PythonPlatform::Identifier("android".to_string())
} else if cfg!(target_os = "ios") {
PythonPlatform::Identifier("ios".to_string())
} else {
PythonPlatform::Identifier("linux".to_string())
}
}
}
impl Combine for PythonPlatform {
fn combine_with(&mut self, _other: Self) {}
}
#[cfg(feature = "schemars")]
mod schema {
use crate::PythonPlatform;
use ruff_db::RustDoc;
use schemars::{JsonSchema, Schema, SchemaGenerator};
use serde_json::Value;
impl JsonSchema for PythonPlatform {
fn schema_name() -> std::borrow::Cow<'static, str> {
std::borrow::Cow::Borrowed("PythonPlatform")
}
fn json_schema(_gen: &mut SchemaGenerator) -> Schema {
fn constant(value: &str, description: &str) -> Value {
let mut schema = schemars::json_schema!({ "const": value });
schema.ensure_object().insert(
"description".to_string(),
Value::String(description.to_string()),
);
schema.into()
}
// Hard code some well known values, but allow any other string as well.
let mut any_of = vec![schemars::json_schema!({ "type": "string" }).into()];
// Promote well-known values for better auto-completion.
// Using `const` over `enumValues` as recommended [here](https://github.com/SchemaStore/schemastore/blob/master/CONTRIBUTING.md#documenting-enums).
any_of.push(constant(
"all",
"Do not make any assumptions about the target platform.",
));
any_of.push(constant("darwin", "Darwin"));
any_of.push(constant("linux", "Linux"));
any_of.push(constant("win32", "Windows"));
let mut schema = Schema::default();
let object = schema.ensure_object();
object.insert("anyOf".to_string(), Value::Array(any_of));
object.insert(
"description".to_string(),
Value::String(<PythonPlatform as RustDoc>::rust_doc().to_string()),
);
schema
}
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/semantic_index.rs | crates/ty_python_semantic/src/semantic_index.rs | use std::iter::{FusedIterator, once};
use std::sync::Arc;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_index::{IndexSlice, IndexVec};
use ruff_python_ast::NodeIndex;
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa::Update;
use salsa::plumbing::AsId;
use ty_module_resolver::ModuleName;
use crate::Db;
use crate::node_key::NodeKey;
use crate::semantic_index::ast_ids::AstIds;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::builder::SemanticIndexBuilder;
use crate::semantic_index::definition::{Definition, DefinitionNodeKey, Definitions};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::narrowing_constraints::ScopedNarrowingConstraint;
use crate::semantic_index::place::{PlaceExprRef, PlaceTable};
pub use crate::semantic_index::scope::FileScopeId;
use crate::semantic_index::scope::{
NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopeKind, ScopeLaziness,
};
use crate::semantic_index::symbol::ScopedSymbolId;
use crate::semantic_index::use_def::{EnclosingSnapshotKey, ScopedEnclosingSnapshotId, UseDefMap};
use crate::semantic_model::HasTrackedScope;
pub mod ast_ids;
mod builder;
pub mod definition;
pub mod expression;
pub(crate) mod member;
pub(crate) mod narrowing_constraints;
pub mod place;
pub(crate) mod predicate;
mod re_exports;
mod reachability_constraints;
pub(crate) mod scope;
pub(crate) mod symbol;
mod use_def;
pub(crate) use self::use_def::{
ApplicableConstraints, BindingWithConstraints, BindingWithConstraintsIterator,
DeclarationWithConstraint, DeclarationsIterator,
};
/// Returns the semantic index for `file`.
///
/// Prefer using [`symbol_table`] when working with symbols from a single scope.
#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
let _span = tracing::trace_span!("semantic_index", ?file).entered();
let module = parsed_module(db, file).load(db);
SemanticIndexBuilder::new(db, file, &module).build()
}
/// Returns the place table for a specific `scope`.
///
/// Using [`place_table`] over [`semantic_index`] has the advantage that
/// Salsa can avoid invalidating dependent queries if this scope's place table
/// is unchanged.
#[salsa::tracked(returns(deref), heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn place_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<PlaceTable> {
let file = scope.file(db);
let _span = tracing::trace_span!("place_table", scope=?scope.as_id(), ?file).entered();
let index = semantic_index(db, file);
Arc::clone(&index.place_tables[scope.file_scope_id(db)])
}
/// Returns the set of modules that are imported anywhere in `file`.
///
/// This set only considers `import` statements, not `from...import` statements.
/// See [`ModuleLiteralType::available_submodule_attributes`] for discussion
/// of why this analysis is intentionally limited.
#[salsa::tracked(returns(deref), heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc<FxHashSet<ModuleName>> {
semantic_index(db, file).imported_modules.clone()
}
/// Returns the use-def map for a specific `scope`.
///
/// Using [`use_def_map`] over [`semantic_index`] has the advantage that
/// Salsa can avoid invalidating dependent queries if this scope's use-def map
/// is unchanged.
#[salsa::tracked(returns(deref), heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
let file = scope.file(db);
let _span = tracing::trace_span!("use_def_map", scope=?scope.as_id(), ?file).entered();
let index = semantic_index(db, file);
Arc::clone(&index.use_def_maps[scope.file_scope_id(db)])
}
/// Returns all attribute assignments (and their method scope IDs) with a symbol name matching
/// the one given for a specific class body scope.
///
/// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it
/// introduces a direct dependency on that file's AST.
pub(crate) fn attribute_assignments<'db, 's>(
db: &'db dyn Db,
class_body_scope: ScopeId<'db>,
name: &'s str,
) -> impl Iterator<Item = (BindingWithConstraintsIterator<'db, 'db>, FileScopeId)> + use<'s, 'db> {
let file = class_body_scope.file(db);
let index = semantic_index(db, file);
attribute_scopes(db, class_body_scope).filter_map(|function_scope_id| {
let place_table = index.place_table(function_scope_id);
let member = place_table.member_id_by_instance_attribute_name(name)?;
let use_def = &index.use_def_maps[function_scope_id];
Some((use_def.reachable_member_bindings(member), function_scope_id))
})
}
/// Returns all attribute declarations (and their method scope IDs) with a symbol name matching
/// the one given for a specific class body scope.
///
/// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it
/// introduces a direct dependency on that file's AST.
pub(crate) fn attribute_declarations<'db, 's>(
db: &'db dyn Db,
class_body_scope: ScopeId<'db>,
name: &'s str,
) -> impl Iterator<Item = (DeclarationsIterator<'db, 'db>, FileScopeId)> + use<'s, 'db> {
let file = class_body_scope.file(db);
let index = semantic_index(db, file);
attribute_scopes(db, class_body_scope).filter_map(|function_scope_id| {
let place_table = index.place_table(function_scope_id);
let member = place_table.member_id_by_instance_attribute_name(name)?;
let use_def = &index.use_def_maps[function_scope_id];
Some((
use_def.reachable_member_declarations(member),
function_scope_id,
))
})
}
/// Returns all attribute assignments as scope IDs for a specific class body scope.
///
/// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it
/// introduces a direct dependency on that file's AST.
pub(crate) fn attribute_scopes<'db>(
db: &'db dyn Db,
class_body_scope: ScopeId<'db>,
) -> impl Iterator<Item = FileScopeId> + 'db {
let file = class_body_scope.file(db);
let index = semantic_index(db, file);
let class_scope_id = class_body_scope.file_scope_id(db);
ChildrenIter::new(&index.scopes, class_scope_id)
.filter_map(move |(child_scope_id, scope)| {
let (function_scope_id, function_scope) =
if scope.node().scope_kind() == ScopeKind::TypeParams {
// This could be a generic method with a type-params scope.
// Go one level deeper to find the function scope. The first
// descendant is the (potential) function scope.
let function_scope_id = scope.descendants().start;
(function_scope_id, index.scope(function_scope_id))
} else {
(child_scope_id, scope)
};
function_scope.node().as_function()?;
Some(function_scope_id)
})
.flat_map(move |func_id| {
// Add any descendent scope that is eager and have eager scopes between the scope
// and the method scope. Since attributes can be defined in this scope.
let nested = index.descendent_scopes(func_id).filter_map(move |(id, s)| {
let is_eager = s.kind().is_eager();
let parents_are_eager = {
let mut all_parents_eager = true;
let mut current = Some(id);
while let Some(scope_id) = current {
if scope_id == func_id {
break;
}
let scope = index.scope(scope_id);
if !scope.is_eager() {
all_parents_eager = false;
break;
}
current = scope.parent();
}
all_parents_eager
};
(parents_are_eager && is_eager).then_some(id)
});
once(func_id).chain(nested)
})
}
/// Returns the module global scope of `file`.
#[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
let _span = tracing::trace_span!("global_scope", ?file).entered();
FileScopeId::global().to_scope_id(db, file)
}
pub(crate) enum EnclosingSnapshotResult<'map, 'db> {
FoundConstraint(ScopedNarrowingConstraint),
FoundBindings(BindingWithConstraintsIterator<'map, 'db>),
NotFound,
NoLongerInEagerContext,
}
/// The place tables and use-def maps for all scopes in a file.
#[derive(Debug, Update, get_size2::GetSize)]
pub(crate) struct SemanticIndex<'db> {
/// List of all place tables in this file, indexed by scope.
place_tables: IndexVec<FileScopeId, Arc<PlaceTable>>,
/// List of all scopes in this file.
scopes: IndexVec<FileScopeId, Scope>,
/// Map expressions to their corresponding scope.
scopes_by_expression: ExpressionsScopeMap,
/// Map from a node creating a definition to its definition.
definitions_by_node: FxHashMap<DefinitionNodeKey, Definitions<'db>>,
/// Map from a standalone expression to its [`Expression`] ingredient.
expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
/// Map from nodes that create a scope to the scope they create.
scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
/// Map from the file-local [`FileScopeId`] to the salsa-ingredient [`ScopeId`].
scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
/// Use-def map for each scope in this file.
use_def_maps: IndexVec<FileScopeId, Arc<UseDefMap<'db>>>,
/// Lookup table to map between node ids and ast nodes.
///
/// Note: We should not depend on this map when analysing other files or
/// changing a file invalidates all dependents.
ast_ids: IndexVec<FileScopeId, AstIds>,
/// The set of modules that are imported anywhere within this file.
imported_modules: Arc<FxHashSet<ModuleName>>,
/// Flags about the global scope (code usage impacting inference)
has_future_annotations: bool,
/// Map of all of the enclosing snapshots that appear in this file.
enclosing_snapshots: FxHashMap<EnclosingSnapshotKey, ScopedEnclosingSnapshotId>,
/// List of all semantic syntax errors in this file.
semantic_syntax_errors: Vec<SemanticSyntaxError>,
/// Set of all generator functions in this file.
generator_functions: FxHashSet<FileScopeId>,
}
impl<'db> SemanticIndex<'db> {
/// Returns the place table for a specific scope.
///
/// Use the Salsa cached [`place_table()`] query if you only need the
/// place table for a single scope.
#[track_caller]
pub(super) fn place_table(&self, scope_id: FileScopeId) -> &PlaceTable {
&self.place_tables[scope_id]
}
/// Returns the use-def map for a specific scope.
///
/// Use the Salsa cached [`use_def_map()`] query if you only need the
/// use-def map for a single scope.
#[track_caller]
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> &UseDefMap<'db> {
&self.use_def_maps[scope_id]
}
#[track_caller]
pub(crate) fn ast_ids(&self, scope_id: FileScopeId) -> &AstIds {
&self.ast_ids[scope_id]
}
/// Returns the ID of the `expression`'s enclosing scope.
#[track_caller]
pub(crate) fn expression_scope_id<E>(&self, expression: &E) -> FileScopeId
where
E: HasTrackedScope,
{
self.try_expression_scope_id(expression)
.expect("Expression to be part of a scope if it is from the same module")
}
/// Returns the ID of the `expression`'s enclosing scope.
pub(crate) fn try_expression_scope_id<E>(&self, expression: &E) -> Option<FileScopeId>
where
E: HasTrackedScope,
{
self.scopes_by_expression.try_get(expression)
}
/// Returns the [`Scope`] of the `expression`'s enclosing scope.
#[allow(unused)]
#[track_caller]
pub(crate) fn expression_scope(&self, expression: &impl HasTrackedScope) -> &Scope {
&self.scopes[self.expression_scope_id(expression)]
}
/// Returns the [`Scope`] with the given id.
#[track_caller]
pub(crate) fn scope(&self, id: FileScopeId) -> &Scope {
&self.scopes[id]
}
pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId<'db>> + '_ {
self.scope_ids_by_scope.iter().copied()
}
pub(crate) fn symbol_is_global_in_scope(
&self,
symbol: ScopedSymbolId,
scope: FileScopeId,
) -> bool {
self.place_table(scope).symbol(symbol).is_global()
}
pub(crate) fn symbol_is_nonlocal_in_scope(
&self,
symbol: ScopedSymbolId,
scope: FileScopeId,
) -> bool {
self.place_table(scope).symbol(symbol).is_nonlocal()
}
/// Returns the id of the parent scope.
pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
let scope = self.scope(scope_id);
scope.parent()
}
/// Returns the parent scope of `scope_id`.
#[expect(unused)]
#[track_caller]
pub(crate) fn parent_scope(&self, scope_id: FileScopeId) -> Option<&Scope> {
Some(&self.scopes[self.parent_scope_id(scope_id)?])
}
/// Return the [`Definition`] of the class enclosing this method, given the
/// method's body scope, or `None` if it is not a method.
pub(crate) fn class_definition_of_method(
&self,
function_body_scope: FileScopeId,
) -> Option<Definition<'db>> {
let current_scope = self.scope(function_body_scope);
if current_scope.kind() != ScopeKind::Function {
return None;
}
let parent_scope_id = current_scope.parent()?;
let parent_scope = self.scope(parent_scope_id);
let class_scope = match parent_scope.kind() {
ScopeKind::Class => parent_scope,
ScopeKind::TypeParams => {
let class_scope_id = parent_scope.parent()?;
let potentially_class_scope = self.scope(class_scope_id);
match potentially_class_scope.kind() {
ScopeKind::Class => potentially_class_scope,
_ => return None,
}
}
_ => return None,
};
class_scope
.node()
.as_class()
.map(|node_ref| self.expect_single_definition(node_ref))
}
fn is_scope_reachable(&self, db: &'db dyn Db, scope_id: FileScopeId) -> bool {
self.parent_scope_id(scope_id)
.is_none_or(|parent_scope_id| {
if !self.is_scope_reachable(db, parent_scope_id) {
return false;
}
let parent_use_def = self.use_def_map(parent_scope_id);
let reachability = self.scope(scope_id).reachability();
parent_use_def.is_reachable(db, reachability)
})
}
/// Returns true if a given AST node is reachable from the start of the scope. For example,
/// in the following code, expression `2` is reachable, but expressions `1` and `3` are not:
/// ```py
/// def f():
/// x = 1
/// if False:
/// x # 1
/// x # 2
/// return
/// x # 3
/// ```
pub(crate) fn is_node_reachable(
&self,
db: &'db dyn crate::Db,
scope_id: FileScopeId,
node_key: NodeKey,
) -> bool {
self.is_scope_reachable(db, scope_id)
&& self.use_def_map(scope_id).is_node_reachable(db, node_key)
}
/// Returns an iterator over the descendent scopes of `scope`.
#[allow(unused)]
pub(crate) fn descendent_scopes(&self, scope: FileScopeId) -> DescendantsIter<'_> {
DescendantsIter::new(&self.scopes, scope)
}
/// Returns an iterator over the direct child scopes of `scope`.
#[allow(unused)]
pub(crate) fn child_scopes(&self, scope: FileScopeId) -> ChildrenIter<'_> {
ChildrenIter::new(&self.scopes, scope)
}
/// Returns an iterator over all ancestors of `scope`, starting with `scope` itself.
pub(crate) fn ancestor_scopes(&self, scope: FileScopeId) -> AncestorsIter<'_> {
AncestorsIter::new(&self.scopes, scope)
}
/// Returns an iterator over ancestors of `scope` that are visible for name resolution,
/// starting with `scope` itself. This follows Python's lexical scoping rules where
/// class scopes are skipped during name resolution (except for the starting scope
/// if it happens to be a class scope).
///
/// For example, in this code:
/// ```python
/// x = 1
/// class A:
/// x = 2
/// def method(self):
/// print(x) # Refers to global x=1, not class x=2
/// ```
/// The `method` function can see the global scope but not the class scope.
pub(crate) fn visible_ancestor_scopes(&self, scope: FileScopeId) -> VisibleAncestorsIter<'_> {
VisibleAncestorsIter::new(&self.scopes, scope)
}
/// Returns the [`definition::Definition`] salsa ingredient(s) for `definition_key`.
///
/// There will only ever be >1 `Definition` associated with a `definition_key`
/// if the definition is created by a wildcard (`*`) import.
#[track_caller]
pub(crate) fn definitions(
&self,
definition_key: impl Into<DefinitionNodeKey>,
) -> &Definitions<'db> {
&self.definitions_by_node[&definition_key.into()]
}
/// Returns the [`definition::Definition`] salsa ingredient for `definition_key`.
///
/// ## Panics
///
/// If the number of definitions associated with the key is not exactly 1 and
/// the `debug_assertions` feature is enabled, this method will panic.
#[track_caller]
pub(crate) fn expect_single_definition(
&self,
definition_key: impl Into<DefinitionNodeKey> + std::fmt::Debug + Copy,
) -> Definition<'db> {
let definitions = self.definitions(definition_key);
debug_assert_eq!(
definitions.len(),
1,
"Expected exactly one definition to be associated with AST node {definition_key:?} but found {}",
definitions.len()
);
definitions[0]
}
/// Returns the [`Expression`] ingredient for an expression node.
/// Panics if we have no expression ingredient for that node. We can only call this method for
/// standalone-inferable expressions, which we call `add_standalone_expression` for in
/// [`SemanticIndexBuilder`].
#[track_caller]
pub(crate) fn expression(
&self,
expression_key: impl Into<ExpressionNodeKey>,
) -> Expression<'db> {
self.expressions_by_node[&expression_key.into()]
}
pub(crate) fn try_expression(
&self,
expression_key: impl Into<ExpressionNodeKey>,
) -> Option<Expression<'db>> {
self.expressions_by_node
.get(&expression_key.into())
.copied()
}
pub(crate) fn is_standalone_expression(
&self,
expression_key: impl Into<ExpressionNodeKey>,
) -> bool {
self.expressions_by_node
.contains_key(&expression_key.into())
}
/// Returns the id of the scope that `node` creates.
/// This is different from [`definition::Definition::scope`] which
/// returns the scope in which that definition is defined in.
#[track_caller]
pub(crate) fn node_scope(&self, node: NodeWithScopeRef) -> FileScopeId {
self.scopes_by_node[&node.node_key()]
}
/// Returns the id of the scope that `node` creates, if it exists.
pub(crate) fn try_node_scope(&self, node: NodeWithScopeRef) -> Option<FileScopeId> {
self.scopes_by_node.get(&node.node_key()).copied()
}
/// Checks if there is an import of `__future__.annotations` in the global scope, which affects
/// the logic for type inference.
pub(super) fn has_future_annotations(&self) -> bool {
self.has_future_annotations
}
/// Returns
/// * `NoLongerInEagerContext` if the nested scope is no longer in an eager context
/// (that is, not every scope that will be traversed is eager) and no lazy snapshots were found.
/// * an iterator of bindings for a particular nested scope reference if the bindings exist.
/// * a narrowing constraint if there are no bindings, but there is a narrowing constraint for an enclosing scope place.
/// * `NotFound` if the narrowing constraint / bindings do not exist in the nested scope.
pub(crate) fn enclosing_snapshot(
&self,
enclosing_scope: FileScopeId,
expr: PlaceExprRef,
nested_scope: FileScopeId,
) -> EnclosingSnapshotResult<'_, 'db> {
for (ancestor_scope_id, ancestor_scope) in self.ancestor_scopes(nested_scope) {
if ancestor_scope_id == enclosing_scope {
break;
}
if !ancestor_scope.is_eager() {
if let PlaceExprRef::Symbol(symbol) = expr
&& let Some(place_id) =
self.place_tables[enclosing_scope].symbol_id(symbol.name())
{
let key = EnclosingSnapshotKey {
enclosing_scope,
enclosing_place: place_id.into(),
nested_scope,
nested_laziness: ScopeLaziness::Lazy,
};
if let Some(id) = self.enclosing_snapshots.get(&key) {
return self.use_def_maps[enclosing_scope]
.enclosing_snapshot(*id, key.nested_laziness);
}
}
return EnclosingSnapshotResult::NoLongerInEagerContext;
}
}
let Some(place_id) = self.place_tables[enclosing_scope].place_id(expr) else {
return EnclosingSnapshotResult::NotFound;
};
let key = EnclosingSnapshotKey {
enclosing_scope,
enclosing_place: place_id,
nested_scope,
nested_laziness: ScopeLaziness::Eager,
};
let Some(id) = self.enclosing_snapshots.get(&key) else {
return EnclosingSnapshotResult::NotFound;
};
self.use_def_maps[enclosing_scope].enclosing_snapshot(*id, key.nested_laziness)
}
pub(crate) fn semantic_syntax_errors(&self) -> &[SemanticSyntaxError] {
&self.semantic_syntax_errors
}
}
pub(crate) struct AncestorsIter<'a> {
scopes: &'a IndexSlice<FileScopeId, Scope>,
next_id: Option<FileScopeId>,
}
impl<'a> AncestorsIter<'a> {
fn new(scopes: &'a IndexSlice<FileScopeId, Scope>, start: FileScopeId) -> Self {
Self {
scopes,
next_id: Some(start),
}
}
}
impl<'a> Iterator for AncestorsIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
let current_id = self.next_id?;
let current = &self.scopes[current_id];
self.next_id = current.parent();
Some((current_id, current))
}
}
impl FusedIterator for AncestorsIter<'_> {}
pub(crate) struct VisibleAncestorsIter<'a> {
inner: AncestorsIter<'a>,
starting_scope_kind: ScopeKind,
yielded_count: usize,
}
impl<'a> VisibleAncestorsIter<'a> {
fn new(scopes: &'a IndexSlice<FileScopeId, Scope>, start: FileScopeId) -> Self {
let starting_scope = &scopes[start];
Self {
inner: AncestorsIter::new(scopes, start),
starting_scope_kind: starting_scope.kind(),
yielded_count: 0,
}
}
}
impl<'a> Iterator for VisibleAncestorsIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
loop {
let (scope_id, scope) = self.inner.next()?;
self.yielded_count += 1;
// Always return the first scope (the starting scope)
if self.yielded_count == 1 {
return Some((scope_id, scope));
}
// Skip class scopes for subsequent scopes (following Python's lexical scoping rules)
// Exception: type parameter scopes can see names defined in an immediately-enclosing class scope
if scope.kind() == ScopeKind::Class {
// Allow annotation scopes to see their immediately-enclosing class scope exactly once
if self.starting_scope_kind.is_annotation() && self.yielded_count == 2 {
return Some((scope_id, scope));
}
continue;
}
return Some((scope_id, scope));
}
}
}
impl FusedIterator for VisibleAncestorsIter<'_> {}
pub(crate) struct DescendantsIter<'a> {
next_id: FileScopeId,
descendants: std::slice::Iter<'a, Scope>,
}
impl<'a> DescendantsIter<'a> {
fn new(scopes: &'a IndexSlice<FileScopeId, Scope>, scope_id: FileScopeId) -> Self {
let scope = &scopes[scope_id];
let scopes = &scopes[scope.descendants()];
Self {
next_id: scope_id + 1,
descendants: scopes.iter(),
}
}
}
impl<'a> Iterator for DescendantsIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
let descendant = self.descendants.next()?;
let id = self.next_id;
self.next_id = self.next_id + 1;
Some((id, descendant))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.descendants.size_hint()
}
}
impl FusedIterator for DescendantsIter<'_> {}
impl ExactSizeIterator for DescendantsIter<'_> {}
pub(crate) struct ChildrenIter<'a> {
parent: FileScopeId,
descendants: DescendantsIter<'a>,
}
impl<'a> ChildrenIter<'a> {
pub(crate) fn new(scopes: &'a IndexSlice<FileScopeId, Scope>, parent: FileScopeId) -> Self {
let descendants = DescendantsIter::new(scopes, parent);
Self {
parent,
descendants,
}
}
}
impl<'a> Iterator for ChildrenIter<'a> {
type Item = (FileScopeId, &'a Scope);
fn next(&mut self) -> Option<Self::Item> {
self.descendants
.find(|(_, scope)| scope.parent() == Some(self.parent))
}
}
impl FusedIterator for ChildrenIter<'_> {}
/// Interval map that maps a range of expression node ids to their corresponding scopes.
///
/// Lookups require `O(log n)` time, where `n` is roughly the number of scopes (roughly
/// because sub-scopes can be interleaved with expressions in the outer scope, e.g. function, some statements, a function).
#[derive(Eq, PartialEq, Debug, get_size2::GetSize, Default)]
struct ExpressionsScopeMap(Box<[(std::ops::RangeInclusive<NodeIndex>, FileScopeId)]>);
impl ExpressionsScopeMap {
fn try_get<E>(&self, node: &E) -> Option<FileScopeId>
where
E: HasTrackedScope,
{
let node_index = node.node_index().load();
let entry = self
.0
.binary_search_by_key(&node_index, |(range, _)| *range.start());
let index = match entry {
Ok(index) => index,
Err(index) => index.checked_sub(1)?,
};
let (range, scope) = &self.0[index];
if range.contains(&node_index) {
Some(*scope)
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use ruff_db::files::{File, system_path_to_file};
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_python_ast::{self as ast};
use ruff_text_size::{Ranged, TextRange};
use crate::Db;
use crate::db::tests::{TestDb, TestDbBuilder};
use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::semantic_index::place::PlaceTable;
use crate::semantic_index::scope::{FileScopeId, Scope, ScopeKind};
use crate::semantic_index::symbol::ScopedSymbolId;
use crate::semantic_index::use_def::UseDefMap;
use crate::semantic_index::{global_scope, place_table, semantic_index, use_def_map};
impl UseDefMap<'_> {
fn first_public_binding(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
self.end_of_scope_symbol_bindings(symbol)
.find_map(|constrained_binding| constrained_binding.binding.definition())
}
fn first_binding_at_use(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
self.bindings_at_use(use_id)
.find_map(|constrained_binding| constrained_binding.binding.definition())
}
}
struct TestCase {
db: TestDb,
file: File,
}
fn test_case(content: &str) -> TestCase {
const FILENAME: &str = "test.py";
let db = TestDbBuilder::new()
.with_file(FILENAME, content)
.build()
.unwrap();
let file = system_path_to_file(&db, FILENAME).unwrap();
TestCase { db, file }
}
fn names(table: &PlaceTable) -> Vec<String> {
table
.symbols()
.map(|expr| expr.name().to_string())
.collect()
}
#[test]
fn empty() {
let TestCase { db, file } = test_case("");
let global_table = place_table(&db, global_scope(&db, file));
let global_names = names(global_table);
assert_eq!(global_names, Vec::<&str>::new());
}
#[test]
fn simple() {
let TestCase { db, file } = test_case("x");
let global_table = place_table(&db, global_scope(&db, file));
assert_eq!(names(global_table), vec!["x"]);
}
#[test]
fn annotation_only() {
let TestCase { db, file } = test_case("x: int");
let global_table = place_table(&db, global_scope(&db, file));
assert_eq!(names(global_table), vec!["int", "x"]);
// TODO record definition
}
#[test]
fn import() {
let TestCase { db, file } = test_case("import foo");
let scope = global_scope(&db, file);
let global_table = place_table(&db, scope);
assert_eq!(names(global_table), vec!["foo"]);
let foo = global_table.symbol_id("foo").unwrap();
let use_def = use_def_map(&db, scope);
let binding = use_def.first_public_binding(foo).unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Import(_)));
}
#[test]
fn import_sub() {
let TestCase { db, file } = test_case("import foo.bar");
let global_table = place_table(&db, global_scope(&db, file));
assert_eq!(names(global_table), vec!["foo"]);
}
#[test]
fn import_as() {
let TestCase { db, file } = test_case("import foo.bar as baz");
let global_table = place_table(&db, global_scope(&db, file));
assert_eq!(names(global_table), vec!["baz"]);
}
#[test]
fn import_from() {
let TestCase { db, file } = test_case("from bar import foo");
let scope = global_scope(&db, file);
let global_table = place_table(&db, scope);
assert_eq!(names(global_table), vec!["foo"]);
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { symbol.is_bound() && !symbol.is_used() }),
"symbols that are defined get the defined flag"
);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id("foo").expect("symbol to exist"))
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::ImportFrom(_)));
}
#[test]
fn assign() {
let TestCase { db, file } = test_case("x = foo");
let scope = global_scope(&db, file);
let global_table = place_table(&db, scope);
assert_eq!(names(global_table), vec!["foo", "x"]);
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { !symbol.is_bound() && symbol.is_used() }),
"a symbol used but not bound in a scope should have only the used flag"
);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id("x").expect("symbol exists"))
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/pull_types.rs | crates/ty_python_semantic/src/pull_types.rs | //! A utility visitor for testing, which attempts to "pull a type" for ever sub-node in a given AST.
//!
//! This is used in the "corpus" and (indirectly) the "mdtest" integration tests for this crate.
//! (Mdtest uses the `pull_types` function via the `ty_test` crate.)
use crate::{Db, HasType, SemanticModel};
use ruff_db::{files::File, parsed::parsed_module};
use ruff_python_ast::{
self as ast, visitor::source_order, visitor::source_order::SourceOrderVisitor,
};
pub fn pull_types(db: &dyn Db, file: File) {
let mut visitor = PullTypesVisitor::new(db, file);
let ast = parsed_module(db, file).load(db);
visitor.visit_body(ast.suite());
}
struct PullTypesVisitor<'db> {
model: SemanticModel<'db>,
}
impl<'db> PullTypesVisitor<'db> {
fn new(db: &'db dyn Db, file: File) -> Self {
Self {
model: SemanticModel::new(db, file),
}
}
fn visit_target(&mut self, target: &ast::Expr) {
match target {
ast::Expr::List(ast::ExprList { elts, .. })
| ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => {
for element in elts {
self.visit_target(element);
}
}
_ => self.visit_expr(target),
}
}
}
impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
match stmt {
ast::Stmt::FunctionDef(function) => {
let _ty = function.inferred_type(&self.model);
}
ast::Stmt::ClassDef(class) => {
let _ty = class.inferred_type(&self.model);
}
ast::Stmt::Assign(assign) => {
for target in &assign.targets {
self.visit_target(target);
}
self.visit_expr(&assign.value);
return;
}
ast::Stmt::For(for_stmt) => {
self.visit_target(&for_stmt.target);
self.visit_expr(&for_stmt.iter);
self.visit_body(&for_stmt.body);
self.visit_body(&for_stmt.orelse);
return;
}
ast::Stmt::With(with_stmt) => {
for item in &with_stmt.items {
if let Some(target) = &item.optional_vars {
self.visit_target(target);
}
self.visit_expr(&item.context_expr);
}
self.visit_body(&with_stmt.body);
return;
}
ast::Stmt::AnnAssign(_)
| ast::Stmt::Return(_)
| ast::Stmt::Delete(_)
| ast::Stmt::AugAssign(_)
| ast::Stmt::TypeAlias(_)
| ast::Stmt::While(_)
| ast::Stmt::If(_)
| ast::Stmt::Match(_)
| ast::Stmt::Raise(_)
| ast::Stmt::Try(_)
| ast::Stmt::Assert(_)
| ast::Stmt::Import(_)
| ast::Stmt::ImportFrom(_)
| ast::Stmt::Global(_)
| ast::Stmt::Nonlocal(_)
| ast::Stmt::Expr(_)
| ast::Stmt::Pass(_)
| ast::Stmt::Break(_)
| ast::Stmt::Continue(_)
| ast::Stmt::IpyEscapeCommand(_) => {}
}
source_order::walk_stmt(self, stmt);
}
fn visit_expr(&mut self, expr: &ast::Expr) {
let _ty = expr.inferred_type(&self.model);
source_order::walk_expr(self, expr);
}
fn visit_comprehension(&mut self, comprehension: &ast::Comprehension) {
self.visit_expr(&comprehension.iter);
self.visit_target(&comprehension.target);
for if_expr in &comprehension.ifs {
self.visit_expr(if_expr);
}
}
fn visit_parameter(&mut self, parameter: &ast::Parameter) {
let _ty = parameter.inferred_type(&self.model);
source_order::walk_parameter(self, parameter);
}
fn visit_parameter_with_default(&mut self, parameter_with_default: &ast::ParameterWithDefault) {
let _ty = parameter_with_default.inferred_type(&self.model);
source_order::walk_parameter_with_default(self, parameter_with_default);
}
fn visit_alias(&mut self, alias: &ast::Alias) {
let _ty = alias.inferred_type(&self.model);
source_order::walk_alias(self, alias);
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/rank.rs | crates/ty_python_semantic/src/rank.rs | //! A boxed bit slice that supports a constant-time `rank` operation.
use bitvec::prelude::{BitBox, Msb0};
use get_size2::GetSize;
/// A boxed bit slice that supports a constant-time `rank` operation.
///
/// This can be used to "shrink" a large vector, where you only need to keep certain elements, and
/// you want to continue to use the index in the large vector to identify each element.
///
/// First you create a new smaller vector, keeping only the elements of the large vector that you
/// care about. Now you need a way to translate an index into the large vector (which no longer
/// exists) into the corresponding index into the smaller vector. To do that, you create a bit
/// slice, containing a bit for every element of the original large vector. Each bit in the bit
/// slice indicates whether that element of the large vector was kept in the smaller vector. And
/// the `rank` of the bit gives us the index of the element in the smaller vector.
///
/// However, the naive implementation of `rank` is O(n) in the size of the bit slice. To address
/// that, we use a standard trick: we divide the bit slice into 64-bit chunks, and when
/// constructing the bit slice, precalculate the rank of the first bit in each chunk. Then, to
/// calculate the rank of an arbitrary bit, we first grab the precalculated rank of the chunk that
/// bit belongs to, and add the rank of the bit within its (fixed-sized) chunk.
///
/// This trick adds O(1.5) bits of overhead per large vector element on 64-bit platforms, and O(2)
/// bits of overhead on 32-bit platforms.
#[derive(Clone, Debug, Eq, PartialEq, GetSize)]
pub(crate) struct RankBitBox {
    /// The underlying bit slice, stored in fixed-size chunks (`u64` on 64-bit
    /// platforms, `u32` elsewhere) in most-significant-bit-first (`Msb0`) order.
    #[get_size(size_fn = bit_box_size)]
    bits: BitBox<Chunk, Msb0>,
    /// Precomputed rank of the first bit of each chunk, i.e. the number of set
    /// bits in all preceding chunks. This is what makes `rank` O(1).
    chunk_ranks: Box<[u32]>,
}
/// Reports the heap footprint of the bit box for `GetSize` accounting,
/// measured via its raw chunk storage.
fn bit_box_size(bits: &BitBox<Chunk, Msb0>) -> usize {
    let raw_chunks = bits.as_raw_slice();
    raw_chunks.get_heap_size()
}
// bitvec does not support `u64` as a Store type on 32-bit platforms
#[cfg(target_pointer_width = "64")]
type Chunk = u64;
#[cfg(not(target_pointer_width = "64"))]
type Chunk = u32;
/// Number of bits per rank chunk (64 or 32 depending on the platform).
const CHUNK_SIZE: usize = Chunk::BITS as usize;
impl RankBitBox {
    /// Builds the bit box from a sequence of bits, precomputing the rank of
    /// the first bit of every chunk so that `rank` runs in constant time.
    pub(crate) fn from_bits(iter: impl Iterator<Item = bool>) -> Self {
        let bits: BitBox<Chunk, Msb0> = iter.collect();
        let raw_chunks = bits.as_raw_slice();
        let mut running_rank = 0u32;
        let mut chunk_ranks = Vec::with_capacity(raw_chunks.len());
        for chunk in raw_chunks {
            // Record the rank *before* this chunk, then account for its bits.
            chunk_ranks.push(running_rank);
            running_rank += chunk.count_ones();
        }
        Self {
            bits,
            chunk_ranks: chunk_ranks.into_boxed_slice(),
        }
    }

    /// Returns the bit at `index`, or `None` if `index` is out of bounds.
    #[inline]
    pub(crate) fn get_bit(&self, index: usize) -> Option<bool> {
        self.bits.get(index).map(|bit_ref| *bit_ref)
    }

    /// Returns the number of bits _before_ (and not including) the given index that are set.
    #[inline]
    pub(crate) fn rank(&self, index: usize) -> u32 {
        let chunk_index = index / CHUNK_SIZE;
        let index_within_chunk = index % CHUNK_SIZE;
        let chunk_rank = self.chunk_ranks[chunk_index];
        if index_within_chunk == 0 {
            // The precomputed value is exactly the rank of the chunk's first bit.
            return chunk_rank;
        }
        // Zero out the requested bit and everything after it (the chunk is
        // Msb0-ordered, so "after" is the low-significance end), then count
        // the surviving 1s — those are the set bits before the requested bit.
        let chunk = self.bits.as_raw_slice()[chunk_index];
        let chunk_mask = Chunk::MAX << (CHUNK_SIZE - index_within_chunk);
        chunk_rank + (chunk & chunk_mask).count_ones()
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/node_key.rs | crates/ty_python_semantic/src/node_key.rs | use ruff_python_ast::{HasNodeIndex, NodeIndex};
use crate::ast_node_ref::AstNodeRef;
/// Compact key for a node for use in a hash map.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, get_size2::GetSize)]
pub(super) struct NodeKey(NodeIndex);

impl NodeKey {
    /// Builds a key from any AST node that carries a node index.
    pub(super) fn from_node<N>(node: N) -> Self
    where
        N: HasNodeIndex,
    {
        // NOTE(review): `load` presumably reads the node's cached index value;
        // confirm against the `HasNodeIndex` API.
        NodeKey(node.node_index().load())
    }

    /// Builds a key from an [`AstNodeRef`], which already stores its index.
    pub(super) fn from_node_ref<T>(node_ref: &AstNodeRef<T>) -> Self {
        NodeKey(node_ref.index())
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/site_packages.rs | crates/ty_python_semantic/src/site_packages.rs | //! Utilities for finding the `site-packages` directory,
//! into which third-party packages are installed.
//!
//! The routines exposed by this module have different behaviour depending
//! on the platform of the *host machine*, which may be
//! different from the *target platform for type checking*. (A user
//! might be running ty on a Windows machine, but might
//! reasonably ask us to type-check code assuming that the code runs
//! on Linux.)
use std::io;
use std::num::NonZeroUsize;
use std::ops::Deref;
use std::str::FromStr;
use std::{fmt, sync::Arc};
use crate::{PythonVersionFileSource, PythonVersionSource, PythonVersionWithSource};
use camino::Utf8Component;
use indexmap::IndexSet;
use ruff_annotate_snippets::{Level, Renderer, Snippet};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::PythonVersion;
use ruff_python_trivia::Cursor;
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextLen, TextRange};
use strum::IntoEnumIterator;
use ty_static::EnvVars;
/// Result alias for `site-packages` discovery routines.
type SitePackagesDiscoveryResult<T> = Result<T, SitePackagesDiscoveryError>;
/// Result alias for stdlib-directory discovery routines.
type StdlibDiscoveryResult<T> = Result<T, StdlibDiscoveryError>;
/// An ordered, deduplicated set of `site-packages` search paths.
///
/// Most environments will only have one `site-packages` directory.
/// Some virtual environments created with `--system-site-packages`
/// will also have the system installation's `site-packages` packages
/// available, however. Ephemeral environments created with `uv` in
/// `uv run --with` invocations, meanwhile, "extend" a parent environment
/// (which could be another virtual environment or a system installation,
/// and which could itself have multiple `site-packages` directories).
///
/// We use an `IndexSet` here to guard against the (very remote)
/// possibility that an environment might somehow be marked as being
/// both a `--system-site-packages` virtual environment *and* an
/// ephemeral environment that extends the system environment. If this
/// were the case, the system environment's `site-packages` directory
/// *might* be added to the `SitePackagesPaths` twice, but we wouldn't
/// want duplicates to appear in this set.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct SitePackagesPaths(IndexSet<SystemPathBuf>);

impl SitePackagesPaths {
    /// Whether no `site-packages` directories have been discovered.
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Adds a path, keeping insertion order and ignoring duplicates.
    fn insert(&mut self, path: SystemPathBuf) {
        self.0.insert(path);
    }

    /// Adds all paths from `other`, keeping insertion order and ignoring duplicates.
    fn extend(&mut self, other: Self) {
        self.0.extend(other.0);
    }

    /// Concatenate two instances of [`SitePackagesPaths`].
    #[must_use]
    pub fn concatenate(mut self, other: Self) -> Self {
        for path in other {
            self.0.insert(path);
        }
        self
    }

    /// Tries to detect the version from the layout of the `site-packages` directory.
    pub fn python_version_from_layout(&self) -> Option<PythonVersionWithSource> {
        if cfg!(windows) {
            // The path to `site-packages` on Unix is
            // `<sys.prefix>/lib/pythonX.Y/site-packages`,
            // but on Windows it's `<sys.prefix>/Lib/site-packages`.
            return None;
        }
        let primary_site_packages = self.0.first()?;
        // Walk the components from the end of the path, skipping the trailing
        // `site-packages` component itself.
        let mut site_packages_ancestor_components =
            primary_site_packages.components().rev().skip(1).map(|c| {
                // This should have all been validated in `site_packages.rs`
                // when we resolved the search paths for the project.
                debug_assert!(
                    matches!(c, Utf8Component::Normal(_)),
                    "Unexpected component in site-packages path `{c:?}` \
                    (expected `site-packages` to be an absolute path with symlinks resolved, \
                    located at `<sys.prefix>/lib/pythonX.Y/site-packages`)"
                );
                c.as_str()
            });
        // The immediate parent should be `pythonX.Y`-like; the grandparent
        // must be exactly `lib` (only that layout is recognised here).
        let parent_component = site_packages_ancestor_components.next()?;
        if site_packages_ancestor_components.next()? != UnixLibDir::Lib {
            return None;
        }
        // Accept both CPython-style (`python3.12`) and PyPy-style (`pypy3.10`)
        // directory names; a trailing `t` (free-threaded builds) is stripped
        // before parsing `X.Y`.
        let version = parent_component
            .strip_prefix("python")
            .or_else(|| parent_component.strip_prefix("pypy"))?
            .trim_end_matches('t');
        let version = PythonVersion::from_str(version).ok()?;
        let source = PythonVersionSource::InstallationDirectoryLayout {
            site_packages_parent_dir: Box::from(parent_component),
        };
        Some(PythonVersionWithSource { version, source })
    }

    /// Consumes the set, returning the paths in insertion order.
    pub fn into_vec(self) -> Vec<SystemPathBuf> {
        self.0.into_iter().collect()
    }
}
impl fmt::Display for SitePackagesPaths {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.0.iter()).finish()
}
}
impl<const N: usize> From<[SystemPathBuf; N]> for SitePackagesPaths {
fn from(paths: [SystemPathBuf; N]) -> Self {
Self(IndexSet::from(paths))
}
}
impl IntoIterator for SitePackagesPaths {
type Item = SystemPathBuf;
type IntoIter = indexmap::set::IntoIter<SystemPathBuf>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl PartialEq<&[SystemPathBuf]> for SitePackagesPaths {
fn eq(&self, other: &&[SystemPathBuf]) -> bool {
self.0.as_slice() == *other
}
}
/// A discovered Python environment: either a virtual environment
/// (identified by a `pyvenv.cfg` file) or a system installation.
#[derive(Debug)]
pub enum PythonEnvironment {
    Virtual(VirtualEnvironment),
    System(SystemEnvironment),
}
impl PythonEnvironment {
    /// Discover the python environment using the following priorities:
    ///
    /// 1. activated virtual environment
    /// 2. conda (child)
    /// 3. working dir virtual environment
    /// 4. conda (base)
    pub fn discover(
        project_root: &SystemPath,
        system: &dyn System,
    ) -> Result<Option<Self>, SitePackagesDiscoveryError> {
        // Small helper so every discovery source logs identically.
        fn resolve_environment(
            system: &dyn System,
            path: &SystemPath,
            origin: SysPrefixPathOrigin,
        ) -> Result<PythonEnvironment, SitePackagesDiscoveryError> {
            tracing::debug!("Resolving {origin}: {path}");
            PythonEnvironment::new(path, origin, system)
        }
        // 1. An activated virtual environment (`VIRTUAL_ENV`) always wins.
        if let Ok(virtual_env) = system.env_var(EnvVars::VIRTUAL_ENV) {
            return resolve_environment(
                system,
                SystemPath::new(&virtual_env),
                SysPrefixPathOrigin::VirtualEnvVar,
            )
            .map(Some);
        }
        // 2. An activated non-base conda environment.
        if let Some(conda_env) = conda_environment_from_env(system, CondaEnvironmentKind::Child) {
            return resolve_environment(system, &conda_env, SysPrefixPathOrigin::CondaPrefixVar)
                .map(Some);
        }
        // 3. A `.venv` directory inside the project root. Failure here is
        // non-fatal: it is logged (only when the directory actually exists)
        // and discovery falls through to the next source.
        tracing::debug!("Discovering virtual environment in `{project_root}`");
        let virtual_env_directory = project_root.join(".venv");
        match PythonEnvironment::new(
            &virtual_env_directory,
            SysPrefixPathOrigin::LocalVenv,
            system,
        ) {
            Ok(environment) => return Ok(Some(environment)),
            Err(err) => {
                if system.is_directory(&virtual_env_directory) {
                    tracing::debug!(
                        "Ignoring automatically detected virtual environment at `{}`: {}",
                        &virtual_env_directory,
                        err
                    );
                }
            }
        }
        // 4. An activated base conda environment.
        if let Some(conda_env) = conda_environment_from_env(system, CondaEnvironmentKind::Base) {
            return resolve_environment(system, &conda_env, SysPrefixPathOrigin::CondaPrefixVar)
                .map(Some);
        }
        Ok(None)
    }

    /// Resolve the environment rooted at `path`, attempting to inspect it as
    /// a virtual environment first and falling back to a system environment
    /// when no `pyvenv.cfg` is found (and `origin` permits a non-venv).
    pub fn new(
        path: impl AsRef<SystemPath>,
        origin: SysPrefixPathOrigin,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Self> {
        let path = SysPrefixPath::new(path.as_ref(), origin, system)?;
        // Attempt to inspect as a virtual environment first
        match VirtualEnvironment::new(path, system) {
            Ok(venv) => Ok(Self::Virtual(venv)),
            // If there's not a `pyvenv.cfg` marker, attempt to inspect as a system environment
            Err(SitePackagesDiscoveryError::NoPyvenvCfgFile(path, _, _))
                if !path.origin.must_be_virtual_env() =>
            {
                Ok(Self::System(SystemEnvironment::new(path)))
            }
            Err(err) => Err(err),
        }
    }

    /// Returns the Python version that was used to create this environment
    /// (will only be available for virtual environments that specify
    /// the metadata in their `pyvenv.cfg` files).
    pub fn python_version_from_metadata(&self) -> Option<&PythonVersionWithSource> {
        match self {
            Self::Virtual(venv) => venv.version.as_ref(),
            Self::System(_) => None,
        }
    }

    /// Returns the `site-packages` directories reachable from this environment.
    pub fn site_packages_paths(
        &self,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<SitePackagesPaths> {
        match self {
            Self::Virtual(env) => env.site_packages_directories(system),
            Self::System(env) => env.site_packages_directories(system),
        }
    }

    /// Returns the directory containing the environment's real stdlib `.py` files.
    pub fn real_stdlib_path(&self, system: &dyn System) -> StdlibDiscoveryResult<SystemPathBuf> {
        match self {
            Self::Virtual(env) => env.real_stdlib_directory(system),
            Self::System(env) => env.real_stdlib_directory(system),
        }
    }

    /// How the `sys.prefix` path of this environment was obtained.
    pub fn origin(&self) -> &SysPrefixPathOrigin {
        match self {
            Self::Virtual(env) => &env.root_path.origin,
            Self::System(env) => &env.root_path.origin,
        }
    }
}
/// Enumeration of the subdirectories of `sys.prefix` that could contain a
/// `site-packages` directory if the host system is Unix-like.
///
/// For example, if `sys.prefix` is `.venv` and the Python version is 3.10,
/// the `site-packages` directory could be located at `.venv/lib/python3.10/site-packages`,
/// or at `.venv/lib64/python3.10/site-packages`, or there could indeed be `site-packages`
/// directories at both of these locations.
#[derive(Debug, Clone, Copy, Eq, PartialEq, strum_macros::EnumIter)]
enum UnixLibDir {
    Lib,
    Lib64,
}

impl UnixLibDir {
    /// The directory name exactly as it appears on disk.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Lib64 => "lib64",
            Self::Lib => "lib",
        }
    }
}

impl std::fmt::Display for UnixLibDir {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl AsRef<SystemPath> for UnixLibDir {
    fn as_ref(&self) -> &SystemPath {
        let name = self.as_str();
        SystemPath::new(name)
    }
}

impl PartialEq<&str> for UnixLibDir {
    fn eq(&self, other: &&str) -> bool {
        *other == self.as_str()
    }
}

impl PartialEq<UnixLibDir> for &str {
    fn eq(&self, other: &UnixLibDir) -> bool {
        other.as_str() == *self
    }
}
/// The Python runtime that produced the venv.
///
/// We only need to distinguish cases that change the on-disk layout.
/// Everything else can be treated like CPython.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default)]
pub(crate) enum PythonImplementation {
    CPython,
    PyPy,
    GraalPy,
    /// Fallback when the value is missing or unrecognised.
    /// We treat it like CPython but keep the information for diagnostics.
    #[default]
    Unknown,
}

impl PythonImplementation {
    /// Directory-name prefix used by this implementation on disk
    /// (e.g. `python3.12` vs `pypy3.10`), or `None` if unknown.
    const fn directory_prefix(self) -> Option<&'static str> {
        match self {
            Self::CPython | Self::GraalPy => Some("python"),
            Self::PyPy => Some("pypy"),
            Self::Unknown => None,
        }
    }

    /// Return the relative path from `sys.prefix` to the `site-packages` directory
    /// if this is a known implementation. Return `None` if this is an unknown implementation.
    fn relative_site_packages_path(
        self,
        lib_dir: UnixLibDir,
        version: Option<PythonVersion>,
    ) -> Option<String> {
        let prefix = self.directory_prefix()?;
        let version = version?;
        Some(format!("{lib_dir}/{prefix}{version}/site-packages"))
    }

    /// Return the relative path from `sys.prefix` to the directory containing the python stdlib's
    /// .pys if this is a known implementation. Return `None` if this is an unknown implementation.
    fn relative_stdlib_path(self, version: Option<PythonVersion>) -> Option<String> {
        let prefix = self.directory_prefix()?;
        let version = version?;
        Some(format!("lib/{prefix}{version}"))
    }
}
/// Abstraction for a Python virtual environment.
///
/// Most of this information is derived from the virtual environment's `pyvenv.cfg` file.
/// The format of this file is not defined anywhere, and exactly which keys are present
/// depends on the tool that was used to create the virtual environment.
#[derive(Debug)]
pub struct VirtualEnvironment {
    /// The venv's `sys.prefix` path (the directory containing `pyvenv.cfg`).
    root_path: SysPrefixPath,
    /// The `home` value from `pyvenv.cfg`: where the base interpreter lives.
    base_executable_home_path: PythonHomePath,
    /// The `include-system-site-packages` value from `pyvenv.cfg`.
    include_system_site_packages: bool,
    /// The version of the Python executable that was used to create this virtual environment.
    ///
    /// The Python version is encoded under different keys and in different formats
    /// by different virtual-environment creation tools,
    /// and the key is never read by the standard-library `site.py` module,
    /// so it's possible that we might not be able to find this information
    /// in an acceptable format under any of the keys we expect.
    /// This field will be `None` if so.
    version: Option<PythonVersionWithSource>,
    /// The `implementation` value from `pyvenv.cfg` (defaults to `Unknown`).
    implementation: PythonImplementation,
    /// If this virtual environment was created using uv,
    /// it may be an "ephemeral" virtual environment that dynamically adds the `site-packages`
    /// directories of its parent environment to `sys.path` at runtime.
    /// Newer versions of uv record the parent environment in the `pyvenv.cfg` file;
    /// we'll want to add the `site-packages` directories of the parent environment
    /// as search paths as well as the `site-packages` directories of this virtual environment.
    parent_environment: Option<Box<PythonEnvironment>>,
}
impl VirtualEnvironment {
    /// Parse the `pyvenv.cfg` file under `path` into a [`VirtualEnvironment`].
    ///
    /// Fails if the file is missing (i.e. `path` is not a virtual environment)
    /// or if the file is malformed (no usable `home` key).
    pub(crate) fn new(
        path: SysPrefixPath,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<Self> {
        let pyvenv_cfg_path = path.join("pyvenv.cfg");
        tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");
        let pyvenv_cfg = match system.read_to_string(&pyvenv_cfg_path) {
            Ok(pyvenv_cfg) => pyvenv_cfg,
            Err(err) => {
                return Err(SitePackagesDiscoveryError::NoPyvenvCfgFile(
                    path,
                    err,
                    system.dyn_clone(),
                ));
            }
        };
        let parsed_pyvenv_cfg =
            PyvenvCfgParser::new(&pyvenv_cfg)
                .parse()
                .map_err(|pyvenv_parse_error| {
                    SitePackagesDiscoveryError::PyvenvCfgParseError(
                        pyvenv_cfg_path.clone(),
                        pyvenv_parse_error,
                    )
                })?;
        let RawPyvenvCfg {
            include_system_site_packages,
            base_executable_home_path,
            version,
            implementation,
            created_with_uv,
            parent_environment,
        } = parsed_pyvenv_cfg;
        // The `home` key is read by the standard library's `site.py` module,
        // so if it's missing from the `pyvenv.cfg` file
        // (or the provided value is invalid),
        // it's reasonable to consider the virtual environment irredeemably broken.
        let Some(base_executable_home_path) = base_executable_home_path else {
            return Err(SitePackagesDiscoveryError::PyvenvCfgParseError(
                pyvenv_cfg_path,
                PyvenvCfgParseErrorKind::NoHomeKey,
            ));
        };
        let base_executable_home_path = PythonHomePath::new(base_executable_home_path, system)
            .map_err(|io_err| {
                SitePackagesDiscoveryError::PyvenvCfgParseError(
                    pyvenv_cfg_path.clone(),
                    PyvenvCfgParseErrorKind::InvalidHomeValue(io_err),
                )
            })?;
        // Since the `extends-environment` key is nonstandard,
        // for now we only trust it if the virtual environment was created with `uv`.
        // A failure to resolve the parent environment is deliberately non-fatal.
        let parent_environment = if created_with_uv {
            parent_environment
                .and_then(|sys_prefix| {
                    PythonEnvironment::new(sys_prefix, SysPrefixPathOrigin::DerivedFromPyvenvCfg, system)
                        .inspect_err(|err| {
                            tracing::warn!(
                                "Failed to resolve the parent environment of this ephemeral uv virtual environment \
                                from the `extends-environment` value specified in the `pyvenv.cfg` file at {pyvenv_cfg_path}. \
                                Imports will not be resolved correctly if they refer to packages installed into the parent \
                                environment. Underlying error: {err}",
                            );
                        })
                        .ok()
                })
                .map(Box::new)
        } else {
            None
        };
        // but the `version`/`version_info` key is not read by the standard library,
        // and is provided under different keys depending on which virtual-environment creation tool
        // created the `pyvenv.cfg` file. Lenient parsing is appropriate here:
        // the file isn't really *invalid* if it doesn't have this key,
        // or if the value doesn't parse according to our expectations.
        let version = version.and_then(|(version_string, range)| {
            // Only `major.minor` is used; any further `.micro` parts are ignored.
            let mut version_info_parts = version_string.split('.');
            let (major, minor) = (version_info_parts.next()?, version_info_parts.next()?);
            let version = PythonVersion::try_from((major, minor)).ok()?;
            let source = PythonVersionSource::PyvenvCfgFile(PythonVersionFileSource::new(
                Arc::new(pyvenv_cfg_path),
                Some(range),
            ));
            Some(PythonVersionWithSource { version, source })
        });
        let metadata = Self {
            root_path: path,
            base_executable_home_path,
            include_system_site_packages,
            version,
            implementation,
            parent_environment,
        };
        tracing::trace!("Resolved metadata for virtual environment: {metadata:?}");
        Ok(metadata)
    }

    /// Return a list of `site-packages` directories that are available from this virtual environment
    ///
    /// See the documentation for [`site_packages_directories_from_sys_prefix`] for more details.
    pub(crate) fn site_packages_directories(
        &self,
        system: &dyn System,
    ) -> SitePackagesDiscoveryResult<SitePackagesPaths> {
        let VirtualEnvironment {
            root_path,
            base_executable_home_path,
            include_system_site_packages,
            implementation,
            version,
            parent_environment,
        } = self;
        let version = version.as_ref().map(|v| v.version);
        // The venv's own site-packages come first; parent/system directories
        // are appended afterwards (and deduplicated by `SitePackagesPaths`).
        let mut site_packages_directories =
            site_packages_directories_from_sys_prefix(root_path, version, *implementation, system)?;
        if let Some(parent_env_site_packages) = parent_environment.as_deref() {
            match parent_env_site_packages.site_packages_paths(system) {
                Ok(parent_environment_site_packages) => {
                    site_packages_directories.extend(parent_environment_site_packages);
                }
                Err(err) => {
                    tracing::warn!(
                        "Failed to resolve the site-packages directories of this ephemeral uv virtual environment's \
                        parent environment. Imports will not be resolved correctly if they refer to packages installed \
                        into the parent environment. Underlying error: {err}"
                    );
                }
            }
        }
        if *include_system_site_packages {
            let system_sys_prefix =
                SysPrefixPath::from_executable_home_path(base_executable_home_path);
            // If we fail to resolve the `sys.prefix` path from the base executable home path,
            // or if we fail to resolve the `site-packages` from the `sys.prefix` path,
            // we should probably print a warning but *not* abort type checking
            if let Some(sys_prefix_path) = system_sys_prefix {
                match site_packages_directories_from_sys_prefix(
                    &sys_prefix_path,
                    version,
                    *implementation,
                    system,
                ) {
                    Ok(system_directories) => {
                        site_packages_directories.extend(system_directories);
                    }
                    Err(error) => tracing::warn!(
                        "{error}. System site-packages will not be used for module resolution."
                    ),
                }
            } else {
                tracing::warn!(
                    "Failed to resolve `sys.prefix` of the system Python installation \
                    from the `home` value in the `pyvenv.cfg` file at `{}`. \
                    System site-packages will not be used for module resolution.",
                    root_path.join("pyvenv.cfg")
                );
            }
        }
        tracing::debug!(
            "Resolved site-packages directories for this virtual environment are: {site_packages_directories}"
        );
        Ok(site_packages_directories)
    }

    /// Return the real stdlib path (containing actual .py files, and not some variation of typeshed).
    ///
    /// See the documentation for [`real_stdlib_directory_from_sys_prefix`] for more details.
    pub(crate) fn real_stdlib_directory(
        &self,
        system: &dyn System,
    ) -> StdlibDiscoveryResult<SystemPathBuf> {
        let VirtualEnvironment {
            base_executable_home_path,
            implementation,
            version,
            // Unlike site-packages, what we're looking for is never inside the virtual environment
            // so this is only used for diagnostics.
            root_path,
            // We don't need to respect this setting
            include_system_site_packages: _,
            // We don't need to inherit any info from the parent environment
            parent_environment: _,
        } = self;
        // Unconditionally follow the same logic that `site_packages_directories` uses when
        // `include_system_site_packages` is true, as those site-packages should be a subdir
        // of the dir we're looking for.
        let version = version.as_ref().map(|v| v.version);
        if let Some(system_sys_prefix) =
            SysPrefixPath::from_executable_home_path_real(system, base_executable_home_path)
        {
            let real_stdlib_directory = real_stdlib_directory_from_sys_prefix(
                &system_sys_prefix,
                version,
                *implementation,
                system,
            );
            match &real_stdlib_directory {
                Ok(path) => tracing::debug!(
                    "Resolved real stdlib path for this virtual environment is: {path}"
                ),
                Err(_) => tracing::debug!(
                    "Failed to resolve real stdlib path for this virtual environment"
                ),
            }
            real_stdlib_directory
        } else {
            let cfg_path = root_path.join("pyvenv.cfg");
            tracing::debug!(
                "Failed to resolve `sys.prefix` of the system Python installation \
                from the `home` value in the `pyvenv.cfg` file at `{cfg_path}`. \
                System stdlib will not be used for module definitions.",
            );
            Err(StdlibDiscoveryError::NoSysPrefixFound(cfg_path))
        }
    }
}
/// Different kinds of conda environment
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub(crate) enum CondaEnvironmentKind {
    /// The base Conda environment; treated like a system Python environment.
    Base,
    /// Any other Conda environment; treated like a virtual environment.
    Child,
}

impl CondaEnvironmentKind {
    /// Compute the kind of `CONDA_PREFIX` we have.
    ///
    /// The base environment is typically stored in a location matching the `_CONDA_ROOT` path.
    ///
    /// Additionally, when the base environment is active, `CONDA_DEFAULT_ENV` will be set to a
    /// name, e.g., `base`, which does not match the `CONDA_PREFIX`, e.g., `/usr/local` instead of
    /// `/usr/local/conda/envs/<name>`. Note that the name `CONDA_DEFAULT_ENV` is misleading, it's
    /// the active environment name, not a constant base environment name.
    fn from_prefix_path(system: &dyn System, path: &SystemPath) -> Self {
        // A prefix equal to `_CONDA_ROOT` is the base environment by definition.
        match system.env_var(EnvVars::CONDA_ROOT) {
            Ok(conda_root) if path.as_str() == conda_root => return Self::Base,
            _ => {}
        }
        // Next, we'll use a heuristic based on `CONDA_DEFAULT_ENV`.
        let Ok(active_env_name) = system.env_var(EnvVars::CONDA_DEFAULT_ENV) else {
            return Self::Child;
        };
        // If the environment name is "base" or "root", treat it as a base environment.
        //
        // These are the expected names for the base environment; and this is retained for
        // backwards compatibility, but in a future breaking release we should remove this
        // special-casing.
        if matches!(active_env_name.as_str(), "base" | "root") {
            return Self::Base;
        }
        // For other environment names, fall back to path-based logic: an environment
        // stored in a directory matching its own name is not usually a base environment.
        match path.file_name() {
            Some(dir_name) if dir_name == active_env_name => Self::Child,
            Some(_) => Self::Base,
            None => Self::Child,
        }
    }
}
/// Read `CONDA_PREFIX` and confirm that it has the expected kind
pub(crate) fn conda_environment_from_env(
    system: &dyn System,
    kind: CondaEnvironmentKind,
) -> Option<SystemPathBuf> {
    // An empty `CONDA_PREFIX` is treated the same as an unset one.
    let prefix = match system.env_var(EnvVars::CONDA_PREFIX) {
        Ok(value) if !value.is_empty() => SystemPathBuf::from(value),
        _ => return None,
    };
    // Only yield the prefix when it is of the kind the caller asked for.
    (CondaEnvironmentKind::from_prefix_path(system, &prefix) == kind).then_some(prefix)
}
/// A parser for `pyvenv.cfg` files: metadata files for virtual environments.
///
/// Note that a `pyvenv.cfg` file *looks* like a `.ini` file, but actually isn't valid `.ini` syntax!
///
/// See also: <https://snarky.ca/how-virtual-environments-work/>
#[derive(Debug)]
struct PyvenvCfgParser<'s> {
    /// The full text of the `pyvenv.cfg` file being parsed.
    source: &'s str,
    /// Cursor over `source`, advanced one line at a time by `parse_line`.
    cursor: Cursor<'s>,
    /// 1-based number of the line currently being parsed (used in errors).
    line_number: NonZeroUsize,
    /// The key/value data accumulated from the lines parsed so far.
    data: RawPyvenvCfg<'s>,
}

impl<'s> PyvenvCfgParser<'s> {
    fn new(source: &'s str) -> Self {
        Self {
            source,
            cursor: Cursor::new(source),
            line_number: NonZeroUsize::new(1).unwrap(),
            data: RawPyvenvCfg::default(),
        }
    }

    /// Parse the `pyvenv.cfg` file and return the parsed data.
    fn parse(mut self) -> Result<RawPyvenvCfg<'s>, PyvenvCfgParseErrorKind> {
        while !self.cursor.is_eof() {
            self.parse_line()?;
            self.line_number = self.line_number.checked_add(1).unwrap();
        }
        Ok(self.data)
    }

    /// Parse a single line of the `pyvenv.cfg` file and advance the cursor
    /// to the beginning of the next line.
    fn parse_line(&mut self) -> Result<(), PyvenvCfgParseErrorKind> {
        let PyvenvCfgParser {
            source,
            cursor,
            line_number,
            data,
        } = self;
        let line_number = *line_number;
        // Skip leading whitespace (but never past the end of the line).
        cursor.eat_while(|c| c.is_whitespace() && c != '\n');
        let key_start = cursor.offset();
        cursor.eat_while(|c| !matches!(c, '\n' | '='));
        let key_end = cursor.offset();
        if !cursor.eat_char('=') {
            // Skip over any lines that do not contain '=' characters, same as the CPython stdlib
            // <https://github.com/python/cpython/blob/e64395e8eb8d3a9e35e3e534e87d427ff27ab0a5/Lib/site.py#L625-L632>
            cursor.eat_char('\n');
            return Ok(());
        }
        let key = source[TextRange::new(key_start, key_end)].trim();
        // Skip whitespace between `=` and the value.
        cursor.eat_while(|c| c.is_whitespace() && c != '\n');
        let value_start = cursor.offset();
        cursor.eat_while(|c| c != '\n');
        let value = source[TextRange::new(value_start, cursor.offset())].trim();
        cursor.eat_char('\n');
        // A `key=` line with nothing after the `=` is malformed.
        if value.is_empty() {
            return Err(PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number });
        }
        match key {
            "include-system-site-packages" => {
                // Anything other than (case-insensitive) "true" is treated as false.
                data.include_system_site_packages = value.eq_ignore_ascii_case("true");
            }
            "home" => data.base_executable_home_path = Some(value),
            // `virtualenv` and `uv` call this key `version_info`,
            // but the stdlib venv module calls it `version`
            "version" | "version_info" => {
                // Record where in the file the value sits, for diagnostics.
                let version_range = TextRange::at(value_start, value.text_len());
                data.version = Some((value, version_range));
            }
            "implementation" => {
                data.implementation = match value.to_ascii_lowercase().as_str() {
                    "cpython" => PythonImplementation::CPython,
                    "graalvm" => PythonImplementation::GraalPy,
                    "pypy" => PythonImplementation::PyPy,
                    _ => PythonImplementation::Unknown,
                };
            }
            "uv" => data.created_with_uv = true,
            "extends-environment" => data.parent_environment = Some(value),
            // An `=value` line with an empty key is malformed.
            "" => {
                return Err(PyvenvCfgParseErrorKind::MalformedKeyValuePair { line_number });
            }
            // Unknown keys are silently ignored.
            _ => {}
        }
        Ok(())
    }
}
/// A `key:value` mapping derived from parsing a `pyvenv.cfg` file.
///
/// This data contained within is still mostly raw and unvalidated.
#[derive(Debug, Default)]
struct RawPyvenvCfg<'s> {
    /// `include-system-site-packages` key: `true` iff the value was (case-insensitively) "true".
    include_system_site_packages: bool,
    /// `home` key: where the base interpreter lives (validated later as a `PythonHomePath`).
    base_executable_home_path: Option<&'s str>,
    /// `version`/`version_info` key, plus its range in the file for diagnostics.
    version: Option<(&'s str, TextRange)>,
    /// `implementation` key, normalised; defaults to `Unknown`.
    implementation: PythonImplementation,
    /// Whether a `uv` key was present, i.e. the environment was created by uv.
    created_with_uv: bool,
    /// Nonstandard `extends-environment` key written by uv for ephemeral environments.
    parent_environment: Option<&'s str>,
}
/// A Python environment that is _not_ a virtual environment.
///
/// This environment may or may not be one that is managed by the operating system itself, e.g.,
/// this captures both Homebrew-installed Python versions and the bundled macOS Python installation.
#[derive(Debug)]
pub struct SystemEnvironment {
    /// The environment's `sys.prefix` path.
    root_path: SysPrefixPath,
}
impl SystemEnvironment {
/// Create a new system environment from the given path.
///
/// At this time, there is no eager validation and this is infallible. Instead, validation
/// will occur in [`site_packages_directories_from_sys_prefix`] — which will fail if there is not
/// a Python environment at the given path.
pub(crate) fn new(path: SysPrefixPath) -> Self {
    // Validation is deferred to `site_packages_directories_from_sys_prefix`.
    Self { root_path: path }
}
/// Return a list of `site-packages` directories that are available from this environment.
///
/// See the documentation for [`site_packages_directories_from_sys_prefix`] for more details.
pub(crate) fn site_packages_directories(
    &self,
    system: &dyn System,
) -> SitePackagesDiscoveryResult<SitePackagesPaths> {
    let SystemEnvironment { root_path } = self;
    // No `pyvenv.cfg` metadata exists for a system environment, so neither a
    // Python version nor an implementation can be supplied to the search.
    let site_packages_directories = site_packages_directories_from_sys_prefix(
        root_path,
        None,
        PythonImplementation::Unknown,
        system,
    )?;
    tracing::debug!(
        "Resolved site-packages directories for this environment are: {site_packages_directories}"
    );
    Ok(site_packages_directories)
}
/// Return the real stdlib path (containing actual .py files, and not some variation of typeshed).
///
/// See the documentation for [`real_stdlib_directory_from_sys_prefix`] for more details.
pub(crate) fn real_stdlib_directory(
&self,
system: &dyn System,
) -> StdlibDiscoveryResult<SystemPathBuf> {
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/unpack.rs | crates/ty_python_semantic/src/unpack.rs | use ruff_db::files::File;
use ruff_db::parsed::ParsedModuleRef;
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_text_size::{Ranged, TextRange};
use crate::Db;
use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::scope::{FileScopeId, ScopeId};
/// This ingredient represents a single unpacking.
///
/// This is required to make use of salsa to cache the complete unpacking of multiple variables
/// involved. It allows us to:
/// 1. Avoid doing structural match multiple times for each definition
/// 2. Avoid highlighting the same error multiple times
///
/// ## Module-local type
/// This type should not be used as part of any cross-module API because
/// it holds a reference to the AST node. Range-offset changes
/// then propagate through all usages, and deserialization requires
/// reparsing the entire module.
///
/// E.g. don't use this type in:
///
/// * a return type of a cross-module query
/// * a field of a type that is a return type of a cross-module query
/// * an argument of a cross-module query
#[salsa::tracked(debug, heap_size=ruff_memory_usage::heap_size)]
pub(crate) struct Unpack<'db> {
    /// The file containing this unpacking.
    pub(crate) file: File,
    /// Scope of the value expression being unpacked.
    pub(crate) value_file_scope: FileScopeId,
    /// Scope of the target expression being assigned to.
    pub(crate) target_file_scope: FileScopeId,
    /// The target expression that is being unpacked. For example, in `(a, b) = (1, 2)`, the target
    /// expression is `(a, b)`.
    #[no_eq]
    #[tracked]
    #[returns(ref)]
    pub(crate) _target: AstNodeRef<ast::Expr>,
    /// The ingredient representing the value expression of the unpacking. For example, in
    /// `(a, b) = (1, 2)`, the value expression is `(1, 2)`.
    pub(crate) value: UnpackValue<'db>,
}
// The Salsa heap is tracked separately.
impl get_size2::GetSize for Unpack<'_> {}

impl<'db> Unpack<'db> {
    /// Resolve the unpack target expression against the given parsed module.
    pub(crate) fn target<'ast>(
        self,
        db: &'db dyn Db,
        parsed: &'ast ParsedModuleRef,
    ) -> &'ast ast::Expr {
        self._target(db).node(parsed)
    }

    /// Returns the scope where the unpack target expression belongs to.
    pub(crate) fn target_scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.target_file_scope(db).to_scope_id(db, self.file(db))
    }

    /// Returns the range of the unpack target expression.
    pub(crate) fn range(self, db: &'db dyn Db, module: &ParsedModuleRef) -> TextRange {
        self.target(db, module).range()
    }
}
/// The expression that is being unpacked.
#[derive(Clone, Copy, Debug, Hash, salsa::Update, get_size2::GetSize)]
pub(crate) struct UnpackValue<'db> {
    /// The kind of unpack expression
    kind: UnpackKind,
    /// The expression we are unpacking
    expression: Expression<'db>,
}

impl<'db> UnpackValue<'db> {
    /// Bundles the value expression with the syntactic kind of unpacking it
    /// participates in.
    pub(crate) fn new(kind: UnpackKind, expression: Expression<'db>) -> Self {
        Self { kind, expression }
    }

    /// Returns the underlying [`Expression`] that is being unpacked.
    pub(crate) const fn expression(self) -> Expression<'db> {
        self.expression
    }

    /// Returns the expression as an [`AnyNodeRef`].
    pub(crate) fn as_any_node_ref<'ast>(
        self,
        db: &'db dyn Db,
        module: &'ast ParsedModuleRef,
    ) -> AnyNodeRef<'ast> {
        self.expression().node_ref(db, module).into()
    }

    /// Returns the kind of unpacking (iterable, context manager, or assignment).
    pub(crate) const fn kind(self) -> UnpackKind {
        self.kind
    }
}
/// Whether a value is evaluated in a synchronous or asynchronous context
/// (e.g. `for` vs. `async for`, `with` vs. `async with`).
#[derive(Clone, Copy, Debug, Hash, salsa::Update, get_size2::GetSize)]
pub(crate) enum EvaluationMode {
    Sync,
    Async,
}
impl EvaluationMode {
    /// Maps an `is_async` flag (as carried by `async for`/`async with`
    /// syntax) to the corresponding evaluation mode.
    pub(crate) const fn from_is_async(is_async: bool) -> Self {
        match is_async {
            true => EvaluationMode::Async,
            false => EvaluationMode::Sync,
        }
    }

    /// Returns `true` if this is [`EvaluationMode::Async`].
    pub(crate) const fn is_async(self) -> bool {
        match self {
            EvaluationMode::Async => true,
            EvaluationMode::Sync => false,
        }
    }
}
/// The syntactic construct that gives rise to an unpacking.
#[derive(Clone, Copy, Debug, Hash, salsa::Update, get_size2::GetSize)]
pub(crate) enum UnpackKind {
    /// An iterable expression like the one in a `for` loop or a comprehension.
    Iterable { mode: EvaluationMode },
    /// A context manager expression like the one in a `with` statement.
    ContextManager { mode: EvaluationMode },
    /// An expression that is being assigned to a target.
    Assign,
}

/// The position of the target element in an unpacking.
#[derive(Clone, Copy, Debug, Hash, PartialEq, salsa::Update, get_size2::GetSize)]
pub(crate) enum UnpackPosition {
    /// The target element is in the first position of the unpacking.
    First,
    /// The target element is in a position other than the first position of the unpacking.
    Other,
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/subscript.rs | crates/ty_python_semantic/src/subscript.rs | //! This module provides utility functions for indexing (`PyIndex`) and slicing
//! operations (`PySlice`) on iterators, following the semantics of equivalent
//! operations in Python.
use itertools::Either;
use crate::Db;
/// Error returned when a `py_index` lookup falls outside the sequence, the
/// analogue of Python's `IndexError`.
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) struct OutOfBoundsError;

/// Python-style indexing (`obj[i]`): `i32` indices, with negative indices
/// counting backwards from the end of the sequence.
pub(crate) trait PyIndex<'db> {
    type Item: 'db;

    fn py_index(self, db: &'db dyn Db, index: i32) -> Result<Self::Item, OutOfBoundsError>;
}
/// Converts a non-negative `i32` index to a `usize`.
///
/// # Panics
/// Panics (via `expect`, and `debug_assert` in debug builds) if `index` is
/// negative; callers are expected to dispatch on the sign first.
fn from_nonnegative_i32(index: i32) -> usize {
    // A `usize` of at least 32 bits can represent every non-negative `i32`.
    // (Compile-time check; `static_assertions` is not needed for this.)
    const _: () = assert!(usize::BITS >= 32);
    debug_assert!(index >= 0);

    usize::try_from(index)
        // The original message said "positive", but `0` is explicitly allowed.
        .expect("Should only ever pass a non-negative integer to `from_nonnegative_i32`")
}

/// Converts a negative `i32` index to its magnitude as a `usize`.
fn from_negative_i32(index: i32) -> usize {
    const _: () = assert!(usize::BITS >= 32);

    // `unwrap_or_else` keeps the fallback lazy (clippy: `or_fun_call`).
    index.checked_neg().map(from_nonnegative_i32).unwrap_or_else(|| {
        // `checked_neg` only fails for `i32::MIN`. We cannot represent
        // `-i32::MIN` as an `i32`, but we can represent it as a `usize`,
        // since `usize` is at least 32 bits.
        from_nonnegative_i32(i32::MAX) + 1
    })
}
/// Where a resolved index lands relative to a sequence of known length.
///
/// The `PartialOrd` derive is load-bearing: derived ordering follows variant
/// declaration order, so `BeforeStart < AtIndex(_) < AfterEnd` (and `AtIndex`
/// values compare by index), which is what the `start < stop` comparisons in
/// `py_slice` rely on.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
enum Position {
    BeforeStart,
    AtIndex(usize),
    AfterEnd,
}

/// A Python index after its sign has been inspected: an offset counted from
/// the front (non-negative indices) or from the back (negative indices).
pub(crate) enum Nth {
    /// Zero-based offset from the first element (Python index `>= 0`).
    FromStart(usize),
    /// Zero-based offset from the last element (Python index `-1` maps to `FromEnd(0)`).
    FromEnd(usize),
}
impl Nth {
    /// Converts a raw Python-style `i32` index into an offset relative to
    /// either the front (non-negative) or the back (negative) of a sequence.
    pub(crate) fn from_index(index: i32) -> Self {
        if index < 0 {
            // Python's `-1` denotes the last element, i.e. zero steps from the end.
            Nth::FromEnd(from_negative_i32(index) - 1)
        } else {
            Nth::FromStart(from_nonnegative_i32(index))
        }
    }

    /// Resolves this relative offset against a sequence of length `len`,
    /// mapping out-of-range offsets to `BeforeStart`/`AfterEnd`.
    fn to_position(&self, len: usize) -> Position {
        debug_assert!(len > 0);

        match *self {
            Nth::FromStart(nth) if nth < len => Position::AtIndex(nth),
            Nth::FromStart(_) => Position::AfterEnd,
            Nth::FromEnd(nth_rev) if nth_rev < len => Position::AtIndex(len - 1 - nth_rev),
            Nth::FromEnd(_) => Position::BeforeStart,
        }
    }
}
/// Python-style indexing for slices; returns a reference into the slice.
impl<'db, T> PyIndex<'db> for &'db [T] {
    type Item = &'db T;

    fn py_index(self, _db: &'db dyn Db, index: i32) -> Result<&'db T, OutOfBoundsError> {
        match Nth::from_index(index) {
            Nth::FromStart(nth) => self.get(nth).ok_or(OutOfBoundsError),
            // For an index counted from the back, `len - (nth_rev + 1)` is the
            // equivalent forward index; `checked_sub` rejects underflow.
            Nth::FromEnd(nth_rev) => self
                .len()
                .checked_sub(nth_rev + 1)
                .and_then(|idx| self.get(idx))
                .ok_or(OutOfBoundsError),
        }
    }
}
/// Python-style indexing for any double-ended iterator.
///
/// Note that this *consumes* the iterator up to (and including) the requested
/// element: `nth`/`nth_back` advance the iterator as a side effect.
impl<'db, I: 'db, T> PyIndex<'db> for &mut T
where
    T: DoubleEndedIterator<Item = I>,
{
    type Item = I;

    fn py_index(self, _db: &'db dyn Db, index: i32) -> Result<I, OutOfBoundsError> {
        match Nth::from_index(index) {
            // Non-negative index: walk forward from the front.
            Nth::FromStart(nth) => self.nth(nth).ok_or(OutOfBoundsError),
            // Negative index: walk backwards from the end.
            Nth::FromEnd(nth_rev) => self.nth_back(nth_rev).ok_or(OutOfBoundsError),
        }
    }
}
/// Error returned by `py_slice` when the step is zero (invalid in Python too:
/// `slice step cannot be zero`).
#[derive(Debug, Clone, Copy, PartialEq)]
pub(crate) struct StepSizeZeroError;

/// Python-style slicing (`obj[start:stop:step]`), including negative indices
/// and negative steps. `None` arguments correspond to omitted slice parts.
pub(crate) trait PySlice<'db> {
    type Item: 'db;

    fn py_slice(
        &self,
        db: &'db dyn Db,
        start: Option<i32>,
        stop: Option<i32>,
        step: Option<i32>,
    ) -> Result<impl Iterator<Item = Self::Item>, StepSizeZeroError>;
}
impl<'db, T> PySlice<'db> for [T]
where
    T: Copy + 'db,
{
    type Item = T;

    /// Emulates Python's `sequence[start:stop:step]` over a slice, yielding the
    /// selected elements lazily. Out-of-range `start`/`stop` are clamped (via
    /// `Nth::to_position`) rather than being an error, matching Python.
    fn py_slice(
        &self,
        _db: &'db dyn Db,
        start: Option<i32>,
        stop: Option<i32>,
        step_int: Option<i32>,
    ) -> Result<impl Iterator<Item = Self::Item>, StepSizeZeroError> {
        // Python's default step is 1; a step of 0 is invalid.
        let step_int = step_int.unwrap_or(1);
        if step_int == 0 {
            return Err(StepSizeZeroError);
        }

        let len = self.len();
        if len == 0 {
            // The iterator needs to have the same type as the step>0 case below,
            // so we need to use `.skip(0)`.
            #[expect(clippy::iter_skip_zero)]
            return Ok(Either::Left(self.iter().skip(0).take(0).step_by(1)).copied());
        }

        // Resolve a raw Python index to a clamped position for a sequence of
        // length `len`.
        let to_position = |index| Nth::from_index(index).to_position(len);

        let iter = if step_int.is_positive() {
            let step = from_nonnegative_i32(step_int);

            // Omitted bounds of a forward slice default to the full range.
            let start = start.map(to_position).unwrap_or(Position::BeforeStart);
            let stop = stop.map(to_position).unwrap_or(Position::AfterEnd);

            // Translate `(start, stop)` into skip/take counts on a forward
            // iterator; an empty range (`start >= stop`) yields nothing.
            let (skip, take, step) = if start < stop {
                let skip = match start {
                    Position::BeforeStart => 0,
                    Position::AtIndex(start_index) => start_index,
                    Position::AfterEnd => len,
                };
                let take = match stop {
                    Position::BeforeStart => 0,
                    Position::AtIndex(stop_index) => stop_index - skip,
                    Position::AfterEnd => len - skip,
                };
                (skip, take, step)
            } else {
                (0, 0, step)
            };

            Either::Left(self.iter().skip(skip).take(take).step_by(step))
        } else {
            // Negative step: iterate the reversed slice; `step` is the magnitude.
            let step = from_negative_i32(step_int);

            // Omitted bounds of a backward slice default to end → start.
            let start = start.map(to_position).unwrap_or(Position::AfterEnd);
            let stop = stop.map(to_position).unwrap_or(Position::BeforeStart);

            // skip/take are counted on the *reversed* iterator, so in-bounds
            // positions are mirrored (`len - 1 - i`); the `stop` element itself
            // is excluded, as in Python.
            let (skip, take, step) = if start <= stop {
                (0, 0, step)
            } else {
                let skip = match start {
                    Position::BeforeStart => len,
                    Position::AtIndex(start_index) => len - 1 - start_index,
                    Position::AfterEnd => 0,
                };
                let take = match stop {
                    Position::BeforeStart => len - skip,
                    Position::AtIndex(stop_index) => (len - 1) - skip - stop_index,
                    Position::AfterEnd => 0,
                };
                (skip, take, step)
            };

            Either::Right(self.iter().rev().skip(skip).take(take).step_by(step))
        };

        Ok(iter.copied())
    }
}
#[cfg(test)]
#[expect(clippy::redundant_clone)]
mod tests {
use crate::Db;
use crate::db::tests::setup_db;
use crate::subscript::{OutOfBoundsError, StepSizeZeroError};
use super::{PyIndex, PySlice};
use itertools::{Itertools, assert_equal};
#[test]
fn py_index_empty() {
let db = setup_db();
let iter = std::iter::empty::<char>();
assert_eq!(iter.clone().py_index(&db, 0), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, 1), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, -1), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, i32::MIN), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, i32::MAX), Err(OutOfBoundsError));
}
#[test]
fn py_index_single_element() {
let db = setup_db();
let iter = ['a'].into_iter();
assert_eq!(iter.clone().py_index(&db, 0), Ok('a'));
assert_eq!(iter.clone().py_index(&db, 1), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, -1), Ok('a'));
assert_eq!(iter.clone().py_index(&db, -2), Err(OutOfBoundsError));
}
#[test]
fn py_index_more_elements() {
let db = setup_db();
let iter = ['a', 'b', 'c', 'd', 'e'].into_iter();
assert_eq!(iter.clone().py_index(&db, 0), Ok('a'));
assert_eq!(iter.clone().py_index(&db, 1), Ok('b'));
assert_eq!(iter.clone().py_index(&db, 4), Ok('e'));
assert_eq!(iter.clone().py_index(&db, 5), Err(OutOfBoundsError));
assert_eq!(iter.clone().py_index(&db, -1), Ok('e'));
assert_eq!(iter.clone().py_index(&db, -2), Ok('d'));
assert_eq!(iter.clone().py_index(&db, -5), Ok('a'));
assert_eq!(iter.clone().py_index(&db, -6), Err(OutOfBoundsError));
}
#[test]
fn py_index_uses_full_index_range() {
let db = setup_db();
let iter = 0..=u32::MAX;
// u32::MAX - |i32::MIN| + 1 = 2^32 - 1 - 2^31 + 1 = 2^31
assert_eq!(iter.clone().py_index(&db, i32::MIN), Ok(2u32.pow(31)));
assert_eq!(iter.clone().py_index(&db, -2), Ok(u32::MAX - 2 + 1));
assert_eq!(iter.clone().py_index(&db, -1), Ok(u32::MAX - 1 + 1));
assert_eq!(iter.clone().py_index(&db, 0), Ok(0));
assert_eq!(iter.clone().py_index(&db, 1), Ok(1));
assert_eq!(iter.clone().py_index(&db, i32::MAX), Ok(i32::MAX as u32));
}
#[track_caller]
fn assert_eq_slice<const N: usize, const M: usize>(
db: &dyn Db,
input: &[char; N],
start: Option<i32>,
stop: Option<i32>,
step: Option<i32>,
expected: &[char; M],
) {
assert_equal(
input.py_slice(db, start, stop, step).unwrap().collect_vec(),
expected.iter().copied().collect_vec(),
);
}
#[test]
fn py_slice_empty_input() {
let db = setup_db();
let input = [];
assert_eq_slice(&db, &input, None, None, None, &[]);
assert_eq_slice(&db, &input, Some(0), None, None, &[]);
assert_eq_slice(&db, &input, None, Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(-5), Some(-5), None, &[]);
assert_eq_slice(&db, &input, None, None, Some(-1), &[]);
assert_eq_slice(&db, &input, None, None, Some(2), &[]);
}
#[test]
fn py_slice_single_element_input() {
let db = setup_db();
let input = ['a'];
assert_eq_slice(&db, &input, None, None, None, &['a']);
assert_eq_slice(&db, &input, Some(0), None, None, &['a']);
assert_eq_slice(&db, &input, None, Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(0), Some(2), None, &['a']);
assert_eq_slice(&db, &input, Some(-1), None, None, &['a']);
assert_eq_slice(&db, &input, Some(-1), Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(-1), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(-1), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(-1), Some(2), None, &['a']);
assert_eq_slice(&db, &input, None, Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(-2), None, None, &['a']);
assert_eq_slice(&db, &input, Some(-2), Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(-2), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(-2), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(-2), Some(2), None, &['a']);
}
#[test]
fn py_slice_nonnegative_indices() {
let db = setup_db();
let input = ['a', 'b', 'c', 'd', 'e'];
assert_eq_slice(&db, &input, None, Some(0), None, &[]);
assert_eq_slice(&db, &input, None, Some(1), None, &['a']);
assert_eq_slice(&db, &input, None, Some(4), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, None, Some(5), None, &['a', 'b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, None, Some(6), None, &['a', 'b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, None, None, None, &['a', 'b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(0), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(0), Some(4), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(
&db,
&input,
Some(0),
Some(5),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(
&db,
&input,
Some(0),
Some(6),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(&db, &input, Some(0), None, None, &['a', 'b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(1), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(1), Some(1), None, &[]);
assert_eq_slice(&db, &input, Some(1), Some(2), None, &['b']);
assert_eq_slice(&db, &input, Some(1), Some(4), None, &['b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(1), Some(5), None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(1), Some(6), None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(1), None, None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(4), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(4), Some(4), None, &[]);
assert_eq_slice(&db, &input, Some(4), Some(5), None, &['e']);
assert_eq_slice(&db, &input, Some(4), Some(6), None, &['e']);
assert_eq_slice(&db, &input, Some(4), None, None, &['e']);
assert_eq_slice(&db, &input, Some(5), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(5), Some(5), None, &[]);
assert_eq_slice(&db, &input, Some(5), Some(6), None, &[]);
assert_eq_slice(&db, &input, Some(5), None, None, &[]);
assert_eq_slice(&db, &input, Some(6), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(6), Some(6), None, &[]);
assert_eq_slice(&db, &input, Some(6), None, None, &[]);
}
#[test]
fn py_slice_negative_indices() {
let db = setup_db();
let input = ['a', 'b', 'c', 'd', 'e'];
assert_eq_slice(
&db,
&input,
Some(-6),
None,
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(&db, &input, Some(-6), Some(-1), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-6), Some(-4), None, &['a']);
assert_eq_slice(&db, &input, Some(-6), Some(-5), None, &[]);
assert_eq_slice(&db, &input, Some(-6), Some(-6), None, &[]);
assert_eq_slice(&db, &input, Some(-6), Some(-10), None, &[]);
assert_eq_slice(
&db,
&input,
Some(-5),
None,
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(&db, &input, Some(-5), Some(-1), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-5), Some(-4), None, &['a']);
assert_eq_slice(&db, &input, Some(-5), Some(-5), None, &[]);
assert_eq_slice(&db, &input, Some(-5), Some(-6), None, &[]);
assert_eq_slice(&db, &input, Some(-5), Some(-10), None, &[]);
assert_eq_slice(&db, &input, Some(-4), None, None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(-4), Some(-1), None, &['b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-4), Some(-3), None, &['b']);
assert_eq_slice(&db, &input, Some(-4), Some(-4), None, &[]);
assert_eq_slice(&db, &input, Some(-4), Some(-10), None, &[]);
assert_eq_slice(&db, &input, Some(-1), None, None, &['e']);
assert_eq_slice(&db, &input, Some(-1), Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(-1), Some(-10), None, &[]);
assert_eq_slice(&db, &input, None, Some(-1), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, None, Some(-4), None, &['a']);
assert_eq_slice(&db, &input, None, Some(-5), None, &[]);
assert_eq_slice(&db, &input, None, Some(-6), None, &[]);
}
#[test]
fn py_slice_mixed_positive_negative_indices() {
let db = setup_db();
let input = ['a', 'b', 'c', 'd', 'e'];
assert_eq_slice(&db, &input, Some(0), Some(-1), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(1), Some(-1), None, &['b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(3), Some(-1), None, &['d']);
assert_eq_slice(&db, &input, Some(4), Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(5), Some(-1), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(-4), None, &['a']);
assert_eq_slice(&db, &input, Some(1), Some(-4), None, &[]);
assert_eq_slice(&db, &input, Some(3), Some(-4), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(-5), None, &[]);
assert_eq_slice(&db, &input, Some(1), Some(-5), None, &[]);
assert_eq_slice(&db, &input, Some(3), Some(-5), None, &[]);
assert_eq_slice(&db, &input, Some(0), Some(-6), None, &[]);
assert_eq_slice(&db, &input, Some(1), Some(-6), None, &[]);
assert_eq_slice(
&db,
&input,
Some(-6),
Some(6),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(
&db,
&input,
Some(-6),
Some(5),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(&db, &input, Some(-6), Some(4), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-6), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(-6), Some(0), None, &[]);
assert_eq_slice(
&db,
&input,
Some(-5),
Some(6),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(
&db,
&input,
Some(-5),
Some(5),
None,
&['a', 'b', 'c', 'd', 'e'],
);
assert_eq_slice(&db, &input, Some(-5), Some(4), None, &['a', 'b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-5), Some(1), None, &['a']);
assert_eq_slice(&db, &input, Some(-5), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(-4), Some(6), None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(-4), Some(5), None, &['b', 'c', 'd', 'e']);
assert_eq_slice(&db, &input, Some(-4), Some(4), None, &['b', 'c', 'd']);
assert_eq_slice(&db, &input, Some(-4), Some(2), None, &['b']);
assert_eq_slice(&db, &input, Some(-4), Some(1), None, &[]);
assert_eq_slice(&db, &input, Some(-4), Some(0), None, &[]);
assert_eq_slice(&db, &input, Some(-1), Some(6), None, &['e']);
assert_eq_slice(&db, &input, Some(-1), Some(5), None, &['e']);
assert_eq_slice(&db, &input, Some(-1), Some(4), None, &[]);
assert_eq_slice(&db, &input, Some(-1), Some(1), None, &[]);
}
#[test]
fn py_slice_step_forward() {
let db = setup_db();
// indices: 0 1 2 3 4 5 6
let input = ['a', 'b', 'c', 'd', 'e', 'f', 'g'];
// Step size zero is invalid:
assert!(matches!(
input.py_slice(&db, None, None, Some(0)),
Err(StepSizeZeroError)
));
assert!(matches!(
input.py_slice(&db, Some(0), Some(5), Some(0)),
Err(StepSizeZeroError)
));
assert!(matches!(
input.py_slice(&db, Some(0), Some(0), Some(0)),
Err(StepSizeZeroError)
));
assert_eq_slice(
&db,
&input,
Some(0),
Some(8),
Some(2),
&['a', 'c', 'e', 'g'],
);
assert_eq_slice(
&db,
&input,
Some(0),
Some(7),
Some(2),
&['a', 'c', 'e', 'g'],
);
assert_eq_slice(&db, &input, Some(0), Some(6), Some(2), &['a', 'c', 'e']);
assert_eq_slice(&db, &input, Some(0), Some(5), Some(2), &['a', 'c', 'e']);
assert_eq_slice(&db, &input, Some(0), Some(4), Some(2), &['a', 'c']);
assert_eq_slice(&db, &input, Some(0), Some(3), Some(2), &['a', 'c']);
assert_eq_slice(&db, &input, Some(0), Some(2), Some(2), &['a']);
assert_eq_slice(&db, &input, Some(0), Some(1), Some(2), &['a']);
assert_eq_slice(&db, &input, Some(0), Some(0), Some(2), &[]);
assert_eq_slice(&db, &input, Some(1), Some(5), Some(2), &['b', 'd']);
assert_eq_slice(&db, &input, Some(0), Some(7), Some(3), &['a', 'd', 'g']);
assert_eq_slice(&db, &input, Some(0), Some(6), Some(3), &['a', 'd']);
assert_eq_slice(&db, &input, Some(0), None, Some(10), &['a']);
}
#[test]
fn py_slice_step_backward() {
let db = setup_db();
// indices: 0 1 2 3 4 5 6
let input = ['a', 'b', 'c', 'd', 'e', 'f', 'g'];
assert_eq_slice(&db, &input, Some(7), Some(0), Some(-2), &['g', 'e', 'c']);
assert_eq_slice(&db, &input, Some(6), Some(0), Some(-2), &['g', 'e', 'c']);
assert_eq_slice(&db, &input, Some(5), Some(0), Some(-2), &['f', 'd', 'b']);
assert_eq_slice(&db, &input, Some(4), Some(0), Some(-2), &['e', 'c']);
assert_eq_slice(&db, &input, Some(3), Some(0), Some(-2), &['d', 'b']);
assert_eq_slice(&db, &input, Some(2), Some(0), Some(-2), &['c']);
assert_eq_slice(&db, &input, Some(1), Some(0), Some(-2), &['b']);
assert_eq_slice(&db, &input, Some(0), Some(0), Some(-2), &[]);
assert_eq_slice(&db, &input, Some(7), None, Some(-2), &['g', 'e', 'c', 'a']);
assert_eq_slice(&db, &input, None, None, Some(-2), &['g', 'e', 'c', 'a']);
assert_eq_slice(&db, &input, None, Some(0), Some(-2), &['g', 'e', 'c']);
assert_eq_slice(&db, &input, Some(5), Some(1), Some(-2), &['f', 'd']);
assert_eq_slice(&db, &input, Some(5), Some(2), Some(-2), &['f', 'd']);
assert_eq_slice(&db, &input, Some(5), Some(3), Some(-2), &['f']);
assert_eq_slice(&db, &input, Some(5), Some(4), Some(-2), &['f']);
assert_eq_slice(&db, &input, Some(5), Some(5), Some(-2), &[]);
assert_eq_slice(&db, &input, Some(6), None, Some(-3), &['g', 'd', 'a']);
assert_eq_slice(&db, &input, Some(6), Some(0), Some(-3), &['g', 'd']);
assert_eq_slice(&db, &input, Some(7), None, Some(-10), &['g']);
assert_eq_slice(&db, &input, Some(-6), Some(-9), Some(-1), &['b', 'a']);
assert_eq_slice(&db, &input, Some(-6), Some(-8), Some(-1), &['b', 'a']);
assert_eq_slice(&db, &input, Some(-6), Some(-7), Some(-1), &['b']);
assert_eq_slice(&db, &input, Some(-6), Some(-6), Some(-1), &[]);
assert_eq_slice(&db, &input, Some(-7), Some(-9), Some(-1), &['a']);
assert_eq_slice(&db, &input, Some(-8), Some(-9), Some(-1), &[]);
assert_eq_slice(&db, &input, Some(-9), Some(-9), Some(-1), &[]);
assert_eq_slice(&db, &input, Some(-6), Some(-2), Some(-1), &[]);
assert_eq_slice(&db, &input, Some(-9), Some(-6), Some(-1), &[]);
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/semantic_model.rs | crates/ty_python_semantic/src/semantic_model.rs | use ruff_db::files::{File, FilePath};
use ruff_db::source::{line_index, source_text};
use ruff_python_ast::{self as ast, ExprStringLiteral, ModExpression};
use ruff_python_ast::{Expr, ExprRef, HasNodeIndex, name::Name};
use ruff_python_parser::Parsed;
use ruff_source_file::LineIndex;
use rustc_hash::FxHashMap;
use ty_module_resolver::{
KnownModule, Module, ModuleName, list_modules, resolve_module, resolve_real_shadowable_module,
};
use crate::Db;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::scope::FileScopeId;
use crate::semantic_index::semantic_index;
use crate::types::list_members::{Member, all_members, all_reachable_members};
use crate::types::{Type, binding_type, infer_scope_types};
/// The primary interface the LSP should use for querying semantic information about a [`File`].
///
/// Although you can in principle freely construct this type given a `db` and `file`, you should
/// try to construct this at the start of your analysis and thread the same instance through
/// the full analysis.
///
/// The primary reason for this is that it manages traversing into the sub-ASTs of string
/// annotations (see [`Self::enter_string_annotation`]). When you do this you will be handling
/// AST nodes that don't belong to the file's AST (or *any* file's AST). These kinds of nodes
/// will result in panics and confusing results if handed to the wrong subsystem. `SemanticModel`
/// methods will automatically handle using the string literal's AST node when necessary.
pub struct SemanticModel<'db> {
    /// Database handle used for all semantic queries.
    db: &'db dyn Db,
    /// The file this model answers queries about.
    file: File,
    /// If `Some` then this `SemanticModel` is for analyzing the sub-AST of a string annotation.
    /// This expression will be used as a witness to the scope/location we're analyzing.
    in_string_annotation_expr: Option<Box<Expr>>,
}
impl<'db> SemanticModel<'db> {
    /// Creates a model rooted at the file's own AST (i.e. not inside any
    /// string annotation).
    pub fn new(db: &'db dyn Db, file: File) -> Self {
        Self {
            db,
            file,
            in_string_annotation_expr: None,
        }
    }

    /// The database this model queries.
    pub fn db(&self) -> &'db dyn Db {
        self.db
    }

    /// The file this model answers queries about.
    pub fn file(&self) -> File {
        self.file
    }

    /// The path of the underlying file.
    pub fn file_path(&self) -> &FilePath {
        self.file.path(self.db)
    }

    /// The line index for mapping text offsets in this file.
    pub fn line_index(&self) -> LineIndex {
        line_index(self.db, self.file)
    }

    /// Returns a map from symbol name to that symbol's
    /// type and definition site (if available).
    ///
    /// The symbols are the symbols in scope at the given
    /// AST node.
    pub fn members_in_scope_at(
        &self,
        node: ast::AnyNodeRef<'_>,
    ) -> FxHashMap<Name, MemberDefinition<'db>> {
        let index = semantic_index(self.db, self.file);
        let mut members = FxHashMap::default();
        let Some(file_scope) = self.scope(node) else {
            return members;
        };
        // NOTE(review): for a name bound in several ancestor scopes, the entry
        // from the scope visited *last* wins the `insert` — this relies on the
        // iteration order of `ancestor_scopes`; confirm outer scopes don't
        // overwrite shadowing inner bindings.
        for (file_scope, _) in index.ancestor_scopes(file_scope) {
            for memberdef in
                all_reachable_members(self.db, file_scope.to_scope_id(self.db, self.file))
            {
                members.insert(
                    memberdef.member.name,
                    MemberDefinition {
                        ty: memberdef.member.ty,
                        first_reachable_definition: memberdef.first_reachable_definition,
                    },
                );
            }
        }
        members
    }

    /// Resolve the given import made in this file to a Type
    pub fn resolve_module_type(&self, module: Option<&str>, level: u32) -> Option<Type<'db>> {
        let module = self.resolve_module(module, level)?;
        Some(Type::module_literal(self.db, self.file, module))
    }

    /// Resolve the given import made in this file to a Module
    pub fn resolve_module(&self, module: Option<&str>, level: u32) -> Option<Module<'db>> {
        let module_name =
            ModuleName::from_identifier_parts(self.db, self.file, module, level).ok()?;
        resolve_module(self.db, self.file, &module_name)
    }

    /// Returns completions for symbols available in a `import <CURSOR>` context.
    pub fn import_completions(&self) -> Vec<Completion<'db>> {
        let typing_extensions = ModuleName::new_static("typing_extensions").unwrap();
        // `typing_extensions` is only offered when it actually resolves (or
        // when completing inside a stub, where it is assumed available).
        let is_typing_extensions_available = self.file.is_stub(self.db)
            || resolve_real_shadowable_module(self.db, self.file, &typing_extensions).is_some();
        list_modules(self.db)
            .into_iter()
            .filter(|module| {
                is_typing_extensions_available || module.name(self.db) != &typing_extensions
            })
            .map(|module| {
                let builtin = module.is_known(self.db, KnownModule::Builtins);
                let ty = Type::module_literal(self.db, self.file, module);
                Completion {
                    name: Name::new(module.name(self.db).as_str()),
                    ty: Some(ty),
                    builtin,
                }
            })
            .collect()
    }

    /// Returns completions for symbols available in a `from module import <CURSOR>` context.
    pub fn from_import_completions(&self, import: &ast::StmtImportFrom) -> Vec<Completion<'db>> {
        let module_name = match ModuleName::from_import_statement(self.db, self.file, import) {
            Ok(module_name) => module_name,
            Err(err) => {
                // Best effort: an unresolvable module name simply yields no
                // completions rather than an error.
                tracing::debug!(
                    "Could not extract module name from `{module:?}` with level {level}: {err:?}",
                    module = import.module,
                    level = import.level,
                );
                return vec![];
            }
        };
        self.module_completions(&module_name)
    }

    /// Returns submodule-only completions for the given module.
    pub fn import_submodule_completions_for_name(
        &self,
        module_name: &ModuleName,
    ) -> Vec<Completion<'db>> {
        let Some(module) = resolve_module(self.db, self.file, module_name) else {
            tracing::debug!("Could not resolve module from `{module_name:?}`");
            return vec![];
        };
        self.submodule_completions(&module)
    }

    /// Returns completions for symbols available in the given module as if
    /// it were imported by this model's `File`.
    fn module_completions(&self, module_name: &ModuleName) -> Vec<Completion<'db>> {
        let Some(module) = resolve_module(self.db, self.file, module_name) else {
            tracing::debug!("Could not resolve module from `{module_name:?}`");
            return vec![];
        };
        let ty = Type::module_literal(self.db, self.file, module);
        let builtin = module.is_known(self.db, KnownModule::Builtins);
        let mut completions = vec![];
        // Members of the module itself...
        for Member { name, ty } in all_members(self.db, ty) {
            completions.push(Completion {
                name,
                ty: Some(ty),
                builtin,
            });
        }
        // ...plus its direct submodules.
        completions.extend(self.submodule_completions(&module));
        completions
    }

    /// Returns completions for submodules of the given module.
    fn submodule_completions(&self, module: &Module<'db>) -> Vec<Completion<'db>> {
        let builtin = module.is_known(self.db, KnownModule::Builtins);
        let mut completions = vec![];
        for submodule in module.all_submodules(self.db) {
            let ty = Type::module_literal(self.db, self.file, *submodule);
            // Only the last component of the dotted name is offered, since the
            // parent module is already written at the cursor.
            let Some(base) = submodule.name(self.db).components().next_back() else {
                continue;
            };
            completions.push(Completion {
                name: Name::new(base),
                ty: Some(ty),
                builtin,
            });
        }
        completions
    }

    /// Returns completions for symbols available in a `object.<CURSOR>` context.
    pub fn attribute_completions(&self, node: &ast::ExprAttribute) -> Vec<Completion<'db>> {
        let Some(ty) = node.value.inferred_type(self) else {
            return Vec::new();
        };
        all_members(self.db, ty)
            .into_iter()
            .map(|member| Completion {
                name: member.name,
                ty: Some(member.ty),
                builtin: false,
            })
            .collect()
    }

    /// Returns completions for symbols available in the scope containing the
    /// given expression.
    ///
    /// If a scope could not be determined, then completions for the global
    /// scope of this model's `File` are returned.
    pub fn scoped_completions(&self, node: ast::AnyNodeRef<'_>) -> Vec<Completion<'db>> {
        let index = semantic_index(self.db, self.file);
        let Some(file_scope) = self.scope(node) else {
            return vec![];
        };
        let mut completions = vec![];
        for (file_scope, _) in index.ancestor_scopes(file_scope) {
            completions.extend(
                all_reachable_members(self.db, file_scope.to_scope_id(self.db, self.file)).map(
                    |memberdef| Completion {
                        name: memberdef.member.name,
                        ty: Some(memberdef.member.ty),
                        builtin: false,
                    },
                ),
            );
        }
        // Builtins are available in all scopes.
        let builtins = ModuleName::new_static("builtins").expect("valid module name");
        completions.extend(self.module_completions(&builtins));
        completions
    }

    /// Returns the scope in which `node` is defined (handles string annotations).
    pub fn scope(&self, node: ast::AnyNodeRef<'_>) -> Option<FileScopeId> {
        let index = semantic_index(self.db, self.file);
        match self.node_in_ast(node) {
            ast::AnyNodeRef::Identifier(identifier) => index.try_expression_scope_id(identifier),
            // Nodes implementing `HasDefinition`
            ast::AnyNodeRef::StmtFunctionDef(function) => Some(
                function
                    .definition(self)
                    .scope(self.db)
                    .file_scope_id(self.db),
            ),
            ast::AnyNodeRef::StmtClassDef(class) => {
                Some(class.definition(self).scope(self.db).file_scope_id(self.db))
            }
            ast::AnyNodeRef::Parameter(parameter) => Some(
                parameter
                    .definition(self)
                    .scope(self.db)
                    .file_scope_id(self.db),
            ),
            ast::AnyNodeRef::ParameterWithDefault(parameter) => Some(
                parameter
                    .definition(self)
                    .scope(self.db)
                    .file_scope_id(self.db),
            ),
            ast::AnyNodeRef::ExceptHandlerExceptHandler(handler) => Some(
                handler
                    .definition(self)
                    .scope(self.db)
                    .file_scope_id(self.db),
            ),
            ast::AnyNodeRef::TypeParamTypeVar(var) => {
                Some(var.definition(self).scope(self.db).file_scope_id(self.db))
            }
            // Fallback
            node => match node.as_expr_ref() {
                // If we couldn't identify a specific
                // expression that we're in, then just
                // fall back to the global scope.
                None => Some(FileScopeId::global()),
                Some(expr) => index.try_expression_scope_id(&expr),
            },
        }
    }

    /// Get a "safe" [`ast::AnyNodeRef`] to use for referring to the given (sub-)AST node.
    ///
    /// If we're analyzing a string annotation, it will return the string literal's node.
    /// Otherwise it will return the input.
    pub fn node_in_ast<'a>(&'a self, node: ast::AnyNodeRef<'a>) -> ast::AnyNodeRef<'a> {
        if let Some(string_annotation) = &self.in_string_annotation_expr {
            (&**string_annotation).into()
        } else {
            node
        }
    }

    /// Get a "safe" [`Expr`] to use for referring to the given (sub-)expression.
    ///
    /// If we're analyzing a string annotation, it will return the string literal's expression.
    /// Otherwise it will return the input.
    pub fn expr_in_ast<'a>(&'a self, expr: &'a Expr) -> &'a Expr {
        if let Some(string_annotation) = &self.in_string_annotation_expr {
            string_annotation
        } else {
            expr
        }
    }

    /// Get a "safe" [`ExprRef`] to use for referring to the given (sub-)expression.
    ///
    /// If we're analyzing a string annotation, it will return the string literal's expression.
    /// Otherwise it will return the input.
    pub fn expr_ref_in_ast<'a>(&'a self, expr: ExprRef<'a>) -> ExprRef<'a> {
        if let Some(string_annotation) = &self.in_string_annotation_expr {
            ExprRef::from(string_annotation)
        } else {
            expr
        }
    }

    /// Given a string expression, determine if it's a string annotation, and if it is,
    /// yield the parsed sub-AST and a sub-model that knows it's analyzing a sub-AST.
    ///
    /// Analysis of the sub-AST should only be done with the sub-model, or else things
    /// may return nonsense results or even panic!
    pub fn enter_string_annotation(
        &self,
        string_expr: &ExprStringLiteral,
    ) -> Option<(Parsed<ModExpression>, Self)> {
        // String annotations can't contain string annotations
        if self.in_string_annotation_expr.is_some() {
            return None;
        }
        // Ask the inference engine whether this is actually a string annotation
        let expr = ExprRef::StringLiteral(string_expr);
        let index = semantic_index(self.db, self.file);
        let file_scope = index.expression_scope_id(&expr);
        let scope = file_scope.to_scope_id(self.db, self.file);
        if !infer_scope_types(self.db, scope).is_string_annotation(expr) {
            return None;
        }
        // Parse the sub-AST and create a semantic model that knows it's in a sub-AST
        //
        // The string_annotation will be used as the expr/node for any query that needs
        // to look up a node in the AST to prevent panics, because these sub-AST nodes
        // are not in the File's AST!
        let source = source_text(self.db, self.file);
        let string_literal = string_expr.as_single_part_string()?;
        let ast =
            ruff_python_parser::parse_string_annotation(source.as_str(), string_literal).ok()?;
        let model = Self {
            db: self.db,
            file: self.file,
            in_string_annotation_expr: Some(Box::new(Expr::StringLiteral(string_expr.clone()))),
        };
        Some((ast, model))
    }
}
/// The type and definition of a symbol.
#[derive(Clone, Debug)]
pub struct MemberDefinition<'db> {
    /// The inferred type of the symbol.
    pub ty: Type<'db>,
    /// The first definition of this symbol that is reachable in its scope.
    pub first_reachable_definition: Definition<'db>,
}
/// A classification of symbol names.
///
/// The ordering here is used for sorting completions.
///
/// This sorts "normal" names first, then dunder names and finally
/// single-underscore names. This matches the order of the variants defined for
/// this enum, which is in turn picked up by the derived trait implementation
/// for `Ord`.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub enum NameKind {
    /// A name with no special leading underscore, e.g. `foo`.
    Normal,
    /// A name with both a leading and trailing double underscore, e.g. `__init__`.
    Dunder,
    /// A name with a leading underscore (but not a dunder), e.g. `_private`.
    Sunder,
}
impl NameKind {
    /// Classify `name` by its leading/trailing underscores.
    ///
    /// Dunder needs a prefix and suffix double underscore. When there's only
    /// a prefix double underscore, this results in explicit name mangling;
    /// we let those be classified as-if they were single underscore names.
    ///
    /// Ref: <https://docs.python.org/3/reference/lexical_analysis.html#reserved-classes-of-identifiers>
    pub fn classify(name: &Name) -> NameKind {
        let is_dunder = name.starts_with("__") && name.ends_with("__");
        if is_dunder {
            NameKind::Dunder
        } else if name.starts_with('_') {
            NameKind::Sunder
        } else {
            NameKind::Normal
        }
    }
}
/// A suggestion for code completion.
///
/// See [`NameKind`] for how completion names are sorted.
#[derive(Clone, Debug)]
pub struct Completion<'db> {
    /// The label shown to the user for this suggestion.
    pub name: Name,
    /// The type of this completion, if available.
    ///
    /// Generally speaking, this is always available
    /// *unless* this was a completion corresponding to
    /// an unimported symbol. In that case, computing the
    /// type of all such symbols could be quite expensive.
    pub ty: Option<Type<'db>>,
    /// Whether this suggestion came from builtins or not.
    ///
    /// At time of writing (2025-06-26), this information
    /// doesn't make it into the LSP response. Instead, we
    /// use it mainly in tests so that we can write less
    /// noisy tests.
    pub builtin: bool,
}
impl<'db> Completion<'db> {
    /// Whether this completion's type is marked as "type check only".
    ///
    /// Returns `false` when no type is available for this completion.
    pub fn is_type_check_only(&self, db: &'db dyn Db) -> bool {
        match self.ty {
            Some(ty) => ty.is_type_check_only(db),
            None => false,
        }
    }
}
/// Types (AST nodes) for which an inferred type can be queried.
pub trait HasType {
    /// Returns the inferred type of `self`.
    ///
    /// Returns `None` if no type could be inferred for `self`.
    ///
    /// ## Panics
    /// May panic if `self` is from another file than `model`.
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>>;
}
/// Types (AST nodes) for which a definition can be queried.
pub trait HasDefinition {
    /// Returns the definition of `self`.
    ///
    /// ## Panics
    /// May panic if `self` is from another file than `model`,
    /// or if `self` does not have exactly one definition in the semantic index.
    fn definition<'db>(&self, model: &SemanticModel<'db>) -> Definition<'db>;
}
impl HasType for ast::ExprRef<'_> {
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
        let index = semantic_index(model.db, model.file);
        let safe_expr = model.expr_ref_in_ast(*self);
        // TODO(#1637): semantic tokens is making this crash even with
        // `try_expr_ref_in_ast` guarding this, for now just use `try_expression_scope_id`.
        // The problematic input is `x: "float` (with a dangling quote). I imagine the issue
        // is we're too eagerly setting `is_string_annotation` in inference.
        let file_scope = index.try_expression_scope_id(&safe_expr)?;
        let scope = file_scope.to_scope_id(model.db, model.file);
        infer_scope_types(model.db, scope).try_expression_type(*self)
    }
}
/// Implements [`HasType`] for a concrete expression node type by converting
/// it to an [`ExprRef`] and delegating to the `ExprRef` implementation.
macro_rules! impl_expression_has_type {
    ($ty: ty) => {
        impl HasType for $ty {
            #[inline]
            fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
                let expression_ref = ExprRef::from(self);
                expression_ref.inferred_type(model)
            }
        }
    };
}
// Delegate `HasType` to the `ExprRef`-based implementation for every
// concrete expression node type.
impl_expression_has_type!(ast::ExprBoolOp);
impl_expression_has_type!(ast::ExprNamed);
impl_expression_has_type!(ast::ExprBinOp);
impl_expression_has_type!(ast::ExprUnaryOp);
impl_expression_has_type!(ast::ExprLambda);
impl_expression_has_type!(ast::ExprIf);
impl_expression_has_type!(ast::ExprDict);
impl_expression_has_type!(ast::ExprSet);
impl_expression_has_type!(ast::ExprListComp);
impl_expression_has_type!(ast::ExprSetComp);
impl_expression_has_type!(ast::ExprDictComp);
impl_expression_has_type!(ast::ExprGenerator);
impl_expression_has_type!(ast::ExprAwait);
impl_expression_has_type!(ast::ExprYield);
impl_expression_has_type!(ast::ExprYieldFrom);
impl_expression_has_type!(ast::ExprCompare);
impl_expression_has_type!(ast::ExprCall);
impl_expression_has_type!(ast::ExprFString);
impl_expression_has_type!(ast::ExprTString);
impl_expression_has_type!(ast::ExprStringLiteral);
impl_expression_has_type!(ast::ExprBytesLiteral);
impl_expression_has_type!(ast::ExprNumberLiteral);
impl_expression_has_type!(ast::ExprBooleanLiteral);
impl_expression_has_type!(ast::ExprNoneLiteral);
impl_expression_has_type!(ast::ExprEllipsisLiteral);
impl_expression_has_type!(ast::ExprAttribute);
impl_expression_has_type!(ast::ExprSubscript);
impl_expression_has_type!(ast::ExprStarred);
impl_expression_has_type!(ast::ExprName);
impl_expression_has_type!(ast::ExprList);
impl_expression_has_type!(ast::ExprTuple);
impl_expression_has_type!(ast::ExprSlice);
impl_expression_has_type!(ast::ExprIpyEscapeCommand);
impl HasType for ast::Expr {
    // Exhaustive delegation: each variant forwards to the node-specific impl
    // (generated by `impl_expression_has_type!` above). The exhaustive match
    // guarantees a compile error if a new expression variant is added.
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
        match self {
            Expr::BoolOp(inner) => inner.inferred_type(model),
            Expr::Named(inner) => inner.inferred_type(model),
            Expr::BinOp(inner) => inner.inferred_type(model),
            Expr::UnaryOp(inner) => inner.inferred_type(model),
            Expr::Lambda(inner) => inner.inferred_type(model),
            Expr::If(inner) => inner.inferred_type(model),
            Expr::Dict(inner) => inner.inferred_type(model),
            Expr::Set(inner) => inner.inferred_type(model),
            Expr::ListComp(inner) => inner.inferred_type(model),
            Expr::SetComp(inner) => inner.inferred_type(model),
            Expr::DictComp(inner) => inner.inferred_type(model),
            Expr::Generator(inner) => inner.inferred_type(model),
            Expr::Await(inner) => inner.inferred_type(model),
            Expr::Yield(inner) => inner.inferred_type(model),
            Expr::YieldFrom(inner) => inner.inferred_type(model),
            Expr::Compare(inner) => inner.inferred_type(model),
            Expr::Call(inner) => inner.inferred_type(model),
            Expr::FString(inner) => inner.inferred_type(model),
            Expr::TString(inner) => inner.inferred_type(model),
            Expr::StringLiteral(inner) => inner.inferred_type(model),
            Expr::BytesLiteral(inner) => inner.inferred_type(model),
            Expr::NumberLiteral(inner) => inner.inferred_type(model),
            Expr::BooleanLiteral(inner) => inner.inferred_type(model),
            Expr::NoneLiteral(inner) => inner.inferred_type(model),
            Expr::EllipsisLiteral(inner) => inner.inferred_type(model),
            Expr::Attribute(inner) => inner.inferred_type(model),
            Expr::Subscript(inner) => inner.inferred_type(model),
            Expr::Starred(inner) => inner.inferred_type(model),
            Expr::Name(inner) => inner.inferred_type(model),
            Expr::List(inner) => inner.inferred_type(model),
            Expr::Tuple(inner) => inner.inferred_type(model),
            Expr::Slice(inner) => inner.inferred_type(model),
            Expr::IpyEscapeCommand(inner) => inner.inferred_type(model),
        }
    }
}
/// Implements both [`HasDefinition`] and [`HasType`] for a binding node type:
/// the definition is looked up in the semantic index (panicking unless there
/// is exactly one), and the type is the binding's type.
macro_rules! impl_binding_has_ty_def {
    ($ty: ty) => {
        impl HasDefinition for $ty {
            #[inline]
            fn definition<'db>(&self, model: &SemanticModel<'db>) -> Definition<'db> {
                let index = semantic_index(model.db, model.file);
                index.expect_single_definition(self)
            }
        }
        impl HasType for $ty {
            #[inline]
            fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
                let binding = HasDefinition::definition(self, model);
                Some(binding_type(model.db, binding))
            }
        }
    };
}
// Binding nodes that carry a single definition in the semantic index.
impl_binding_has_ty_def!(ast::StmtFunctionDef);
impl_binding_has_ty_def!(ast::StmtClassDef);
impl_binding_has_ty_def!(ast::Parameter);
impl_binding_has_ty_def!(ast::ParameterWithDefault);
impl_binding_has_ty_def!(ast::ExceptHandlerExceptHandler);
impl_binding_has_ty_def!(ast::TypeParamTypeVar);
impl HasType for ast::Alias {
    fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option<Type<'db>> {
        match &self.name {
            // A wildcard (`from module import *`) has no single definition
            // to resolve, so it maps to `Never`.
            name if name == "*" => Some(Type::Never),
            _ => {
                let definition =
                    semantic_index(model.db, model.file).expect_single_definition(self);
                Some(binding_type(model.db, definition))
            }
        }
    }
}
/// Implemented by types for which the semantic index tracks their scope.
///
/// This is a marker trait: it carries no methods beyond [`HasNodeIndex`].
pub(crate) trait HasTrackedScope: HasNodeIndex {}

impl HasTrackedScope for ast::Expr {}
impl HasTrackedScope for ast::ExprRef<'_> {}
impl HasTrackedScope for &ast::ExprRef<'_> {}

// We never explicitly register the scope of an `Identifier`.
// However, `ExpressionsScopeMap` stores the text ranges of each scope.
// That allows us to look up the identifier's scope for as long as it's
// inside an expression (because the ranges overlap).
impl HasTrackedScope for ast::Identifier {}
#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::parsed::parsed_module;

    use crate::db::tests::TestDbBuilder;
    use crate::{HasType, SemanticModel};

    /// A function definition should infer to a function-literal type.
    #[test]
    fn function_type() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "def test(): pass")
            .build()?;
        let file = system_path_to_file(&db, "/src/foo.py").unwrap();
        let parsed = parsed_module(&db, file).load(&db);
        let function = parsed.suite()[0].as_function_def_stmt().unwrap();

        let model = SemanticModel::new(&db, file);
        let inferred = function.inferred_type(&model).unwrap();
        assert!(inferred.is_function_literal());
        Ok(())
    }

    /// A class definition should infer to a class-literal type.
    #[test]
    fn class_type() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "class Test: pass")
            .build()?;
        let file = system_path_to_file(&db, "/src/foo.py").unwrap();
        let parsed = parsed_module(&db, file).load(&db);
        let class = parsed.suite()[0].as_class_def_stmt().unwrap();

        let model = SemanticModel::new(&db, file);
        let inferred = class.inferred_type(&model).unwrap();
        assert!(inferred.is_class_literal());
        Ok(())
    }

    /// An imported alias should resolve to the type of what it imports.
    #[test]
    fn alias_type() -> anyhow::Result<()> {
        let db = TestDbBuilder::new()
            .with_file("/src/foo.py", "class Test: pass")
            .with_file("/src/bar.py", "from foo import Test")
            .build()?;
        let file = system_path_to_file(&db, "/src/bar.py").unwrap();
        let parsed = parsed_module(&db, file).load(&db);
        let import = parsed.suite()[0].as_import_from_stmt().unwrap();
        let alias = &import.names[0];

        let model = SemanticModel::new(&db, file);
        let inferred = alias.inferred_type(&model).unwrap();
        assert!(inferred.is_class_literal());
        Ok(())
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/diagnostic.rs | crates/ty_python_semantic/src/diagnostic.rs | use crate::{
Db, Program, PythonVersionWithSource, lint::lint_documentation_url, types::TypeCheckDiagnostics,
};
use ruff_db::{
diagnostic::{Annotation, Diagnostic, DiagnosticId, SubDiagnostic, SubDiagnosticSeverity},
files::File,
};
use std::cell::RefCell;
use std::fmt::Write;
/// Suggest a name from `existing_names` that is similar to `wrong_name`.
///
/// Candidates shorter than 2 bytes are ignored, and no suggestion is made for
/// a `wrong_name` shorter than 3 bytes (too little signal). The closest
/// candidate by case-insensitive Damerau-Levenshtein distance is returned,
/// provided the distance is at most 3; ties keep the first candidate seen.
pub(crate) fn did_you_mean<S: AsRef<str>, T: AsRef<str>>(
    existing_names: impl Iterator<Item = S>,
    wrong_name: T,
) -> Option<String> {
    if wrong_name.as_ref().len() < 3 {
        return None;
    }
    // Hoisted out of the loop: previously the needle was lowercased once per
    // candidate, allocating a fresh `String` for every comparison.
    let wrong_name_lower = wrong_name.as_ref().to_lowercase();
    existing_names
        .filter(|id| id.as_ref().len() >= 2)
        .map(|id| {
            let distance =
                strsim::damerau_levenshtein(&id.as_ref().to_lowercase(), &wrong_name_lower);
            (id.as_ref().to_string(), distance)
        })
        .min_by_key(|(_, dist)| *dist)
        // Heuristic to filter out bad matches
        .filter(|(_, dist)| *dist <= 3)
        .map(|(id, _)| id)
}
/// Add a subdiagnostic to `diagnostic` that explains why a certain Python version was inferred.
///
/// ty can infer the Python version from various sources, such as command-line arguments,
/// configuration files, or defaults.
///
/// `action` is interpolated into the message, e.g. "Python 3.12 was assumed when {action}".
pub fn add_inferred_python_version_hint_to_diagnostic(
    db: &dyn Db,
    diagnostic: &mut Diagnostic,
    action: &str,
) {
    let program = Program::get(db);
    let PythonVersionWithSource { version, source } = program.python_version_with_source(db);
    match source {
        // Explicit `--python-version` style argument.
        crate::PythonVersionSource::Cli => {
            diagnostic.info(format_args!(
                "Python {version} was assumed when {action} because it was specified on the command line",
            ));
        }
        // Configuration file; annotate the exact setting when a span is available.
        crate::PythonVersionSource::ConfigFile(source) => {
            if let Some(span) = source.span(db) {
                let mut sub_diagnostic = SubDiagnostic::new(
                    SubDiagnosticSeverity::Info,
                    format_args!("Python {version} was assumed when {action}"),
                );
                sub_diagnostic
                    .annotate(Annotation::primary(span).message("Python version configuration"));
                diagnostic.sub(sub_diagnostic);
            } else {
                diagnostic.info(format_args!(
                    "Python {version} was assumed when {action} because of your configuration file(s)",
                ));
            }
        }
        // Version read from a virtual environment's `pyvenv.cfg`.
        crate::PythonVersionSource::PyvenvCfgFile(source) => {
            if let Some(span) = source.span(db) {
                let mut sub_diagnostic = SubDiagnostic::new(
                    SubDiagnosticSeverity::Info,
                    format_args!(
                        "Python {version} was assumed when {action} because of your virtual environment"
                    ),
                );
                sub_diagnostic
                    .annotate(Annotation::primary(span).message("Virtual environment metadata"));
                // TODO: it would also be nice to tell them how we resolved their virtual environment...
                diagnostic.sub(sub_diagnostic);
            } else {
                diagnostic.info(format_args!(
                    "Python {version} was assumed when {action} because \
                    your virtual environment's pyvenv.cfg file indicated \
                    it was the Python version being used",
                ));
            }
            diagnostic.info(
                "No Python version was specified on the command line \
                or in a configuration file",
            );
        }
        // Version selected by the user's editor/IDE.
        crate::PythonVersionSource::Editor => {
            diagnostic.info(format_args!(
                "Python {version} was assumed when {action} \
                because it's the version of the selected Python interpreter in your editor",
            ));
        }
        // Version inferred from the `lib/pythonX.Y/site-packages` layout.
        crate::PythonVersionSource::InstallationDirectoryLayout {
            site_packages_parent_dir,
        } => {
            // TODO: it would also be nice to tell them how we resolved this Python installation...
            diagnostic.info(format_args!(
                "Python {version} was assumed when {action} \
                because of the layout of your Python installation"
            ));
            diagnostic.info(format_args!(
                "The primary `site-packages` directory of your installation was found \
                at `lib/{site_packages_parent_dir}/site-packages/`"
            ));
            diagnostic.info(
                "No Python version was specified on the command line \
                or in a configuration file",
            );
        }
        // Fallback when no source provided a version.
        crate::PythonVersionSource::Default => {
            diagnostic.info(format_args!(
                "Python {version} was assumed when {action} \
                because it is the newest Python version supported by ty, \
                and neither a command-line argument nor a configuration setting was provided",
            ));
        }
    }
}
/// Format a list of elements as a human-readable enumeration.
///
/// Encloses every element in backticks (`1`, `2` and `3`).
///
/// Callers must pass at least two elements; fewer will panic.
pub(crate) fn format_enumeration<I, IT, D>(elements: I) -> String
where
    I: IntoIterator<IntoIter = IT>,
    IT: ExactSizeIterator<Item = D> + DoubleEndedIterator,
    D: std::fmt::Display,
{
    let mut items = elements.into_iter();
    debug_assert!(items.len() >= 2);

    // Pop the final pair off the back so they can be joined with "and".
    let last = items.next_back().unwrap();
    let second_to_last = items.next_back().unwrap();

    // Everything before the final pair becomes a comma-separated prefix.
    let mut output: String = items.map(|element| format!("`{element}`, ")).collect();
    write!(&mut output, "`{second_to_last}` and `{last}`").ok();
    output
}
/// An abstraction for mutating a diagnostic.
///
/// Callers likely should use `LintDiagnosticGuard` via
/// `InferContext::report_lint` instead. This guard is only intended for use
/// with non-lint diagnostics or non-type checking diagnostics. It is fundamentally lower level and easier to
/// get things wrong by using it.
///
/// Unlike `LintDiagnosticGuard`, this API does not guarantee that the
/// constructed `Diagnostic` not only has a primary annotation, but its
/// associated file is equivalent to the file being type checked. As a result,
/// if either is violated, then the `Drop` impl on `DiagnosticGuard` will
/// panic.
pub(super) struct DiagnosticGuard<'sink> {
    /// The file of the primary span (to which file does this diagnostic belong).
    ///
    /// The `Drop` impl asserts that this matches the file of the diagnostic's
    /// primary annotation.
    file: File,
    /// The target where to emit the diagnostic to.
    ///
    /// We use a [`RefCell`] here over a `&mut TypeCheckDiagnostics` to ensure the fact that
    /// `InferContext` (and other contexts with diagnostics) use a [`RefCell`] internally
    /// remains abstracted away. Specifically, we want to ensure that calling `report_lint` on
    /// `InferContext` twice doesn't result in a panic:
    ///
    /// ```ignore
    /// let diag1 = context.report_lint(...);
    ///
    /// // would panic if using a `&mut TypeCheckDiagnostics`
    /// // because of a second mutable borrow.
    /// let diag2 = context.report_lint(...);
    /// ```
    sink: &'sink RefCell<TypeCheckDiagnostics>,
    /// The diagnostic that we want to report.
    ///
    /// This is always `Some` until the `Drop` impl.
    diag: Option<Diagnostic>,
}
impl<'sink> DiagnosticGuard<'sink> {
    /// Create a guard that submits `diag` to `sink` when dropped.
    ///
    /// `file` must match the file of the diagnostic's primary annotation;
    /// this is asserted when the guard is dropped.
    pub(crate) fn new(
        file: File,
        sink: &'sink std::cell::RefCell<TypeCheckDiagnostics>,
        diag: Diagnostic,
    ) -> Self {
        // Wrapped in `Some` so the `Drop` impl can take ownership.
        let diag = Some(diag);
        Self { file, sink, diag }
    }
}
/// Return a shared borrow of the diagnostic in this guard.
impl std::ops::Deref for DiagnosticGuard<'_> {
    type Target = Diagnostic;
    fn deref(&self) -> &Diagnostic {
        // OK because `self.diag` is only `None` within `Drop`.
        self.diag.as_ref().unwrap()
    }
}
/// Return a mutable borrow of the diagnostic in this guard.
///
/// Callers may mutate the diagnostic to add new sub-diagnostics
/// or annotations.
///
/// The diagnostic is added to the typing context, if appropriate,
/// when this guard is dropped.
impl std::ops::DerefMut for DiagnosticGuard<'_> {
    fn deref_mut(&mut self) -> &mut Diagnostic {
        // OK because `self.diag` is only `None` within `Drop`.
        self.diag.as_mut().unwrap()
    }
}
/// Finishes use of this guard.
///
/// This will add the diagnostic to the typing context if appropriate.
///
/// # Panics
///
/// This panics when the underlying diagnostic lacks a primary
/// annotation, or if it has one and its file doesn't match the file
/// being type checked.
impl Drop for DiagnosticGuard<'_> {
    fn drop(&mut self) {
        if std::thread::panicking() {
            // Don't submit diagnostics when panicking because they might be incomplete.
            return;
        }

        // OK because the only way `self.diag` is `None`
        // is via this impl, which can only run at most
        // once.
        let mut diag = self.diag.take().unwrap();
        let Some(ann) = diag.primary_annotation() else {
            panic!(
                "All diagnostics reported by `InferContext` must have a \
                 primary annotation, but diagnostic {id} does not",
                id = diag.id(),
            );
        };
        let expected_file = self.file;
        let got_file = ann.get_span().expect_ty_file();
        assert_eq!(
            expected_file,
            got_file,
            "All diagnostics reported by `InferContext` must have a \
             primary annotation whose file matches the file of the \
             current typing context, but diagnostic {id} has file \
             {got_file:?} and we expected {expected_file:?}",
            id = diag.id(),
        );
        // Lint diagnostics without an explicit documentation URL get a
        // default link to the lint's rule documentation.
        if let DiagnosticId::Lint(lint_name) = diag.id()
            && diag.documentation_url().is_none()
        {
            diag.set_documentation_url(Some(lint_documentation_url(lint_name)));
        }
        self.sink.borrow_mut().push(diag);
    }
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/list_members.rs | crates/ty_python_semantic/src/types/list_members.rs | //! Routines and types to list all members present on a given type or in a given scope.
//!
//! These two concepts are closely related, since listing all members of a given
//! module-literal type requires listing all members in the module's scope, and
//! listing all members on a nominal-instance type or a class-literal type requires
//! listing all members in the class's body scope.
use std::cmp::Ordering;
use ruff_python_ast::name::Name;
use rustc_hash::FxHashSet;
use crate::{
Db, NameKind,
place::{
Place, PlaceWithDefinition, imported_symbol, place_from_bindings, place_from_declarations,
},
semantic_index::{
attribute_scopes, definition::Definition, global_scope, place_table, scope::ScopeId,
semantic_index, use_def_map,
},
types::{
ClassBase, ClassLiteral, KnownClass, KnownInstanceType, SubclassOfInner, Type,
TypeVarBoundOrConstraints, class::CodeGeneratorKind, generics::Specialization,
},
};
/// Iterate over all declarations and bindings that exist at the end
/// of the given scope.
///
/// Declarations are yielded first, followed by bindings. Symbols whose
/// place is possibly undefined, or that have no reachable definition,
/// are skipped.
pub(crate) fn all_end_of_scope_members<'db>(
    db: &'db dyn Db,
    scope_id: ScopeId<'db>,
) -> impl Iterator<Item = MemberWithDefinition<'db>> + 'db {
    let use_def_map = use_def_map(db, scope_id);
    let table = place_table(db, scope_id);
    use_def_map
        .all_end_of_scope_symbol_declarations()
        .filter_map(move |(symbol_id, declarations)| {
            let place_result = place_from_declarations(db, declarations);
            let first_reachable_definition = place_result.first_declaration?;
            let ty = place_result
                .ignore_conflicting_declarations()
                .place
                .ignore_possibly_undefined()?;
            let symbol = table.symbol(symbol_id);
            let member = Member {
                name: symbol.name().clone(),
                ty,
            };
            Some(MemberWithDefinition {
                member,
                first_reachable_definition,
            })
        })
        .chain(use_def_map.all_end_of_scope_symbol_bindings().filter_map(
            move |(symbol_id, bindings)| {
                let PlaceWithDefinition {
                    place,
                    first_definition,
                } = place_from_bindings(db, bindings);
                let first_reachable_definition = first_definition?;
                let ty = place.ignore_possibly_undefined()?;
                let symbol = table.symbol(symbol_id);
                let member = Member {
                    name: symbol.name().clone(),
                    ty,
                };
                Some(MemberWithDefinition {
                    member,
                    first_reachable_definition,
                })
            },
        ))
}
/// Iterate over all declarations and bindings that are reachable anywhere
/// in the given scope.
///
/// For each symbol, up to two items may be yielded: one for its declaration
/// and one for its binding. Entries without a reachable definition or with a
/// possibly-undefined place are skipped.
pub(crate) fn all_reachable_members<'db>(
    db: &'db dyn Db,
    scope_id: ScopeId<'db>,
) -> impl Iterator<Item = MemberWithDefinition<'db>> + 'db {
    let use_def_map = use_def_map(db, scope_id);
    let table = place_table(db, scope_id);
    use_def_map
        .all_reachable_symbols()
        .flat_map(move |(symbol_id, declarations, bindings)| {
            let symbol = table.symbol(symbol_id);
            // Item derived from the symbol's declarations (if any).
            let declaration_place_result = place_from_declarations(db, declarations);
            let declaration =
                declaration_place_result
                    .first_declaration
                    .and_then(|first_reachable_definition| {
                        let ty = declaration_place_result
                            .ignore_conflicting_declarations()
                            .place
                            .ignore_possibly_undefined()?;
                        let member = Member {
                            name: symbol.name().clone(),
                            ty,
                        };
                        Some(MemberWithDefinition {
                            member,
                            first_reachable_definition,
                        })
                    });
            // Item derived from the symbol's bindings (if any).
            let place_with_definition = place_from_bindings(db, bindings);
            let binding =
                place_with_definition
                    .first_definition
                    .and_then(|first_reachable_definition| {
                        let ty = place_with_definition.place.ignore_possibly_undefined()?;
                        let member = Member {
                            name: symbol.name().clone(),
                            ty,
                        };
                        Some(MemberWithDefinition {
                            member,
                            first_reachable_definition,
                        })
                    });
            [declaration, binding]
        })
        .flatten()
}
// Attributes synthesized on dataclass-like classes; used by
// `AllMembers::extend_with_synthetic_members`.
//
// `__init__`, `__repr__`, `__eq__`, `__ne__` and `__hash__` are always included via `object`,
// so we don't need to list them here.
const SYNTHETIC_DATACLASS_ATTRIBUTES: &[&str] = &[
    "__lt__",
    "__le__",
    "__gt__",
    "__ge__",
    "__replace__",
    "__setattr__",
    "__delattr__",
    "__slots__",
    "__weakref__",
    "__match_args__",
    "__dataclass_fields__",
    "__dataclass_params__",
];
/// Accumulator that collects the set of [`Member`]s discovered for a type.
struct AllMembers<'db> {
    // Set semantics: `Member` hashes/compares by name only, so each name
    // appears at most once.
    members: FxHashSet<Member<'db>>,
}
impl<'db> AllMembers<'db> {
fn of(db: &'db dyn Db, ty: Type<'db>) -> Self {
let mut all_members = Self {
members: FxHashSet::default(),
};
all_members.extend_with_type(db, ty);
all_members
}
fn extend_with_type(&mut self, db: &'db dyn Db, ty: Type<'db>) {
match ty {
Type::Union(union) => self.members.extend(
union
.elements(db)
.iter()
.map(|ty| AllMembers::of(db, *ty).members)
.reduce(|acc, members| acc.intersection(&members).cloned().collect())
.unwrap_or_default(),
),
Type::Intersection(intersection) => self.members.extend(
intersection
.positive(db)
.iter()
.map(|ty| AllMembers::of(db, *ty).members)
.reduce(|acc, members| acc.union(&members).cloned().collect())
.unwrap_or_default(),
),
Type::NominalInstance(instance) => {
let (class_literal, specialization) = instance.class(db).class_literal(db);
self.extend_with_instance_members(db, ty, class_literal);
self.extend_with_synthetic_members(db, ty, class_literal, specialization);
}
Type::NewTypeInstance(newtype) => {
self.extend_with_type(db, newtype.concrete_base_type(db));
}
Type::ClassLiteral(class_literal) if class_literal.is_typed_dict(db) => {
self.extend_with_type(db, KnownClass::TypedDictFallback.to_class_literal(db));
}
Type::GenericAlias(generic_alias) if generic_alias.is_typed_dict(db) => {
self.extend_with_type(db, KnownClass::TypedDictFallback.to_class_literal(db));
}
Type::SubclassOf(subclass_of_type) if subclass_of_type.is_typed_dict(db) => {
self.extend_with_type(db, KnownClass::TypedDictFallback.to_class_literal(db));
}
Type::ClassLiteral(class_literal) => {
self.extend_with_class_members(db, ty, class_literal);
self.extend_with_synthetic_members(db, ty, class_literal, None);
if let Type::ClassLiteral(metaclass) = class_literal.metaclass(db) {
self.extend_with_class_members(db, ty, metaclass);
}
}
Type::GenericAlias(generic_alias) => {
let class_literal = generic_alias.origin(db);
self.extend_with_class_members(db, ty, class_literal);
self.extend_with_synthetic_members(db, ty, class_literal, None);
if let Type::ClassLiteral(metaclass) = class_literal.metaclass(db) {
self.extend_with_class_members(db, ty, metaclass);
}
}
Type::SubclassOf(subclass_of_type) => match subclass_of_type.subclass_of() {
SubclassOfInner::Dynamic(_) => {
self.extend_with_type(db, KnownClass::Type.to_instance(db));
}
_ => {
if let Some(class_type) = subclass_of_type.subclass_of().into_class(db) {
let (class_literal, specialization) = class_type.class_literal(db);
self.extend_with_class_members(db, ty, class_literal);
self.extend_with_synthetic_members(db, ty, class_literal, specialization);
if let Type::ClassLiteral(metaclass) = class_literal.metaclass(db) {
self.extend_with_class_members(db, ty, metaclass);
}
}
}
},
Type::Dynamic(_) | Type::Never | Type::AlwaysTruthy | Type::AlwaysFalsy => {
self.extend_with_type(db, Type::object());
}
Type::TypeAlias(alias) => self.extend_with_type(db, alias.value_type(db)),
Type::TypeVar(bound_typevar) => {
match bound_typevar.typevar(db).bound_or_constraints(db) {
None => {
self.extend_with_type(db, Type::object());
}
Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
self.extend_with_type(db, bound);
}
Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
self.members.extend(
constraints
.elements(db)
.iter()
.map(|ty| AllMembers::of(db, *ty).members)
.reduce(|acc, members| {
acc.intersection(&members).cloned().collect()
})
.unwrap_or_default(),
);
}
}
}
Type::IntLiteral(_)
| Type::BooleanLiteral(_)
| Type::StringLiteral(_)
| Type::BytesLiteral(_)
| Type::EnumLiteral(_)
| Type::LiteralString
| Type::PropertyInstance(_)
| Type::FunctionLiteral(_)
| Type::BoundMethod(_)
| Type::KnownBoundMethod(_)
| Type::WrapperDescriptor(_)
| Type::DataclassDecorator(_)
| Type::DataclassTransformer(_)
| Type::Callable(_)
| Type::ProtocolInstance(_)
| Type::SpecialForm(_)
| Type::KnownInstance(_)
| Type::BoundSuper(_)
| Type::TypeIs(_)
| Type::TypeGuard(_) => match ty.to_meta_type(db) {
Type::ClassLiteral(class_literal) => {
self.extend_with_class_members(db, ty, class_literal);
}
Type::SubclassOf(subclass_of) => {
if let Some(class) = subclass_of.subclass_of().into_class(db) {
self.extend_with_class_members(db, ty, class.class_literal(db).0);
}
}
Type::GenericAlias(generic_alias) => {
let class_literal = generic_alias.origin(db);
self.extend_with_class_members(db, ty, class_literal);
}
_ => {}
},
Type::TypedDict(_) => {
if let Type::ClassLiteral(class_literal) = ty.to_meta_type(db) {
self.extend_with_class_members(db, ty, class_literal);
}
if let Type::ClassLiteral(class) =
KnownClass::TypedDictFallback.to_class_literal(db)
{
self.extend_with_instance_members(db, ty, class);
}
}
Type::ModuleLiteral(literal) => {
self.extend_with_type(db, KnownClass::ModuleType.to_instance(db));
let module = literal.module(db);
let Some(file) = module.file(db) else {
return;
};
let module_scope = global_scope(db, file);
let use_def_map = use_def_map(db, module_scope);
let place_table = place_table(db, module_scope);
for (symbol_id, _) in use_def_map.all_end_of_scope_symbol_declarations() {
let symbol_name = place_table.symbol(symbol_id).name();
let Place::Defined(ty, _, _, _) =
imported_symbol(db, file, symbol_name, None).place
else {
continue;
};
// Filter private symbols from stubs if they appear to be internal types
let is_stub_file = file.path(db).extension() == Some("pyi");
let is_private_symbol = match NameKind::classify(symbol_name) {
NameKind::Dunder | NameKind::Normal => false,
NameKind::Sunder => true,
};
if is_private_symbol && is_stub_file {
match ty {
Type::NominalInstance(instance)
if matches!(
instance.known_class(db),
Some(
KnownClass::TypeVar
| KnownClass::TypeVarTuple
| KnownClass::ParamSpec
| KnownClass::UnionType
)
) =>
{
continue;
}
Type::ClassLiteral(class) if class.is_protocol(db) => continue,
Type::KnownInstance(
KnownInstanceType::TypeVar(_)
| KnownInstanceType::TypeAliasType(_)
| KnownInstanceType::UnionType(_)
| KnownInstanceType::Literal(_)
| KnownInstanceType::Annotated(_),
) => continue,
_ => {}
}
}
self.members.insert(Member {
name: symbol_name.clone(),
ty,
});
}
self.members
.extend(literal.available_submodule_attributes(db).filter_map(
|submodule_name| {
let ty = literal.resolve_submodule(db, &submodule_name)?;
let name = submodule_name.clone();
Some(Member { name, ty })
},
));
}
}
}
/// Add members from `class_literal` (including following its
/// parent classes).
///
/// `ty` should be the original type that we're adding members for.
/// For example, in:
///
/// ```text
/// class Meta(type):
/// @property
/// def meta_attr(self) -> int:
/// return 0
///
/// class C(metaclass=Meta): ...
///
/// C.<CURSOR>
/// ```
///
/// then `class_literal` might be `Meta`, but `ty` should be the
/// type of `C`. This ensures that the descriptor protocol is
/// correctly used (or not used) to get the type of each member of
/// `C`.
fn extend_with_class_members(
&mut self,
db: &'db dyn Db,
ty: Type<'db>,
class_literal: ClassLiteral<'db>,
) {
for parent in class_literal
.iter_mro(db, None)
.filter_map(ClassBase::into_class)
.map(|class| class.class_literal(db).0)
{
let parent_scope = parent.body_scope(db);
for memberdef in all_end_of_scope_members(db, parent_scope) {
let result = ty.member(db, memberdef.member.name.as_str());
let Some(ty) = result.place.ignore_possibly_undefined() else {
continue;
};
self.members.insert(Member {
name: memberdef.member.name,
ty,
});
}
}
}
fn extend_with_instance_members(
&mut self,
db: &'db dyn Db,
ty: Type<'db>,
class_literal: ClassLiteral<'db>,
) {
for parent in class_literal
.iter_mro(db, None)
.filter_map(ClassBase::into_class)
.map(|class| class.class_literal(db).0)
{
let class_body_scope = parent.body_scope(db);
let file = class_body_scope.file(db);
let index = semantic_index(db, file);
for function_scope_id in attribute_scopes(db, class_body_scope) {
for place_expr in index.place_table(function_scope_id).members() {
let Some(name) = place_expr.as_instance_attribute() else {
continue;
};
let result = ty.member(db, name);
let Some(ty) = result.place.ignore_possibly_undefined() else {
continue;
};
self.members.insert(Member {
name: Name::new(name),
ty,
});
}
}
// This is very similar to `extend_with_class_members`,
// but uses the type of the class instance to query the
// class member. This gets us the right type for each
// member, e.g., `SomeClass.__delattr__` is not a bound
// method, but `instance_of_SomeClass.__delattr__` is.
for memberdef in all_end_of_scope_members(db, class_body_scope) {
let result = ty.member(db, memberdef.member.name.as_str());
let Some(ty) = result.place.ignore_possibly_undefined() else {
continue;
};
self.members.insert(Member {
name: memberdef.member.name,
ty,
});
}
}
}
/// Add members synthesized by code generators (`NamedTuple`, `TypedDict`,
/// dataclass-like transforms) rather than written out in the class body.
fn extend_with_synthetic_members(
    &mut self,
    db: &'db dyn Db,
    ty: Type<'db>,
    class_literal: ClassLiteral<'db>,
    specialization: Option<Specialization<'db>>,
) {
    match CodeGeneratorKind::from_class(db, class_literal, specialization) {
        Some(CodeGeneratorKind::NamedTuple) => {
            // NamedTuple members come from the `NamedTupleFallback` class;
            // use its instance or class-literal form to match `ty`.
            if ty.is_nominal_instance() {
                self.extend_with_type(db, KnownClass::NamedTupleFallback.to_instance(db));
            } else {
                self.extend_with_type(db, KnownClass::NamedTupleFallback.to_class_literal(db));
            }
        }
        // No synthetic attribute members are added for TypedDicts here.
        Some(CodeGeneratorKind::TypedDict) => {}
        Some(CodeGeneratorKind::DataclassLike(_)) => {
            // Only add the synthesized attribute if it resolves to a defined
            // place on `ty`.
            for attr in SYNTHETIC_DATACLASS_ATTRIBUTES {
                if let Place::Defined(synthetic_member, _, _, _) = ty.member(db, attr).place {
                    self.members.insert(Member {
                        name: Name::from(*attr),
                        ty: synthetic_member,
                    });
                }
            }
        }
        None => {}
    }
}
}
/// A member of a type or scope, with the first reachable definition of that member.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct MemberWithDefinition<'db> {
    /// The member itself (name and type).
    pub member: Member<'db>,
    /// The first reachable definition of this member.
    pub first_reachable_definition: Definition<'db>,
}
/// A member of a type or scope.
///
/// In the context of the [`all_members`] routine, this represents
/// a single item in (ideally) the list returned by `dir(object)`.
///
/// The equality, comparison and hashing traits implemented for
/// this type are done so by taking only the name into account. At
/// present, this is because we assume the name is enough to uniquely
/// identify each attribute on an object. This is perhaps complicated
/// by overloads, but they only get represented by one member for
/// now. Moreover, it is convenient to be able to sort collections of
/// members, and a [`Type`] currently (as of 2025-07-09) has no way to do
/// ordered comparisons.
#[derive(Clone, Debug)]
pub struct Member<'db> {
    /// The member's name.
    pub name: Name,
    /// The member's type.
    pub ty: Type<'db>,
}
// All of the trait impls below compare/hash only `name`, deliberately
// ignoring `ty`; see the documentation on `Member` for the rationale.

impl std::hash::Hash for Member<'_> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state);
    }
}

impl Eq for Member<'_> {}

impl<'db> PartialEq for Member<'db> {
    fn eq(&self, rhs: &Member<'db>) -> bool {
        self.name == rhs.name
    }
}

impl<'db> Ord for Member<'db> {
    fn cmp(&self, rhs: &Member<'db>) -> Ordering {
        self.name.cmp(&rhs.name)
    }
}

impl<'db> PartialOrd for Member<'db> {
    fn partial_cmp(&self, rhs: &Member<'db>) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}
/// List all members of a given type: anything that would be valid when accessed
/// as an attribute on an object of the given type.
///
/// Note that the returned set deduplicates members by name only, since
/// `Member`'s `Eq`/`Hash` impls consider only the name.
pub fn all_members<'db>(db: &'db dyn Db, ty: Type<'db>) -> FxHashSet<Member<'db>> {
    AllMembers::of(db, ty).members
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/newtype.rs | crates/ty_python_semantic/src/types/newtype.rs | use std::collections::BTreeSet;
use crate::Db;
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::types::constraints::ConstraintSet;
use crate::types::{ClassType, KnownUnion, Type, definition_expression_type, visitor};
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
/// A `typing.NewType` declaration, either from the perspective of the
/// identity-callable-that-acts-like-a-subtype-in-type-expressions returned by the call to
/// `typing.NewType(...)`, or from the perspective of instances of that subtype returned by the
/// identity callable. For example:
///
/// ```py
/// import typing
/// Foo = typing.NewType("Foo", int)
/// x = Foo(42)
/// ```
///
/// The revealed types there are:
/// - `typing.NewType`: `Type::ClassLiteral(ClassLiteral)` with `KnownClass::NewType`.
/// - `Foo`: `Type::KnownInstance(KnownInstanceType::NewType(NewType { .. }))`
/// - `x`: `Type::NewTypeInstance(NewType { .. })`
///
/// # Ordering
/// Ordering is based on the newtype's salsa-assigned id and not on its values.
/// The id may change between runs, or when the newtype was garbage collected and recreated.
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
// See "Ordering" above: these derives compare interned salsa ids, not field values.
#[derive(PartialOrd, Ord)]
pub struct NewType<'db> {
    /// The name of this NewType (e.g. `"Foo"`)
    #[returns(ref)]
    pub name: ast::name::Name,

    /// The binding where this NewType is first created.
    pub definition: Definition<'db>,

    // The base type of this NewType, if it's eagerly specified. This is typically `None` when a
    // `NewType` is first encountered, because the base type is lazy/deferred to avoid panics in
    // the recursive case. This becomes `Some` when a `NewType` is modified by methods like
    // `.normalize()`. Callers should use the `base` method instead of accessing this field
    // directly.
    eager_base: Option<NewTypeBase<'db>>,
}
impl get_size2::GetSize for NewType<'_> {}
#[salsa::tracked]
impl<'db> NewType<'db> {
    /// The base of this newtype: the eagerly-stored base if present, otherwise
    /// the lazily-resolved one.
    pub fn base(self, db: &'db dyn Db) -> NewTypeBase<'db> {
        match self.eager_base(db) {
            Some(base) => base,
            None => self.lazy_base(db),
        }
    }

    /// Resolve the base from the `NewType(...)` call expression in this
    /// newtype's definition. Tracked (with cycle recovery) because resolution
    /// may be self-referential for recursive newtypes.
    #[salsa::tracked(
        cycle_initial=lazy_base_cycle_initial,
        heap_size=ruff_memory_usage::heap_size
    )]
    fn lazy_base(self, db: &'db dyn Db) -> NewTypeBase<'db> {
        // `TypeInferenceBuilder` emits diagnostics for invalid `NewType` definitions that show up
        // in assignments, but invalid definitions still get here, and also `NewType` might show up
        // in places that aren't definitions at all. Fall back to `object` in all error cases.
        let object_fallback = NewTypeBase::ClassType(ClassType::object(db));
        let definition = self.definition(db);
        let module = parsed_module(db, definition.file(db)).load(db);
        let DefinitionKind::Assignment(assignment) = definition.kind(db) else {
            return object_fallback;
        };
        let Some(call_expr) = assignment.value(&module).as_call_expr() else {
            return object_fallback;
        };
        // The base is the second argument: `NewType("Name", Base)`.
        let Some(second_arg) = call_expr.arguments.args.get(1) else {
            return object_fallback;
        };
        match definition_expression_type(db, definition, second_arg) {
            Type::NominalInstance(nominal_instance_type) => {
                NewTypeBase::ClassType(nominal_instance_type.class(db))
            }
            Type::NewTypeInstance(newtype) => NewTypeBase::NewType(newtype),
            // There are exactly two union types allowed as bases for NewType: `int | float` and
            // `int | float | complex`. These are allowed because that's what `float` and `complex`
            // expand into in type position. We don't currently ask whether the union was implicit
            // or explicit, so the explicit version is also allowed.
            Type::Union(union_type) => match union_type.known(db) {
                Some(KnownUnion::Float) => NewTypeBase::Float,
                Some(KnownUnion::Complex) => NewTypeBase::Complex,
                _ => object_fallback,
            },
            _ => object_fallback,
        }
    }

    /// Iterate over this newtype's transitive bases; see [`NewTypeBaseIter`]
    /// for the yielded order and the cycle-detection behavior.
    fn iter_bases(self, db: &'db dyn Db) -> NewTypeBaseIter<'db> {
        NewTypeBaseIter {
            current: Some(self),
            seen_before: BTreeSet::new(),
            db,
        }
    }

    // Walk the `NewTypeBase` chain to find the underlying non-newtype `Type`. There might not be
    // one if this `NewType` is cyclical, and we fall back to `object` in that case.
    pub fn concrete_base_type(self, db: &'db dyn Db) -> Type<'db> {
        for base in self.iter_bases(db) {
            match base {
                NewTypeBase::NewType(_) => continue,
                concrete => return concrete.instance_type(db),
            }
        }
        Type::object()
    }

    /// Whether `self` and `other` refer to the same `NewType` declaration.
    pub(crate) fn is_equivalent_to_impl(self, db: &'db dyn Db, other: Self) -> bool {
        // Two instances of the "same" `NewType` won't compare == if one of them has an eagerly
        // evaluated base (or a normalized base, etc.) and the other doesn't, so we only check for
        // equality of the `definition`.
        self.definition(db) == other.definition(db)
    }

    // Since a regular class can't inherit from a newtype, the only way for one newtype to be a
    // subtype of another is to have the other in its chain of newtype bases. Once we reach the
    // base class, we don't have to keep looking.
    pub(crate) fn has_relation_to_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> {
        if self.is_equivalent_to_impl(db, other) {
            return ConstraintSet::from(true);
        }
        for base in self.iter_bases(db) {
            if let NewTypeBase::NewType(base_newtype) = base {
                if base_newtype.is_equivalent_to_impl(db, other) {
                    return ConstraintSet::from(true);
                }
            }
        }
        ConstraintSet::from(false)
    }

    pub(crate) fn is_disjoint_from_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> {
        // Two NewTypes are disjoint if they're not equal and neither inherits from the other.
        // NewTypes have single inheritance, and a regular class can't inherit from a NewType, so
        // it's not possible for some third type to multiply-inherit from both.
        let mut self_not_subtype_of_other = self.has_relation_to_impl(db, other).negate(db);
        let other_not_subtype_of_self = other.has_relation_to_impl(db, self).negate(db);
        self_not_subtype_of_other.intersect(db, other_not_subtype_of_self)
    }

    /// Create a new `NewType` by mapping the underlying `ClassType`. This descends through any
    /// number of nested `NewType` layers and rebuilds the whole chain. In the rare case of cyclic
    /// `NewType`s with no underlying `ClassType`, this has no effect and does not call `f`.
    ///
    /// Returns `None` iff `f` is called and returns `None`.
    pub(crate) fn try_map_base_class_type(
        self,
        db: &'db dyn Db,
        f: impl FnOnce(ClassType<'db>) -> Option<ClassType<'db>>,
    ) -> Option<Self> {
        // Modifying the base class type requires unwrapping and re-wrapping however many base
        // newtypes there are between here and there. Normally recursion would be natural for this,
        // but the bases iterator does cycle detection, and I think using that with a stack is a
        // little cleaner than conjuring up yet another `CycleDetector` visitor and yet another
        // layer of "*_impl" nesting. Also if there is no base class type, returning `self`
        // unmodified seems more correct than injecting some default type like `object` into the
        // cycle, which is what `CycleDetector` would do if we used it here.
        let mut inner_newtype_stack = Vec::new();
        for base in self.iter_bases(db) {
            match base {
                // Build up the stack of intermediate newtypes that we'll need to re-wrap after
                // we've mapped the `ClassType`.
                NewTypeBase::NewType(base_newtype) => inner_newtype_stack.push(base_newtype),
                // We've reached the `ClassType`.
                NewTypeBase::ClassType(base_class_type) => {
                    // Call `f`.
                    let mut mapped_base = NewTypeBase::ClassType(f(base_class_type)?);
                    // Re-wrap the mapped base class in however many newtypes we unwrapped.
                    for inner_newtype in inner_newtype_stack.into_iter().rev() {
                        mapped_base = NewTypeBase::NewType(NewType::new(
                            db,
                            inner_newtype.name(db).clone(),
                            inner_newtype.definition(db),
                            Some(mapped_base),
                        ));
                    }
                    return Some(NewType::new(
                        db,
                        self.name(db).clone(),
                        self.definition(db),
                        Some(mapped_base),
                    ));
                }
                // Mapping base class types is used for normalization and applying type mappings,
                // neither of which have any effect on `float` or `complex` (which are already
                // fully normalized and non-generic), so we don't need to bother calling `f`.
                NewTypeBase::Float | NewTypeBase::Complex => {}
            }
        }
        // If we get here, there is no `ClassType` (because this newtype is either float/complex or
        // cyclic), and we don't call `f` at all.
        Some(self)
    }

    /// Infallible variant of [`NewType::try_map_base_class_type`]; the
    /// `unwrap` cannot fail because the closure always returns `Some`.
    pub(crate) fn map_base_class_type(
        self,
        db: &'db dyn Db,
        f: impl FnOnce(ClassType<'db>) -> ClassType<'db>,
    ) -> Self {
        self.try_map_base_class_type(db, |class_type| Some(f(class_type)))
            .unwrap()
    }
}
/// Visit the instance type of `newtype`'s base with `visitor`, as part of a
/// generic type traversal.
pub(crate) fn walk_newtype_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
    db: &'db dyn Db,
    newtype: NewType<'db>,
    visitor: &V,
) {
    visitor.visit_type(db, newtype.base(db).instance_type(db));
}
/// `typing.NewType` typically wraps a class type, but it can also wrap another newtype.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)]
pub enum NewTypeBase<'db> {
    /// The common case: the base is a class, e.g. `NewType("Foo", int)`.
    ClassType(ClassType<'db>),
    /// The base is itself a newtype, e.g. `NewType("Bar", Foo)`.
    NewType(NewType<'db>),
    // `float` and `complex` are special-cased in type position, where they refer to `int | float`
    // and `int | float | complex` respectively. As an extension of that special case, we allow
    // them in `NewType` bases, even though unions and other typing constructs normally aren't
    // allowed.
    Float,
    Complex,
}
impl<'db> NewTypeBase<'db> {
    /// The `Type` that instances of this base have.
    pub fn instance_type(self, db: &'db dyn Db) -> Type<'db> {
        match self {
            Self::ClassType(class) => Type::instance(db, class),
            Self::NewType(inner) => Type::NewTypeInstance(inner),
            // The float/complex special cases expand to their known unions.
            Self::Float => KnownUnion::Float.to_type(db),
            Self::Complex => KnownUnion::Complex.to_type(db),
        }
    }
}
/// An iterator over the transitive bases of a `NewType`. In the most common case, e.g.
/// `Foo = NewType("Foo", int)`, this yields the one `NewTypeBase::ClassType` (e.g. `int`). For
/// newtypes that wrap other newtypes, this iterator yields the `NewTypeBase::NewType`s (not
/// including `self`) before finally yielding the `NewTypeBase::ClassType`. In the pathological
/// case of cyclic newtypes like `Foo = NewType("Foo", "Foo")`, this iterator yields the unique
/// `NewTypeBase::NewType`s (not including `self`), detects the cycle, and then stops.
///
/// Note that this does *not* detect indirect cycles that go through a proper class, like this:
/// ```py
/// Foo = NewType("Foo", list["Foo"])
/// ```
/// As far as this iterator is concerned, that's the "common case", and it yields the one
/// `NewTypeBase::ClassType` for `list[Foo]`. Functions like `normalize` that continue recursing
/// over the base class need to pass down a cycle-detecting visitor as usual.
struct NewTypeBaseIter<'db> {
    /// The newtype whose base is yielded next; `None` once iteration is finished.
    current: Option<NewType<'db>>,
    /// Newtypes already visited, used for cycle detection.
    seen_before: BTreeSet<NewType<'db>>,
    db: &'db dyn Db,
}
impl<'db> Iterator for NewTypeBaseIter<'db> {
    type Item = NewTypeBase<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        // `take()` leaves `current` as `None`; it is re-populated only when
        // iteration should continue through another newtype layer.
        let newtype = self.current.take()?;
        match newtype.base(self.db) {
            NewTypeBase::NewType(inner) => {
                // Doing the insertion only in this branch avoids allocating in the common case.
                self.seen_before.insert(newtype);
                if self.seen_before.contains(&inner) {
                    // Cycle detected. Stop iterating.
                    None
                } else {
                    self.current = Some(inner);
                    Some(NewTypeBase::NewType(inner))
                }
            }
            // A concrete (non-newtype) base terminates the walk.
            concrete => Some(concrete),
        }
    }
}
/// Salsa cycle-recovery initial value for `NewType::lazy_base`: fall back to
/// `object`, matching `lazy_base`'s own error fallback.
fn lazy_base_cycle_initial<'db>(
    db: &'db dyn Db,
    _id: salsa::Id,
    _self: NewType<'db>,
) -> NewTypeBase<'db> {
    NewTypeBase::ClassType(ClassType::object(db))
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/infer.rs | crates/ty_python_semantic/src/types/infer.rs | //! We have Salsa queries for inferring types at three different granularities: scope-level,
//! definition-level, and expression-level.
//!
//! Scope-level inference is for when we are actually checking a file, and need to check types for
//! everything in that file's scopes, or give a linter access to types of arbitrary expressions
//! (via the [`HasType`](crate::semantic_model::HasType) trait).
//!
//! Definition-level inference allows us to look up the types of places in other scopes (e.g. for
//! imports) with the minimum inference necessary, so that if we're looking up one place from a
//! very large module, we can avoid a bunch of unnecessary work. Definition-level inference also
//! allows us to handle import cycles without getting into a cycle of scope-level inference
//! queries.
//!
//! The expression-level inference query is needed in only a few cases. Since some assignments can
//! have multiple targets (via `x = y = z` or unpacking `(x, y) = z`), they can be associated with
//! multiple definitions (one per assigned place). In order to avoid inferring the type of the
//! right-hand side once per definition, we infer it as a standalone query, so its result will be
//! cached by Salsa. We also need the expression-level query for inferring types in type guard
//! expressions (e.g. the test clause of an `if` statement.)
//!
//! Inferring types at any of the three region granularities returns a [`ExpressionInference`],
//! [`DefinitionInference`], or [`ScopeInference`], which hold the types for every expression
//! within the inferred region. Some inference types also expose the type of every definition
//! within the inferred region.
//!
//! Some type expressions can require deferred evaluation. This includes all type expressions in
//! stub files, or annotation expressions in modules with `from __future__ import annotations`, or
//! stringified annotations. We have a fourth Salsa query for inferring the deferred types
//! associated with a particular definition. Scope-level inference infers deferred types for all
//! definitions once the rest of the types in the scope have been inferred.
//!
//! Many of our type inference Salsa queries implement cycle recovery via fixed-point iteration. In
//! general, they initiate fixed-point iteration by returning an `Inference` type that returns
//! the `Divergent` type for all expressions, bindings, and declarations, and then they continue iterating
//! the query cycle until a fixed-point is reached. Salsa has a built-in fixed limit on the number
//! of iterations, so if we fail to converge, Salsa will eventually panic. (This should of course
//! be considered a bug.)
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_text_size::Ranged;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa;
use salsa::plumbing::AsId;
use crate::Db;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::scope::ScopeId;
use crate::semantic_index::{SemanticIndex, semantic_index};
use crate::types::diagnostic::TypeCheckDiagnostics;
use crate::types::function::FunctionType;
use crate::types::generics::Specialization;
use crate::types::unpacker::{UnpackResult, Unpacker};
use crate::types::{
ClassLiteral, KnownClass, Truthiness, Type, TypeAndQualifiers, declaration_type,
};
use crate::unpack::Unpack;
use builder::TypeInferenceBuilder;
pub(super) use builder::UnsupportedComparisonError;
mod builder;
#[cfg(test)]
mod tests;
/// Infer all types for a [`ScopeId`], including all definitions and expressions in that scope.
/// Use when checking a scope, or needing to provide a type for an arbitrary expression in the
/// scope.
#[salsa::tracked(returns(ref), cycle_fn=scope_cycle_recover, cycle_initial=scope_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> ScopeInference<'db> {
    let file = scope.file(db);
    let _span = tracing::trace_span!("infer_scope_types", scope=?scope.as_id(), ?file).entered();

    let module = parsed_module(db, file).load(db);

    // Using the index here is fine because the code below depends on the AST anyway.
    // The isolation of the query is by the return inferred types.
    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Scope(scope), index, &module).finish_scope()
}

/// Salsa cycle recovery for [`infer_scope_types`]: normalize the new result
/// against the previous iteration's result so fixed-point iteration converges.
fn scope_cycle_recover<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_inference: &ScopeInference<'db>,
    inference: ScopeInference<'db>,
    _scope: ScopeId<'db>,
) -> ScopeInference<'db> {
    inference.cycle_normalized(db, previous_inference, cycle)
}

/// Initial value for [`infer_scope_types`] cycles: an inference that falls
/// back to the `Divergent` type for this query id.
fn scope_cycle_initial<'db>(
    _db: &'db dyn Db,
    id: salsa::Id,
    _scope: ScopeId<'db>,
) -> ScopeInference<'db> {
    ScopeInference::cycle_initial(Type::divergent(id))
}
/// Infer all types for a [`Definition`] (including sub-expressions).
/// Use when resolving a place use or public type of a place.
#[salsa::tracked(returns(ref), cycle_fn=definition_cycle_recover, cycle_initial=definition_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn infer_definition_types<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
) -> DefinitionInference<'db> {
    let file = definition.file(db);
    let module = parsed_module(db, file).load(db);
    let _span = tracing::trace_span!(
        "infer_definition_types",
        range = ?definition.kind(db).target_range(&module),
        ?file
    )
    .entered();

    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Definition(definition), index, &module)
        .finish_definition()
}

/// Salsa cycle recovery for [`infer_definition_types`].
fn definition_cycle_recover<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_inference: &DefinitionInference<'db>,
    inference: DefinitionInference<'db>,
    _definition: Definition<'db>,
) -> DefinitionInference<'db> {
    inference.cycle_normalized(db, previous_inference, cycle)
}

/// Initial value for [`infer_definition_types`] cycles: a `Divergent`-typed
/// inference for the definition's scope.
fn definition_cycle_initial<'db>(
    db: &'db dyn Db,
    id: salsa::Id,
    definition: Definition<'db>,
) -> DefinitionInference<'db> {
    DefinitionInference::cycle_initial(definition.scope(db), Type::divergent(id))
}
/// Infer types for all deferred type expressions in a [`Definition`].
///
/// Deferred expressions are type expressions (annotations, base classes, aliases...) in a stub
/// file, or in a file with `from __future__ import annotations`, or stringified annotations.
#[salsa::tracked(returns(ref), cycle_fn=deferred_cycle_recovery, cycle_initial=deferred_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn infer_deferred_types<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
) -> DefinitionInference<'db> {
    let file = definition.file(db);
    let module = parsed_module(db, file).load(db);
    let _span = tracing::trace_span!(
        "infer_deferred_types",
        definition = ?definition.as_id(),
        range = ?definition.kind(db).target_range(&module),
        ?file
    )
    .entered();

    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(db, InferenceRegion::Deferred(definition), index, &module)
        .finish_definition()
}

/// Salsa cycle recovery for [`infer_deferred_types`].
fn deferred_cycle_recovery<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_inference: &DefinitionInference<'db>,
    inference: DefinitionInference<'db>,
    _definition: Definition<'db>,
) -> DefinitionInference<'db> {
    inference.cycle_normalized(db, previous_inference, cycle)
}

/// Initial value for [`infer_deferred_types`] cycles: same shape as the
/// initial value for [`infer_definition_types`].
fn deferred_cycle_initial<'db>(
    db: &'db dyn Db,
    id: salsa::Id,
    definition: Definition<'db>,
) -> DefinitionInference<'db> {
    DefinitionInference::cycle_initial(definition.scope(db), Type::divergent(id))
}
/// Infer all types for an [`Expression`] (including sub-expressions).
/// Use rarely; only for cases where we'd otherwise risk double-inferring an expression: RHS of an
/// assignment, which might be unpacking/multi-target and thus part of multiple definitions, or a
/// type narrowing guard expression (e.g. if statement test node).
pub(crate) fn infer_expression_types<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
    tcx: TypeContext<'db>,
) -> &'db ExpressionInference<'db> {
    infer_expression_types_impl(db, InferExpression::new(db, expression, tcx))
}

/// Tracked implementation of [`infer_expression_types`], keyed by
/// [`InferExpression`] so an empty type context avoids an interned struct.
#[salsa::tracked(returns(ref), cycle_fn=expression_cycle_recover, cycle_initial=expression_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(super) fn infer_expression_types_impl<'db>(
    db: &'db dyn Db,
    input: InferExpression<'db>,
) -> ExpressionInference<'db> {
    let (expression, tcx) = (input.expression(db), input.tcx(db));

    let file = expression.file(db);
    let module = parsed_module(db, file).load(db);
    let _span = tracing::trace_span!(
        "infer_expression_types",
        expression = ?expression.as_id(),
        range = ?expression.node_ref(db, &module).range(),
        ?file
    )
    .entered();

    let index = semantic_index(db, file);

    TypeInferenceBuilder::new(
        db,
        InferenceRegion::Expression(expression, tcx),
        index,
        &module,
    )
    .finish_expression()
}

/// Salsa cycle recovery for [`infer_expression_types_impl`].
fn expression_cycle_recover<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_inference: &ExpressionInference<'db>,
    inference: ExpressionInference<'db>,
    _input: InferExpression<'db>,
) -> ExpressionInference<'db> {
    inference.cycle_normalized(db, previous_inference, cycle)
}

/// Initial value for [`infer_expression_types_impl`] cycles.
fn expression_cycle_initial<'db>(
    _db: &'db dyn Db,
    id: salsa::Id,
    input: InferExpression<'db>,
) -> ExpressionInference<'db> {
    let cycle_recovery = Type::divergent(id);
    ExpressionInference::cycle_initial(input.expression(db).scope(db), cycle_recovery)
}
/// Infers the type of an `expression` that is guaranteed to be in the same file as the calling
/// query.
///
/// This is a small helper around [`infer_expression_types()`] to reduce the boilerplate.
/// Use [`infer_expression_type()`] if it isn't guaranteed that `expression` is in the same file,
/// to avoid cross-file query dependencies.
pub(crate) fn infer_same_file_expression_type<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
    tcx: TypeContext<'db>,
    parsed: &ParsedModuleRef,
) -> Type<'db> {
    let node = expression.node_ref(db, parsed);
    infer_expression_types(db, expression, tcx).expression_type(node)
}
/// Infers the type of an expression where the expression might come from another file.
///
/// Use this over [`infer_expression_types`] if the expression might come from another file than the
/// enclosing query to avoid cross-file query dependencies.
///
/// Use [`infer_same_file_expression_type`] if it is guaranteed that `expression` is in the same
/// file to avoid unnecessary salsa ingredients. This is normally the case inside the `TypeInferenceBuilder`.
pub(crate) fn infer_expression_type<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
    tcx: TypeContext<'db>,
) -> Type<'db> {
    infer_expression_type_impl(db, InferExpression::new(db, expression, tcx))
}

/// Tracked implementation of [`infer_expression_type`], keyed by [`InferExpression`].
#[salsa::tracked(cycle_fn=single_expression_cycle_recover, cycle_initial=single_expression_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
fn infer_expression_type_impl<'db>(db: &'db dyn Db, input: InferExpression<'db>) -> Type<'db> {
    let file = input.expression(db).file(db);
    let module = parsed_module(db, file).load(db);

    // It's okay to call the "same file" version here because we're inside a salsa query.
    let inference = infer_expression_types_impl(db, input);
    inference.expression_type(input.expression(db).node_ref(db, &module))
}

/// Salsa cycle recovery for [`infer_expression_type_impl`].
fn single_expression_cycle_recover<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_cycle_value: &Type<'db>,
    result: Type<'db>,
    _input: InferExpression<'db>,
) -> Type<'db> {
    result.cycle_normalized(db, *previous_cycle_value, cycle)
}

/// Initial value for [`infer_expression_type_impl`] cycles: the `Divergent`
/// type for this query id.
fn single_expression_cycle_initial<'db>(
    _db: &'db dyn Db,
    id: salsa::Id,
    _input: InferExpression<'db>,
) -> Type<'db> {
    Type::divergent(id)
}
/// An `Expression` with an optional `TypeContext`.
///
/// This is a Salsa supertype used as the input to `infer_expression_types` to avoid
/// interning an `ExpressionWithContext` unnecessarily when no type context is provided.
#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq, salsa::Supertype, salsa::Update)]
pub(super) enum InferExpression<'db> {
    /// No type context: just the expression itself.
    Bare(Expression<'db>),
    /// The expression plus a non-empty type context, interned together.
    WithContext(ExpressionWithContext<'db>),
}
impl<'db> InferExpression<'db> {
    /// Wrap `expression`, interning an `ExpressionWithContext` only when a
    /// type context is actually present.
    pub(super) fn new(
        db: &'db dyn Db,
        expression: Expression<'db>,
        tcx: TypeContext<'db>,
    ) -> InferExpression<'db> {
        match tcx.annotation {
            // Drop the empty `TypeContext` to avoid the interning cost.
            None => InferExpression::Bare(expression),
            Some(_) => {
                InferExpression::WithContext(ExpressionWithContext::new(db, expression, tcx))
            }
        }
    }

    /// The wrapped expression.
    fn expression(self, db: &'db dyn Db) -> Expression<'db> {
        match self {
            InferExpression::Bare(expr) => expr,
            InferExpression::WithContext(with_ctx) => with_ctx.expression(db),
        }
    }

    /// The type context, or an empty one for the bare variant.
    fn tcx(self, db: &'db dyn Db) -> TypeContext<'db> {
        match self {
            InferExpression::Bare(_) => TypeContext::default(),
            InferExpression::WithContext(with_ctx) => with_ctx.tcx(db),
        }
    }
}
/// An `Expression` with a `TypeContext`.
///
/// Only interned when the context is non-empty; see `InferExpression::new`.
#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)]
pub(super) struct ExpressionWithContext<'db> {
    expression: Expression<'db>,
    tcx: TypeContext<'db>,
}
/// The type context for a given expression, namely the type annotation
/// in an annotated assignment.
///
/// Knowing the outer type context when inferring an expression can enable
/// more precise inference results, aka "bidirectional type inference".
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize, salsa::Update)]
pub(crate) struct TypeContext<'db> {
    /// The annotated type, if any; `None` means "no context".
    pub(crate) annotation: Option<Type<'db>>,
}
impl<'db> TypeContext<'db> {
    /// Build a context from an optional annotation type.
    pub(crate) fn new(annotation: Option<Type<'db>>) -> Self {
        Self { annotation }
    }

    // If the type annotation is a specialized instance of the given `KnownClass`, returns the
    // specialization.
    fn known_specialization(
        &self,
        db: &'db dyn Db,
        known_class: KnownClass,
    ) -> Option<Specialization<'db>> {
        let annotation = self.annotation?;
        annotation.known_specialization(db, known_class)
    }

    /// Apply `f` to the annotation, if present.
    pub(crate) fn map(self, f: impl FnOnce(Type<'db>) -> Type<'db>) -> Self {
        Self::new(self.annotation.map(f))
    }

    /// Whether the annotation is the `TypeAlias` special form.
    pub(crate) fn is_typealias(&self) -> bool {
        matches!(self.annotation, Some(ty) if ty.is_typealias_special_form())
    }
}
/// Returns the statically-known truthiness of a given expression.
///
/// Returns [`Truthiness::Ambiguous`] in case any non-definitely bound places
/// were encountered while inferring the type of the expression.
#[salsa::tracked(cycle_initial=static_expression_truthiness_cycle_initial, heap_size=get_size2::GetSize::get_heap_size)]
pub(crate) fn static_expression_truthiness<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
) -> Truthiness {
    let inference = infer_expression_types_impl(db, InferExpression::Bare(expression));

    // If any place involved might be unbound, no static claim can be made.
    if !inference.all_places_definitely_bound() {
        return Truthiness::Ambiguous;
    }

    let file = expression.file(db);
    let module = parsed_module(db, file).load(db);
    let node = expression.node_ref(db, &module);

    inference.expression_type(node).bool(db)
}

/// Initial value for [`static_expression_truthiness`] cycles: assume ambiguity.
fn static_expression_truthiness_cycle_initial<'db>(
    _db: &'db dyn Db,
    _id: salsa::Id,
    _expression: Expression<'db>,
) -> Truthiness {
    Truthiness::Ambiguous
}
/// Infer the types for an [`Unpack`] operation.
///
/// This infers the expression type and performs structural match against the target expression
/// involved in an unpacking operation. It returns a result-like object that can be used to get the
/// type of the variables involved in this unpacking along with any violations that are detected
/// during this unpacking.
#[salsa::tracked(returns(ref), cycle_fn=unpack_cycle_recover, cycle_initial=unpack_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
pub(super) fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult<'db> {
    let file = unpack.file(db);
    let module = parsed_module(db, file).load(db);
    let _span = tracing::trace_span!("infer_unpack_types", range=?unpack.range(db, &module), ?file)
        .entered();

    let mut unpacker = Unpacker::new(db, unpack.target_scope(db), &module);
    unpacker.unpack(unpack.target(db, &module), unpack.value(db));
    unpacker.finish()
}

/// Initial value for [`infer_unpack_types`] cycles.
fn unpack_cycle_initial<'db>(
    _db: &'db dyn Db,
    id: salsa::Id,
    _unpack: Unpack<'db>,
) -> UnpackResult<'db> {
    UnpackResult::cycle_initial(Type::divergent(id))
}

/// Salsa cycle recovery for [`infer_unpack_types`].
fn unpack_cycle_recover<'db>(
    db: &'db dyn Db,
    cycle: &salsa::Cycle,
    previous_cycle_result: &UnpackResult<'db>,
    result: UnpackResult<'db>,
    _unpack: Unpack<'db>,
) -> UnpackResult<'db> {
    result.cycle_normalized(db, previous_cycle_result, cycle)
}
/// Returns the type of the nearest enclosing class for the given scope.
///
/// This walks up the ancestor scopes starting from the given scope and returns
/// the closest class definition whose declared type is a class literal. This is
/// different to the behaviour of
/// [`TypeInferenceBuilder::class_context_of_current_method`], which will only
/// return `Some(class)` if either the immediate parent scope is a class OR the
/// immediate parent scope is a type-parameters scope and the grandparent scope
/// is a class.
///
/// Returns `None` if no enclosing class is found.
pub(crate) fn nearest_enclosing_class<'db>(
    db: &'db dyn Db,
    semantic: &SemanticIndex<'db>,
    scope: ScopeId,
) -> Option<ClassLiteral<'db>> {
    for (_, ancestor_scope) in semantic.ancestor_scopes(scope.file_scope_id(db)) {
        let Some(class) = ancestor_scope.node().as_class() else {
            continue;
        };
        let definition = semantic.expect_single_definition(class);
        let declared = declaration_type(db, definition).inner_type();
        if let Some(class_literal) = declared.as_class_literal() {
            return Some(class_literal);
        }
    }
    None
}
/// Returns the type of the nearest enclosing function for the given scope.
///
/// This function walks up the ancestor scopes starting from the given scope,
/// and finds the closest (non-lambda) function definition.
///
/// Returns `None` if no enclosing function is found.
pub(crate) fn nearest_enclosing_function<'db>(
    db: &'db dyn Db,
    semantic: &SemanticIndex<'db>,
    scope: ScopeId,
) -> Option<FunctionType<'db>> {
    semantic
        .ancestor_scopes(scope.file_scope_id(db))
        .find_map(|(_, ancestor_scope)| {
            let func = ancestor_scope.node().as_function()?;
            let definition = semantic.expect_single_definition(func);
            let inference = infer_definition_types(db, definition);
            // Prefer the undecorated type when available, falling back to the
            // declared type of the definition.
            inference
                .undecorated_type()
                .unwrap_or_else(|| inference.declaration_type(definition).inner_type())
                .as_function_literal()
        })
}
/// A region within which we can infer types.
#[derive(Copy, Clone, Debug)]
pub(crate) enum InferenceRegion<'db> {
/// infer types for a standalone [`Expression`]
Expression(Expression<'db>, TypeContext<'db>),
/// infer types for a [`Definition`]
Definition(Definition<'db>),
/// infer deferred types for a [`Definition`]
Deferred(Definition<'db>),
/// infer types for an entire [`ScopeId`]
Scope(ScopeId<'db>),
}
impl<'db> InferenceRegion<'db> {
fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
match self {
InferenceRegion::Expression(expression, _) => expression.scope(db),
InferenceRegion::Definition(definition) | InferenceRegion::Deferred(definition) => {
definition.scope(db)
}
InferenceRegion::Scope(scope) => scope,
}
}
}
/// The inferred types for a scope region.
#[derive(Debug, Eq, PartialEq, salsa::Update, get_size2::GetSize)]
pub(crate) struct ScopeInference<'db> {
/// The types of every expression in this region.
expressions: FxHashMap<ExpressionNodeKey, Type<'db>>,
/// The extra data that is only present for few inference regions.
extra: Option<Box<ScopeInferenceExtra<'db>>>,
}
#[derive(Debug, Eq, PartialEq, get_size2::GetSize, salsa::Update, Default)]
struct ScopeInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// The fallback type for missing expressions/bindings/declarations or recursive type inference.
cycle_recovery: Option<Type<'db>>,
/// The diagnostics for this region.
diagnostics: TypeCheckDiagnostics,
}
impl<'db> ScopeInference<'db> {
fn cycle_initial(cycle_recovery: Type<'db>) -> Self {
Self {
extra: Some(Box::new(ScopeInferenceExtra {
cycle_recovery: Some(cycle_recovery),
..ScopeInferenceExtra::default()
})),
expressions: FxHashMap::default(),
}
}
fn cycle_normalized(
mut self,
db: &'db dyn Db,
previous_inference: &ScopeInference<'db>,
cycle: &salsa::Cycle,
) -> ScopeInference<'db> {
for (expr, ty) in &mut self.expressions {
let previous_ty = previous_inference.expression_type(*expr);
*ty = ty.cycle_normalized(db, previous_ty, cycle);
}
self
}
pub(crate) fn diagnostics(&self) -> Option<&TypeCheckDiagnostics> {
self.extra.as_deref().map(|extra| &extra.diagnostics)
}
pub(crate) fn expression_type(&self, expression: impl Into<ExpressionNodeKey>) -> Type<'db> {
self.try_expression_type(expression)
.unwrap_or_else(Type::unknown)
}
pub(crate) fn try_expression_type(
&self,
expression: impl Into<ExpressionNodeKey>,
) -> Option<Type<'db>> {
self.expressions
.get(&expression.into())
.copied()
.or_else(|| self.fallback_type())
}
fn fallback_type(&self) -> Option<Type<'db>> {
self.extra.as_ref().and_then(|extra| extra.cycle_recovery)
}
/// Returns whether the given expression is a string annotation
/// (the string in `x: "int | None"`).
pub(crate) fn is_string_annotation(&self, expression: impl Into<ExpressionNodeKey>) -> bool {
let Some(extra) = &self.extra else {
return false;
};
extra.string_annotations.contains(&expression.into())
}
}
/// The inferred types for a definition region.
#[derive(Debug, Eq, PartialEq, salsa::Update, get_size2::GetSize)]
pub(crate) struct DefinitionInference<'db> {
/// The types of every expression in this region.
expressions: FxHashMap<ExpressionNodeKey, Type<'db>>,
/// The scope this region is part of.
#[cfg(debug_assertions)]
scope: ScopeId<'db>,
/// The types of every binding in this region.
///
/// Almost all definition regions have less than 10 bindings. There are very few with more than 10 (but still less than 20).
/// Because of that, use a slice with linear search over a hash map.
pub(crate) bindings: Box<[(Definition<'db>, Type<'db>)]>,
/// The types and type qualifiers of every declaration in this region.
///
/// About 50% of the definition inference regions have no declarations.
/// The other 50% have less than 10 declarations. Because of that, use a
/// slice with linear search over a hash map.
declarations: Box<[(Definition<'db>, TypeAndQualifiers<'db>)]>,
/// The extra data that is only present for few inference regions.
extra: Option<Box<DefinitionInferenceExtra<'db>>>,
}
#[derive(Debug, Eq, PartialEq, get_size2::GetSize, salsa::Update, Default)]
struct DefinitionInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// The fallback type for missing expressions/bindings/declarations or recursive type inference.
cycle_recovery: Option<Type<'db>>,
/// The definitions that have some deferred parts.
deferred: Box<[Definition<'db>]>,
/// The diagnostics for this region.
diagnostics: TypeCheckDiagnostics,
/// For function definitions, the undecorated type of the function.
undecorated_type: Option<Type<'db>>,
}
impl<'db> DefinitionInference<'db> {
fn cycle_initial(scope: ScopeId<'db>, cycle_recovery: Type<'db>) -> Self {
let _ = scope;
Self {
expressions: FxHashMap::default(),
bindings: Box::default(),
declarations: Box::default(),
#[cfg(debug_assertions)]
scope,
extra: Some(Box::new(DefinitionInferenceExtra {
cycle_recovery: Some(cycle_recovery),
..DefinitionInferenceExtra::default()
})),
}
}
fn cycle_normalized(
mut self,
db: &'db dyn Db,
previous_inference: &DefinitionInference<'db>,
cycle: &salsa::Cycle,
) -> DefinitionInference<'db> {
for (expr, ty) in &mut self.expressions {
let previous_ty = previous_inference.expression_type(*expr);
*ty = ty.cycle_normalized(db, previous_ty, cycle);
}
for (binding, binding_ty) in &mut self.bindings {
if let Some((_, previous_binding)) = previous_inference
.bindings
.iter()
.find(|(previous_binding, _)| previous_binding == binding)
{
*binding_ty = binding_ty.cycle_normalized(db, *previous_binding, cycle);
} else {
*binding_ty = binding_ty.recursive_type_normalized(db, cycle);
}
}
for (declaration, declaration_ty) in &mut self.declarations {
if let Some((_, previous_declaration)) = previous_inference
.declarations
.iter()
.find(|(previous_declaration, _)| previous_declaration == declaration)
{
*declaration_ty = declaration_ty.map_type(|decl_ty| {
decl_ty.cycle_normalized(db, previous_declaration.inner_type(), cycle)
});
} else {
*declaration_ty =
declaration_ty.map_type(|decl_ty| decl_ty.recursive_type_normalized(db, cycle));
}
}
self
}
pub(crate) fn expression_type(&self, expression: impl Into<ExpressionNodeKey>) -> Type<'db> {
self.try_expression_type(expression)
.unwrap_or_else(Type::unknown)
}
pub(crate) fn try_expression_type(
&self,
expression: impl Into<ExpressionNodeKey>,
) -> Option<Type<'db>> {
self.expressions
.get(&expression.into())
.copied()
.or_else(|| self.fallback_type())
}
#[track_caller]
pub(crate) fn binding_type(&self, definition: Definition<'db>) -> Type<'db> {
self.bindings
.iter()
.find_map(
|(def, ty)| {
if def == &definition { Some(*ty) } else { None }
},
)
.or_else(|| self.fallback_type())
.expect(
"definition should belong to this TypeInference region and \
TypeInferenceBuilder should have inferred a type for it",
)
}
fn bindings(&self) -> impl ExactSizeIterator<Item = (Definition<'db>, Type<'db>)> {
self.bindings.iter().copied()
}
#[track_caller]
pub(crate) fn declaration_type(&self, definition: Definition<'db>) -> TypeAndQualifiers<'db> {
self.declarations
.iter()
.find_map(|(def, qualifiers)| {
if def == &definition {
Some(*qualifiers)
} else {
None
}
})
.or_else(|| self.fallback_type().map(TypeAndQualifiers::declared))
.expect(
"definition should belong to this TypeInference region and \
TypeInferenceBuilder should have inferred a type for it",
)
}
fn declarations(
&self,
) -> impl ExactSizeIterator<Item = (Definition<'db>, TypeAndQualifiers<'db>)> {
self.declarations.iter().copied()
}
fn declaration_types(&self) -> impl ExactSizeIterator<Item = TypeAndQualifiers<'db>> {
self.declarations.iter().map(|(_, qualifiers)| *qualifiers)
}
pub(crate) fn fallback_type(&self) -> Option<Type<'db>> {
self.extra.as_ref().and_then(|extra| extra.cycle_recovery)
}
pub(crate) fn undecorated_type(&self) -> Option<Type<'db>> {
self.extra.as_ref().and_then(|extra| extra.undecorated_type)
}
}
/// The inferred types for an expression region.
#[derive(Debug, Eq, PartialEq, salsa::Update, get_size2::GetSize)]
pub(crate) struct ExpressionInference<'db> {
/// The types of every expression in this region.
expressions: FxHashMap<ExpressionNodeKey, Type<'db>>,
extra: Option<Box<ExpressionInferenceExtra<'db>>>,
/// The scope this region is part of.
#[cfg(debug_assertions)]
scope: ScopeId<'db>,
}
/// Extra data that only exists for few inferred expression regions.
#[derive(Debug, Eq, PartialEq, salsa::Update, get_size2::GetSize, Default)]
struct ExpressionInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// The types of every binding in this expression region.
///
/// Only very few expression regions have bindings (around 0.1%).
bindings: Box<[(Definition<'db>, Type<'db>)]>,
/// The diagnostics for this region.
diagnostics: TypeCheckDiagnostics,
/// The fallback type for missing expressions/bindings/declarations or recursive type inference.
cycle_recovery: Option<Type<'db>>,
/// `true` if all places in this expression are definitely bound
all_definitely_bound: bool,
}
impl<'db> ExpressionInference<'db> {
fn cycle_initial(scope: ScopeId<'db>, cycle_recovery: Type<'db>) -> Self {
let _ = scope;
Self {
extra: Some(Box::new(ExpressionInferenceExtra {
cycle_recovery: Some(cycle_recovery),
all_definitely_bound: true,
..ExpressionInferenceExtra::default()
})),
expressions: FxHashMap::default(),
#[cfg(debug_assertions)]
scope,
}
}
fn cycle_normalized(
mut self,
db: &'db dyn Db,
previous: &ExpressionInference<'db>,
cycle: &salsa::Cycle,
) -> ExpressionInference<'db> {
if let Some(extra) = self.extra.as_mut() {
for (binding, binding_ty) in &mut extra.bindings {
if let Some((_, previous_binding)) = previous.extra.as_deref().and_then(|extra| {
extra
.bindings
.iter()
.find(|(previous_binding, _)| previous_binding == binding)
}) {
*binding_ty = binding_ty.cycle_normalized(db, *previous_binding, cycle);
} else {
*binding_ty = binding_ty.recursive_type_normalized(db, cycle);
}
}
}
for (expr, ty) in &mut self.expressions {
let previous_ty = previous.expression_type(*expr);
*ty = ty.cycle_normalized(db, previous_ty, cycle);
}
self
}
pub(crate) fn try_expression_type(
&self,
expression: impl Into<ExpressionNodeKey>,
) -> Option<Type<'db>> {
self.expressions
.get(&expression.into())
.copied()
.or_else(|| self.fallback_type())
}
pub(crate) fn expression_type(&self, expression: impl Into<ExpressionNodeKey>) -> Type<'db> {
self.try_expression_type(expression)
.unwrap_or_else(Type::unknown)
}
fn fallback_type(&self) -> Option<Type<'db>> {
self.extra.as_ref().and_then(|extra| extra.cycle_recovery)
}
/// Returns true if all places in this expression are definitely bound.
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/cyclic.rs | crates/ty_python_semantic/src/types/cyclic.rs | //! Cycle detection for recursive types.
//!
//! The visitors here (`TypeTransformer` and `PairVisitor`) are used in methods that recursively
//! visit types to transform them (e.g. `Type::normalize`) or to decide a relation between a pair
//! of types (e.g. `Type::has_relation_to`).
//!
//! The typical pattern is that the "entry" method (e.g. `Type::has_relation_to`) will create a
//! visitor and pass it to the recursive method (e.g. `Type::has_relation_to_impl`). Rust types
//! that form part of a complex type (e.g. tuples, protocols, nominal instances, etc) should
//! usually just implement the recursive method, and all recursive calls should call the recursive
//! method and pass along the visitor.
//!
//! Not all recursive calls need to actually call `.visit` on the visitor; only when visiting types
//! that can create a recursive relationship (this includes, for example, type aliases and
//! protocols).
//!
//! There is a risk of double-visiting, for example if `Type::has_relation_to_impl` calls
//! `visitor.visit` when visiting a protocol type, and then internal `has_relation_to_impl` methods
//! of the Rust types implementing protocols also call `visitor.visit`. The best way to avoid this
//! is to prefer always calling `visitor.visit` only in the main recursive method on `Type`.
use std::cell::{Cell, RefCell};
use std::cmp::Eq;
use std::hash::Hash;
use std::marker::PhantomData;
use rustc_hash::FxHashMap;
use crate::FxIndexSet;
use crate::types::Type;
/// Maximum recursion depth for cycle detection.
///
/// This is a safety limit to prevent stack overflow when checking recursive generic protocols
/// that create infinitely growing type specializations. For example:
///
/// ```python
/// class C[T](Protocol):
/// a: 'C[set[T]]'
/// ```
///
/// When checking `C[set[int]]` against e.g. `C[Unknown]`, member `a` requires checking
/// `C[set[set[int]]]`, which in turn requires checking `C[set[set[set[int]]]]`, etc. Each level
/// creates a unique cache key, so the standard cycle detection doesn't catch it. The depth limit
/// ensures we bail out before hitting a stack overflow.
const MAX_RECURSION_DEPTH: u32 = 64;
pub(crate) type TypeTransformer<'db, Tag> = CycleDetector<Tag, Type<'db>, Type<'db>>;
impl<Tag> Default for TypeTransformer<'_, Tag> {
fn default() -> Self {
// TODO: proper recursive type handling
// This must be Any, not e.g. a todo type, because Any is the normalized form of the
// dynamic type (that is, todo types are normalized to Any).
CycleDetector::new(Type::any())
}
}
pub(crate) type PairVisitor<'db, Tag, C> = CycleDetector<Tag, (Type<'db>, Type<'db>), C>;
#[derive(Debug)]
pub struct CycleDetector<Tag, T, R> {
/// If the type we're visiting is present in `seen`, it indicates that we've hit a cycle (due
/// to a recursive type); we need to immediately short circuit the whole operation and return
/// the fallback value. That's why we pop items off the end of `seen` after we've visited them.
seen: RefCell<FxIndexSet<T>>,
/// Unlike `seen`, this field is a pure performance optimisation (and an essential one). If the
/// type we're trying to normalize is present in `cache`, it doesn't necessarily mean we've hit
/// a cycle: it just means that we've already visited this inner type as part of a bigger call
/// chain we're currently in. Since this cache is just a performance optimisation, it doesn't
/// make sense to pop items off the end of the cache after they've been visited (it would
/// sort-of defeat the point of a cache if we did!)
cache: RefCell<FxHashMap<T, R>>,
/// Current recursion depth. Used to prevent stack overflow if recursive generic types create
/// infinitely growing type specializations that don't trigger exact-match cycle detection.
depth: Cell<u32>,
fallback: R,
_tag: PhantomData<Tag>,
}
impl<Tag, T: Hash + Eq + Clone, R: Clone> CycleDetector<Tag, T, R> {
pub fn new(fallback: R) -> Self {
CycleDetector {
seen: RefCell::new(FxIndexSet::default()),
cache: RefCell::new(FxHashMap::default()),
depth: Cell::new(0),
fallback,
_tag: PhantomData,
}
}
pub fn visit(&self, item: T, func: impl FnOnce() -> R) -> R {
if let Some(val) = self.cache.borrow().get(&item) {
return val.clone();
}
// We hit a cycle
if !self.seen.borrow_mut().insert(item.clone()) {
return self.fallback.clone();
}
// Check depth limit to prevent stack overflow from recursive generic types
// with growing specializations (e.g., C[set[T]] -> C[set[set[T]]] -> ...)
let current_depth = self.depth.get();
if current_depth >= MAX_RECURSION_DEPTH {
self.seen.borrow_mut().pop();
return self.fallback.clone();
}
self.depth.set(current_depth + 1);
let ret = func();
self.depth.set(current_depth);
self.seen.borrow_mut().pop();
self.cache.borrow_mut().insert(item, ret.clone());
ret
}
pub fn try_visit(&self, item: T, func: impl FnOnce() -> Option<R>) -> Option<R> {
if let Some(val) = self.cache.borrow().get(&item) {
return Some(val.clone());
}
// We hit a cycle
if !self.seen.borrow_mut().insert(item.clone()) {
return Some(self.fallback.clone());
}
// Check depth limit to prevent stack overflow from recursive generic protocols
// with growing specializations (e.g., C[set[T]] -> C[set[set[T]]] -> ...)
let current_depth = self.depth.get();
if current_depth >= MAX_RECURSION_DEPTH {
self.seen.borrow_mut().pop();
return Some(self.fallback.clone());
}
self.depth.set(current_depth + 1);
let ret = func()?;
self.depth.set(current_depth);
self.seen.borrow_mut().pop();
self.cache.borrow_mut().insert(item, ret.clone());
Some(ret)
}
}
impl<Tag, T: Hash + Eq + Clone, R: Default + Clone> Default for CycleDetector<Tag, T, R> {
fn default() -> Self {
CycleDetector::new(R::default())
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/visitor.rs | crates/ty_python_semantic/src/types/visitor.rs | use crate::{
Db, FxIndexSet,
types::{
BoundMethodType, BoundSuperType, BoundTypeVarInstance, CallableType, GenericAlias,
IntersectionType, KnownBoundMethodType, KnownInstanceType, NominalInstanceType,
PropertyInstanceType, ProtocolInstanceType, SubclassOfType, Type, TypeAliasType,
TypeGuardType, TypeIsType, TypeVarInstance, TypedDictType, UnionType,
bound_super::walk_bound_super_type,
class::walk_generic_alias,
function::{FunctionType, walk_function_type},
instance::{walk_nominal_instance_type, walk_protocol_instance_type},
newtype::{NewType, walk_newtype_instance_type},
subclass_of::walk_subclass_of_type,
walk_bound_method_type, walk_bound_type_var_type, walk_callable_type,
walk_intersection_type, walk_known_instance_type, walk_method_wrapper_type,
walk_property_instance_type, walk_type_alias_type, walk_type_var_type,
walk_typed_dict_type, walk_typeguard_type, walk_typeis_type, walk_union,
},
};
use std::cell::{Cell, RefCell};
/// A visitor trait that recurses into nested types.
///
/// The trait does not guard against infinite recursion out of the box,
/// but it makes it easy for implementors of the trait to do so.
/// See [`any_over_type`] for an example of how to do this.
pub(crate) trait TypeVisitor<'db> {
/// Should the visitor trigger inference of and visit lazily-inferred type attributes?
fn should_visit_lazy_type_attributes(&self) -> bool;
fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>);
fn visit_union_type(&self, db: &'db dyn Db, union: UnionType<'db>) {
walk_union(db, union, self);
}
fn visit_intersection_type(&self, db: &'db dyn Db, intersection: IntersectionType<'db>) {
walk_intersection_type(db, intersection, self);
}
fn visit_callable_type(&self, db: &'db dyn Db, callable: CallableType<'db>) {
walk_callable_type(db, callable, self);
}
fn visit_property_instance_type(&self, db: &'db dyn Db, property: PropertyInstanceType<'db>) {
walk_property_instance_type(db, property, self);
}
fn visit_typeis_type(&self, db: &'db dyn Db, type_is: TypeIsType<'db>) {
walk_typeis_type(db, type_is, self);
}
fn visit_typeguard_type(&self, db: &'db dyn Db, type_is: TypeGuardType<'db>) {
walk_typeguard_type(db, type_is, self);
}
fn visit_subclass_of_type(&self, db: &'db dyn Db, subclass_of: SubclassOfType<'db>) {
walk_subclass_of_type(db, subclass_of, self);
}
fn visit_generic_alias_type(&self, db: &'db dyn Db, alias: GenericAlias<'db>) {
walk_generic_alias(db, alias, self);
}
fn visit_function_type(&self, db: &'db dyn Db, function: FunctionType<'db>) {
walk_function_type(db, function, self);
}
fn visit_bound_method_type(&self, db: &'db dyn Db, method: BoundMethodType<'db>) {
walk_bound_method_type(db, method, self);
}
fn visit_bound_super_type(&self, db: &'db dyn Db, bound_super: BoundSuperType<'db>) {
walk_bound_super_type(db, bound_super, self);
}
fn visit_nominal_instance_type(&self, db: &'db dyn Db, nominal: NominalInstanceType<'db>) {
walk_nominal_instance_type(db, nominal, self);
}
fn visit_bound_type_var_type(&self, db: &'db dyn Db, bound_typevar: BoundTypeVarInstance<'db>) {
walk_bound_type_var_type(db, bound_typevar, self);
}
fn visit_type_var_type(&self, db: &'db dyn Db, typevar: TypeVarInstance<'db>) {
walk_type_var_type(db, typevar, self);
}
fn visit_protocol_instance_type(&self, db: &'db dyn Db, protocol: ProtocolInstanceType<'db>) {
walk_protocol_instance_type(db, protocol, self);
}
fn visit_method_wrapper_type(
&self,
db: &'db dyn Db,
method_wrapper: KnownBoundMethodType<'db>,
) {
walk_method_wrapper_type(db, method_wrapper, self);
}
fn visit_known_instance_type(&self, db: &'db dyn Db, known_instance: KnownInstanceType<'db>) {
walk_known_instance_type(db, known_instance, self);
}
fn visit_type_alias_type(&self, db: &'db dyn Db, type_alias: TypeAliasType<'db>) {
walk_type_alias_type(db, type_alias, self);
}
fn visit_typed_dict_type(&self, db: &'db dyn Db, typed_dict: TypedDictType<'db>) {
walk_typed_dict_type(db, typed_dict, self);
}
fn visit_newtype_instance_type(&self, db: &'db dyn Db, newtype: NewType<'db>) {
walk_newtype_instance_type(db, newtype, self);
}
}
/// Enumeration of types that may contain other types, such as unions, intersections, and generics.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub(super) enum NonAtomicType<'db> {
Union(UnionType<'db>),
Intersection(IntersectionType<'db>),
FunctionLiteral(FunctionType<'db>),
BoundMethod(BoundMethodType<'db>),
BoundSuper(BoundSuperType<'db>),
MethodWrapper(KnownBoundMethodType<'db>),
Callable(CallableType<'db>),
GenericAlias(GenericAlias<'db>),
KnownInstance(KnownInstanceType<'db>),
SubclassOf(SubclassOfType<'db>),
NominalInstance(NominalInstanceType<'db>),
PropertyInstance(PropertyInstanceType<'db>),
TypeIs(TypeIsType<'db>),
TypeGuard(TypeGuardType<'db>),
TypeVar(BoundTypeVarInstance<'db>),
ProtocolInstance(ProtocolInstanceType<'db>),
TypedDict(TypedDictType<'db>),
TypeAlias(TypeAliasType<'db>),
NewTypeInstance(NewType<'db>),
}
pub(super) enum TypeKind<'db> {
Atomic,
NonAtomic(NonAtomicType<'db>),
}
impl<'db> From<Type<'db>> for TypeKind<'db> {
fn from(ty: Type<'db>) -> Self {
match ty {
Type::AlwaysFalsy
| Type::AlwaysTruthy
| Type::Never
| Type::LiteralString
| Type::IntLiteral(_)
| Type::BooleanLiteral(_)
| Type::StringLiteral(_)
| Type::BytesLiteral(_)
| Type::EnumLiteral(_)
| Type::DataclassDecorator(_)
| Type::DataclassTransformer(_)
| Type::WrapperDescriptor(_)
| Type::ModuleLiteral(_)
| Type::ClassLiteral(_)
| Type::SpecialForm(_)
| Type::Dynamic(_) => TypeKind::Atomic,
// Non-atomic types
Type::FunctionLiteral(function) => {
TypeKind::NonAtomic(NonAtomicType::FunctionLiteral(function))
}
Type::Intersection(intersection) => {
TypeKind::NonAtomic(NonAtomicType::Intersection(intersection))
}
Type::Union(union) => TypeKind::NonAtomic(NonAtomicType::Union(union)),
Type::BoundMethod(method) => TypeKind::NonAtomic(NonAtomicType::BoundMethod(method)),
Type::BoundSuper(bound_super) => {
TypeKind::NonAtomic(NonAtomicType::BoundSuper(bound_super))
}
Type::KnownBoundMethod(method_wrapper) => {
TypeKind::NonAtomic(NonAtomicType::MethodWrapper(method_wrapper))
}
Type::Callable(callable) => TypeKind::NonAtomic(NonAtomicType::Callable(callable)),
Type::GenericAlias(alias) => TypeKind::NonAtomic(NonAtomicType::GenericAlias(alias)),
Type::KnownInstance(known_instance) => {
TypeKind::NonAtomic(NonAtomicType::KnownInstance(known_instance))
}
Type::SubclassOf(subclass_of) => {
TypeKind::NonAtomic(NonAtomicType::SubclassOf(subclass_of))
}
Type::NominalInstance(nominal) => {
TypeKind::NonAtomic(NonAtomicType::NominalInstance(nominal))
}
Type::ProtocolInstance(protocol) => {
TypeKind::NonAtomic(NonAtomicType::ProtocolInstance(protocol))
}
Type::PropertyInstance(property) => {
TypeKind::NonAtomic(NonAtomicType::PropertyInstance(property))
}
Type::TypeVar(bound_typevar) => {
TypeKind::NonAtomic(NonAtomicType::TypeVar(bound_typevar))
}
Type::TypeIs(type_is) => TypeKind::NonAtomic(NonAtomicType::TypeIs(type_is)),
Type::TypeGuard(type_guard) => {
TypeKind::NonAtomic(NonAtomicType::TypeGuard(type_guard))
}
Type::TypedDict(typed_dict) => {
TypeKind::NonAtomic(NonAtomicType::TypedDict(typed_dict))
}
Type::TypeAlias(alias) => TypeKind::NonAtomic(NonAtomicType::TypeAlias(alias)),
Type::NewTypeInstance(newtype) => {
TypeKind::NonAtomic(NonAtomicType::NewTypeInstance(newtype))
}
}
}
}
pub(super) fn walk_non_atomic_type<'db, V: TypeVisitor<'db> + ?Sized>(
db: &'db dyn Db,
non_atomic_type: NonAtomicType<'db>,
visitor: &V,
) {
match non_atomic_type {
NonAtomicType::FunctionLiteral(function) => visitor.visit_function_type(db, function),
NonAtomicType::Intersection(intersection) => {
visitor.visit_intersection_type(db, intersection);
}
NonAtomicType::Union(union) => visitor.visit_union_type(db, union),
NonAtomicType::BoundMethod(method) => visitor.visit_bound_method_type(db, method),
NonAtomicType::BoundSuper(bound_super) => visitor.visit_bound_super_type(db, bound_super),
NonAtomicType::MethodWrapper(method_wrapper) => {
visitor.visit_method_wrapper_type(db, method_wrapper);
}
NonAtomicType::Callable(callable) => visitor.visit_callable_type(db, callable),
NonAtomicType::GenericAlias(alias) => visitor.visit_generic_alias_type(db, alias),
NonAtomicType::KnownInstance(known_instance) => {
visitor.visit_known_instance_type(db, known_instance);
}
NonAtomicType::SubclassOf(subclass_of) => visitor.visit_subclass_of_type(db, subclass_of),
NonAtomicType::NominalInstance(nominal) => visitor.visit_nominal_instance_type(db, nominal),
NonAtomicType::PropertyInstance(property) => {
visitor.visit_property_instance_type(db, property);
}
NonAtomicType::TypeIs(type_is) => visitor.visit_typeis_type(db, type_is),
NonAtomicType::TypeGuard(type_guard) => visitor.visit_typeguard_type(db, type_guard),
NonAtomicType::TypeVar(bound_typevar) => {
visitor.visit_bound_type_var_type(db, bound_typevar);
}
NonAtomicType::ProtocolInstance(protocol) => {
visitor.visit_protocol_instance_type(db, protocol);
}
NonAtomicType::TypedDict(typed_dict) => visitor.visit_typed_dict_type(db, typed_dict),
NonAtomicType::TypeAlias(alias) => {
visitor.visit_type_alias_type(db, alias);
}
NonAtomicType::NewTypeInstance(newtype) => {
visitor.visit_newtype_instance_type(db, newtype);
}
}
}
pub(crate) fn walk_type_with_recursion_guard<'db>(
db: &'db dyn Db,
ty: Type<'db>,
visitor: &impl TypeVisitor<'db>,
recursion_guard: &TypeCollector<'db>,
) {
match TypeKind::from(ty) {
TypeKind::Atomic => {}
TypeKind::NonAtomic(non_atomic_type) => {
if recursion_guard.type_was_already_seen(ty) {
// If we have already seen this type, we can skip it.
return;
}
walk_non_atomic_type(db, non_atomic_type, visitor);
}
}
}
#[derive(Default, Debug)]
pub(crate) struct TypeCollector<'db>(RefCell<FxIndexSet<Type<'db>>>);
impl<'db> TypeCollector<'db> {
pub(crate) fn type_was_already_seen(&self, ty: Type<'db>) -> bool {
!self.0.borrow_mut().insert(ty)
}
}
/// Return `true` if `ty`, or any of the types contained in `ty`, match the closure passed in.
///
/// The function guards against infinite recursion
/// by keeping track of the non-atomic types it has already seen.
///
/// The `should_visit_lazy_type_attributes` parameter controls whether deferred type attributes
/// (value of a type alias, attributes of a class-based protocol, bounds/constraints of a typevar)
/// are visited or not.
pub(super) fn any_over_type<'db>(
db: &'db dyn Db,
ty: Type<'db>,
query: &dyn Fn(Type<'db>) -> bool,
should_visit_lazy_type_attributes: bool,
) -> bool {
struct AnyOverTypeVisitor<'db, 'a> {
query: &'a dyn Fn(Type<'db>) -> bool,
recursion_guard: TypeCollector<'db>,
found_matching_type: Cell<bool>,
should_visit_lazy_type_attributes: bool,
}
impl<'db> TypeVisitor<'db> for AnyOverTypeVisitor<'db, '_> {
fn should_visit_lazy_type_attributes(&self) -> bool {
self.should_visit_lazy_type_attributes
}
fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) {
let already_found = self.found_matching_type.get();
if already_found {
return;
}
let found = already_found | (self.query)(ty);
self.found_matching_type.set(found);
if found {
return;
}
walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard);
}
}
let visitor = AnyOverTypeVisitor {
query,
recursion_guard: TypeCollector::default(),
found_matching_type: Cell::new(false),
should_visit_lazy_type_attributes,
};
visitor.visit_type(db, ty);
visitor.found_matching_type.get()
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/builder.rs | crates/ty_python_semantic/src/types/builder.rs | //! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
//! have a standalone negation type so there's no other representation for this.
//! * The same type should never appear more than once in a union or intersection. (This should
//! be expanded to cover subtyping -- see below -- but for now we only implement it for type
//! identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//! than a union-of-intersections. Unions cannot contain other unions (the inner union just
//! flattens into the outer one), intersections cannot contain other intersections (also
//! flattens), and intersections cannot contain unions (the intersection distributes over the
//! union, inverting it into a union-of-intersections).
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//! subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//! eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types simplifies to [`Type::Never`].
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! ## Performance
//!
//! In practice, there are two kinds of unions found in the wild: relatively-small unions made up
//! of normal user types (classes, etc), and large unions made up of literals, which can occur via
//! large enums (not yet implemented) or from string/integer/bytes literals, which can grow due to
//! literal arithmetic or operations on literal strings/bytes. For normal unions, it's most
//! efficient to just store the member types in a vector, and do O(n^2) `is_subtype_of` checks to
//! maintain the union in simplified form. But literal unions can grow to a size where this becomes
//! a performance problem. For this reason, we group literal types in `UnionBuilder`. Since every
//! different string literal type shares exactly the same possible super-types, and none of them
//! are subtypes of each other (unless exactly the same literal type), we can avoid many
//! unnecessary `is_subtype_of` checks.
use crate::types::enums::{enum_member_literals, enum_metadata};
use crate::types::type_ordering::union_or_intersection_elements_ordering;
use crate::types::{
BytesLiteralType, IntersectionType, KnownClass, StringLiteralType, Type,
TypeVarBoundOrConstraints, UnionType,
};
use crate::{Db, FxOrderSet};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LiteralKind {
Int,
String,
Bytes,
}
impl<'db> Type<'db> {
    /// Return `true` if this type can be a supertype of some literals of `kind` and not others.
    ///
    /// If a type "splits" a literal kind, a group of such literals must be checked
    /// literal-by-literal against it; otherwise checking one representative literal
    /// of the group suffices (see `UnionElement::try_reduce`).
    fn splits_literals(self, db: &'db dyn Db, kind: LiteralKind) -> bool {
        match (self, kind) {
            // Truthiness splits every literal kind: some literals of each kind are
            // falsy (`0`, `""`, `b""`) while the rest are truthy.
            (Type::AlwaysFalsy | Type::AlwaysTruthy, _) => true,
            // A literal type of the same kind is a supertype of itself but of no
            // sibling literal.
            (Type::StringLiteral(_), LiteralKind::String) => true,
            (Type::BytesLiteral(_), LiteralKind::Bytes) => true,
            (Type::IntLiteral(_), LiteralKind::Int) => true,
            // A compound type splits the kind if any of its parts does.
            (Type::Intersection(intersection), _) => {
                intersection
                    .positive(db)
                    .iter()
                    .any(|ty| ty.splits_literals(db, kind))
                    || intersection
                        .negative(db)
                        .iter()
                        .any(|ty| ty.splits_literals(db, kind))
            }
            (Type::Union(union), _) => union
                .elements(db)
                .iter()
                .any(|ty| ty.splits_literals(db, kind)),
            _ => false,
        }
    }
}
/// A member of a union under construction.
///
/// Literal types are grouped into sets so that large unions of literals don't
/// require O(n^2) pairwise `is_subtype_of` checks (see the module docs).
#[derive(Debug)]
enum UnionElement<'db> {
    /// A group of `int` literal members, stored by value.
    IntLiterals(FxOrderSet<i64>),
    /// A group of `str` literal members.
    StringLiterals(FxOrderSet<StringLiteralType<'db>>),
    /// A group of `bytes` literal members.
    BytesLiterals(FxOrderSet<BytesLiteralType<'db>>),
    /// Any other member type.
    Type(Type<'db>),
}
impl<'db> UnionElement<'db> {
    /// Return the wrapped type if this element is a single (non-literal-group) type.
    const fn to_type_element(&self) -> Option<Type<'db>> {
        match self {
            UnionElement::Type(ty) => Some(*ty),
            _ => None,
        }
    }
    /// Try reducing this `UnionElement` given the presence in the same union of `other_type`.
    fn try_reduce(&mut self, db: &'db dyn Db, other_type: Type<'db>) -> ReduceResult<'db> {
        match self {
            UnionElement::IntLiterals(literals) => {
                if other_type.splits_literals(db, LiteralKind::Int) {
                    // `other_type` may subsume some literals but not others, so we
                    // must check each literal in the group individually.
                    let mut collapse = false;
                    let mut ignore = false;
                    let negated = other_type.negate(db);
                    literals.retain(|literal| {
                        let ty = Type::IntLiteral(*literal);
                        // If `~other_type <: literal`, then `literal | other_type`
                        // is `object` and the whole union collapses.
                        if negated.is_subtype_of(db, ty) {
                            collapse = true;
                        }
                        // If `other_type <: literal`, the new type adds nothing.
                        if other_type.is_subtype_of(db, ty) {
                            ignore = true;
                        }
                        // Drop literals that `other_type` already covers.
                        !ty.is_subtype_of(db, other_type)
                    });
                    if ignore {
                        ReduceResult::Ignore
                    } else if collapse {
                        ReduceResult::CollapseToObject
                    } else {
                        ReduceResult::KeepIf(!literals.is_empty())
                    }
                } else {
                    // `other_type` relates to all int literals uniformly, so one
                    // representative decides the fate of the entire group.
                    ReduceResult::KeepIf(
                        !Type::IntLiteral(literals[0]).is_subtype_of(db, other_type),
                    )
                }
            }
            UnionElement::StringLiterals(literals) => {
                // Same logic as the `IntLiterals` case above.
                if other_type.splits_literals(db, LiteralKind::String) {
                    let mut collapse = false;
                    let mut ignore = false;
                    let negated = other_type.negate(db);
                    literals.retain(|literal| {
                        let ty = Type::StringLiteral(*literal);
                        if negated.is_subtype_of(db, ty) {
                            collapse = true;
                        }
                        if other_type.is_subtype_of(db, ty) {
                            ignore = true;
                        }
                        !ty.is_subtype_of(db, other_type)
                    });
                    if ignore {
                        ReduceResult::Ignore
                    } else if collapse {
                        ReduceResult::CollapseToObject
                    } else {
                        ReduceResult::KeepIf(!literals.is_empty())
                    }
                } else {
                    ReduceResult::KeepIf(
                        !Type::StringLiteral(literals[0]).is_subtype_of(db, other_type),
                    )
                }
            }
            UnionElement::BytesLiterals(literals) => {
                // Same logic as the `IntLiterals` case above.
                if other_type.splits_literals(db, LiteralKind::Bytes) {
                    let mut collapse = false;
                    let mut ignore = false;
                    let negated = other_type.negate(db);
                    literals.retain(|literal| {
                        let ty = Type::BytesLiteral(*literal);
                        if negated.is_subtype_of(db, ty) {
                            collapse = true;
                        }
                        if other_type.is_subtype_of(db, ty) {
                            ignore = true;
                        }
                        !ty.is_subtype_of(db, other_type)
                    });
                    if ignore {
                        ReduceResult::Ignore
                    } else if collapse {
                        ReduceResult::CollapseToObject
                    } else {
                        ReduceResult::KeepIf(!literals.is_empty())
                    }
                } else {
                    ReduceResult::KeepIf(
                        !Type::BytesLiteral(literals[0]).is_subtype_of(db, other_type),
                    )
                }
            }
            // Non-literal elements are handed back to the caller for the full
            // simplification checks in `push_type`.
            UnionElement::Type(existing) => ReduceResult::Type(*existing),
        }
    }
}
/// Outcome of [`UnionElement::try_reduce`]: how the union should be adjusted
/// after comparing an existing element against a newly added type.
enum ReduceResult<'db> {
    /// Reduction of this `UnionElement` is complete; keep it in the union if the nested
    /// boolean is true, eliminate it from the union if false.
    KeepIf(bool),
    /// Collapse this entire union to `object`.
    CollapseToObject,
    /// The new element is a subtype of an existing part of the `UnionElement`, ignore it.
    Ignore,
    /// The given `Type` can stand-in for the entire `UnionElement` for further union
    /// simplification checks.
    Type(Type<'db>),
}
/// Whether the value this union describes is defined recursively.
///
/// Recursively-defined unions are widened from fewer literal elements
/// (see `MAX_RECURSIVE_UNION_LITERALS`) so fixed-point iteration converges faster.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]
pub enum RecursivelyDefined {
    Yes,
    No,
}
impl RecursivelyDefined {
    /// Whether this is the `Yes` variant.
    const fn is_yes(self) -> bool {
        match self {
            RecursivelyDefined::Yes => true,
            RecursivelyDefined::No => false,
        }
    }
    /// Logical "or": yields `Yes` if either operand is `Yes`.
    const fn or(self, other: RecursivelyDefined) -> RecursivelyDefined {
        if self.is_yes() || other.is_yes() {
            RecursivelyDefined::Yes
        } else {
            RecursivelyDefined::No
        }
    }
}
/// If the value is defined recursively, widening is performed from fewer literal
/// elements, resulting in faster convergence of the fixed-point iteration.
const MAX_RECURSIVE_UNION_LITERALS: usize = 10;
/// If the value is defined non-recursively, the fixed-point iteration will converge
/// in one go, so in principle we can have as many literal elements as we want; but to
/// avoid unintended huge computational loads, we limit it to 256.
const MAX_NON_RECURSIVE_UNION_LITERALS: usize = 256;
/// Builder for (simplified) union types; see the module docs for the invariants it maintains.
pub(crate) struct UnionBuilder<'db> {
    /// The accumulated union members; literal types are grouped by kind.
    elements: Vec<UnionElement<'db>>,
    db: &'db dyn Db,
    /// Whether type aliases added to the union are expanded to their value types.
    unpack_aliases: bool,
    /// Whether `build()` sorts the final elements into a canonical order.
    order_elements: bool,
    // This is enabled when joining types in a `cycle_recovery` function.
    // Since a cycle cannot be created within a `cycle_recovery` function, execution of `is_redundant_with` is skipped.
    cycle_recovery: bool,
    /// Whether the union being built describes a recursively-defined value.
    recursively_defined: RecursivelyDefined,
}
impl<'db> UnionBuilder<'db> {
    /// Create a new, empty builder with default settings: aliases are unpacked,
    /// elements are left unordered, and cycle-recovery mode is off.
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: vec![],
            unpack_aliases: true,
            order_elements: false,
            cycle_recovery: false,
            recursively_defined: RecursivelyDefined::No,
        }
    }
    /// Set whether type aliases added to the union are expanded to their value types.
    pub(crate) fn unpack_aliases(mut self, val: bool) -> Self {
        self.unpack_aliases = val;
        self
    }
    /// Set whether `build()` sorts the union's elements into a canonical order.
    pub(crate) fn order_elements(mut self, val: bool) -> Self {
        self.order_elements = val;
        self
    }
    /// Mark this builder as joining types inside a `cycle_recovery` function.
    /// Enabling this also disables alias unpacking.
    pub(crate) fn cycle_recovery(mut self, val: bool) -> Self {
        self.cycle_recovery = val;
        if self.cycle_recovery {
            self.unpack_aliases = false;
        }
        self
    }
    /// Set whether the union being built describes a recursively-defined value.
    pub(crate) fn recursively_defined(mut self, val: RecursivelyDefined) -> Self {
        self.recursively_defined = val;
        self
    }
    /// Return `true` if no elements have been added yet.
    pub(crate) fn is_empty(&self) -> bool {
        self.elements.is_empty()
    }
    /// Collapse the union to a single type: `object`.
    fn collapse_to_object(&mut self) {
        self.elements.clear();
        self.elements.push(UnionElement::Type(Type::object()));
    }
    /// Widen every group of literal elements to the corresponding instance type
    /// (`int`, `str`, or `bytes`), bounding the union's size.
    fn widen_literal_types(&mut self, seen_aliases: &mut Vec<Type<'db>>) {
        let mut replace_with = vec![];
        for elem in &self.elements {
            match elem {
                UnionElement::IntLiterals(_) => {
                    replace_with.push(KnownClass::Int.to_instance(self.db));
                }
                UnionElement::StringLiterals(_) => {
                    replace_with.push(KnownClass::Str.to_instance(self.db));
                }
                UnionElement::BytesLiterals(_) => {
                    replace_with.push(KnownClass::Bytes.to_instance(self.db));
                }
                UnionElement::Type(_) => {}
            }
        }
        // Re-adding the instance types through the normal path removes the literal
        // groups they subsume (each literal is a subtype of its instance type).
        for ty in replace_with {
            self.add_in_place_impl(ty, seen_aliases);
        }
    }
    /// Adds a type to this union.
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        self.add_in_place(ty);
        self
    }
    /// Adds a type to this union.
    pub(crate) fn add_in_place(&mut self, ty: Type<'db>) {
        self.add_in_place_impl(ty, &mut vec![]);
    }
    /// Adds a type to this union. `seen_aliases` tracks aliases already expanded on
    /// this call path so recursive aliases don't cause infinite recursion.
    pub(crate) fn add_in_place_impl(&mut self, ty: Type<'db>, seen_aliases: &mut Vec<Type<'db>>) {
        let cycle_recovery = self.cycle_recovery;
        // Decide when a literal group has grown large enough to be widened to its
        // instance type; the threshold is lower for recursively-defined unions.
        let should_widen = |literals, recursively_defined: RecursivelyDefined| {
            if recursively_defined.is_yes() && cycle_recovery {
                literals >= MAX_RECURSIVE_UNION_LITERALS
            } else {
                literals >= MAX_NON_RECURSIVE_UNION_LITERALS
            }
        };
        // Lazily compute (and cache) `~ty`; it's only needed on some paths.
        let mut ty_negated_cache = None;
        let mut ty_negated = || *ty_negated_cache.get_or_insert_with(|| ty.negate(self.db));
        match ty {
            Type::Union(union) => {
                let new_elements = union.elements(self.db);
                self.elements.reserve(new_elements.len());
                for element in new_elements {
                    self.add_in_place_impl(*element, seen_aliases);
                }
                self.recursively_defined = self
                    .recursively_defined
                    .or(union.recursively_defined(self.db));
                if self.cycle_recovery && self.recursively_defined.is_yes() {
                    // Count all literals across the union; widen if over threshold.
                    let literals = self.elements.iter().fold(0, |acc, elem| match elem {
                        UnionElement::IntLiterals(literals) => acc + literals.len(),
                        UnionElement::StringLiterals(literals) => acc + literals.len(),
                        UnionElement::BytesLiterals(literals) => acc + literals.len(),
                        UnionElement::Type(_) => acc,
                    });
                    if should_widen(literals, self.recursively_defined) {
                        self.widen_literal_types(seen_aliases);
                    }
                }
            }
            // Adding `Never` to a union is a no-op.
            Type::Never => {}
            Type::TypeAlias(alias) if self.unpack_aliases => {
                if seen_aliases.contains(&ty) {
                    // Union contains itself recursively via a type alias. This is an error, just
                    // leave out the recursive alias. TODO surface this error.
                } else {
                    seen_aliases.push(ty);
                    self.add_in_place_impl(alias.value_type(self.db), seen_aliases);
                }
            }
            // If adding a string literal, look for an existing `UnionElement::StringLiterals` to
            // add it to, or an existing element that is a super-type of string literals, which
            // means we shouldn't add it. Otherwise, add a new `UnionElement::StringLiterals`
            // containing it.
            Type::StringLiteral(literal) => {
                let mut found = None;
                let mut to_remove = None;
                for (index, element) in self.elements.iter_mut().enumerate() {
                    match element {
                        UnionElement::StringLiterals(literals) => {
                            if should_widen(literals.len(), self.recursively_defined) {
                                let replace_with = KnownClass::Str.to_instance(self.db);
                                self.add_in_place_impl(replace_with, seen_aliases);
                                return;
                            }
                            found = Some(literals);
                            continue;
                        }
                        UnionElement::Type(existing) => {
                            // e.g. `existing` could be `Literal[""] & Any`,
                            // and `ty` could be `Literal[""]`
                            if ty.is_subtype_of(self.db, *existing) {
                                return;
                            }
                            if existing.is_subtype_of(self.db, ty) {
                                to_remove = Some(index);
                                continue;
                            }
                            if ty_negated().is_subtype_of(self.db, *existing) {
                                // The type that includes both this new element, and its negation
                                // (or a supertype of its negation), must be simply `object`.
                                self.collapse_to_object();
                                return;
                            }
                        }
                        _ => {}
                    }
                }
                if let Some(found) = found {
                    found.insert(literal);
                } else {
                    self.elements
                        .push(UnionElement::StringLiterals(FxOrderSet::from_iter([
                            literal,
                        ])));
                }
                if let Some(index) = to_remove {
                    self.elements.swap_remove(index);
                }
            }
            // Same for bytes literals as for string literals, above.
            Type::BytesLiteral(literal) => {
                let mut found = None;
                let mut to_remove = None;
                for (index, element) in self.elements.iter_mut().enumerate() {
                    match element {
                        UnionElement::BytesLiterals(literals) => {
                            if should_widen(literals.len(), self.recursively_defined) {
                                let replace_with = KnownClass::Bytes.to_instance(self.db);
                                self.add_in_place_impl(replace_with, seen_aliases);
                                return;
                            }
                            found = Some(literals);
                            continue;
                        }
                        UnionElement::Type(existing) => {
                            if ty.is_subtype_of(self.db, *existing) {
                                return;
                            }
                            // e.g. `existing` could be `Literal[b""] & Any`,
                            // and `ty` could be `Literal[b""]`
                            if existing.is_subtype_of(self.db, ty) {
                                to_remove = Some(index);
                                continue;
                            }
                            if ty_negated().is_subtype_of(self.db, *existing) {
                                // The type that includes both this new element, and its negation
                                // (or a supertype of its negation), must be simply `object`.
                                self.collapse_to_object();
                                return;
                            }
                        }
                        _ => {}
                    }
                }
                if let Some(found) = found {
                    found.insert(literal);
                } else {
                    self.elements
                        .push(UnionElement::BytesLiterals(FxOrderSet::from_iter([
                            literal,
                        ])));
                }
                if let Some(index) = to_remove {
                    self.elements.swap_remove(index);
                }
            }
            // And same for int literals as well.
            Type::IntLiteral(literal) => {
                let mut found = None;
                let mut to_remove = None;
                for (index, element) in self.elements.iter_mut().enumerate() {
                    match element {
                        UnionElement::IntLiterals(literals) => {
                            if should_widen(literals.len(), self.recursively_defined) {
                                let replace_with = KnownClass::Int.to_instance(self.db);
                                self.add_in_place_impl(replace_with, seen_aliases);
                                return;
                            }
                            found = Some(literals);
                            continue;
                        }
                        UnionElement::Type(existing) => {
                            if ty.is_subtype_of(self.db, *existing) {
                                return;
                            }
                            // e.g. `existing` could be `Literal[1] & Any`,
                            // and `ty` could be `Literal[1]`
                            if existing.is_subtype_of(self.db, ty) {
                                to_remove = Some(index);
                                continue;
                            }
                            if ty_negated().is_subtype_of(self.db, *existing) {
                                // The type that includes both this new element, and its negation
                                // (or a supertype of its negation), must be simply `object`.
                                self.collapse_to_object();
                                return;
                            }
                        }
                        _ => {}
                    }
                }
                if let Some(found) = found {
                    found.insert(literal);
                } else {
                    self.elements
                        .push(UnionElement::IntLiterals(FxOrderSet::from_iter([literal])));
                }
                if let Some(index) = to_remove {
                    self.elements.swap_remove(index);
                }
            }
            Type::EnumLiteral(enum_member_to_add) => {
                let enum_class = enum_member_to_add.enum_class(self.db);
                let metadata =
                    enum_metadata(self.db, enum_class).expect("Class of enum literal is an enum");
                // Collect the names of all enum members already in the union, plus
                // the one being added.
                let enum_members_in_union = self
                    .elements
                    .iter()
                    .filter_map(UnionElement::to_type_element)
                    .filter_map(Type::as_enum_literal)
                    .map(|literal| literal.name(self.db))
                    .chain(std::iter::once(enum_member_to_add.name(self.db)))
                    .collect::<FxHashSet<_>>();
                let all_members_are_in_union = metadata
                    .members
                    .keys()
                    .all(|name| enum_members_in_union.contains(name));
                if all_members_are_in_union {
                    // All members present: the union of literals is the enum itself.
                    self.add_in_place_impl(
                        enum_member_to_add.enum_class_instance(self.db),
                        seen_aliases,
                    );
                } else if !self
                    .elements
                    .iter()
                    .filter_map(UnionElement::to_type_element)
                    .any(|ty| Type::EnumLiteral(enum_member_to_add).is_subtype_of(self.db, ty))
                {
                    self.push_type(Type::EnumLiteral(enum_member_to_add), seen_aliases);
                }
            }
            // Adding `object` to a union results in `object`.
            ty if ty.is_object() => {
                self.collapse_to_object();
            }
            _ => {
                self.push_type(ty, seen_aliases);
            }
        }
    }
    /// Add a non-literal type to the union, running the full pairwise
    /// simplification checks against every existing element.
    fn push_type(&mut self, ty: Type<'db>, seen_aliases: &mut Vec<Type<'db>>) {
        // If `ty` is a bool literal, remember its complement: `True | False` is `bool`.
        let bool_pair = if let Type::BooleanLiteral(b) = ty {
            Some(Type::BooleanLiteral(!b))
        } else {
            None
        };
        // If an alias gets here, it means we aren't unpacking aliases, and we also
        // shouldn't try to simplify aliases out of the union, because that will require
        // unpacking them.
        let should_simplify_full = !matches!(ty, Type::TypeAlias(_)) && !self.cycle_recovery;
        let mut ty_negated: Option<Type> = None;
        let mut to_remove = SmallVec::<[usize; 2]>::new();
        for (i, element) in self.elements.iter_mut().enumerate() {
            let element_type = match element.try_reduce(self.db, ty) {
                ReduceResult::KeepIf(keep) => {
                    if !keep {
                        to_remove.push(i);
                    }
                    continue;
                }
                ReduceResult::Type(ty) => ty,
                ReduceResult::CollapseToObject => {
                    self.collapse_to_object();
                    return;
                }
                ReduceResult::Ignore => {
                    return;
                }
            };
            if ty == element_type {
                return;
            }
            if Some(element_type) == bool_pair {
                self.add_in_place_impl(KnownClass::Bool.to_instance(self.db), seen_aliases);
                return;
            }
            // Comparing `TypedDict`s for redundancy requires iterating over their fields, which is
            // problematic if some of those fields point to recursive `Union`s. To avoid cycles,
            // compare `TypedDict`s by name/identity instead of using the `has_relation_to`
            // machinery.
            if element_type.is_typed_dict() && ty.is_typed_dict() {
                continue;
            }
            if should_simplify_full && !matches!(element_type, Type::TypeAlias(_)) {
                if ty.is_redundant_with(self.db, element_type) {
                    return;
                }
                if element_type.is_redundant_with(self.db, ty) {
                    to_remove.push(i);
                    continue;
                }
                let negated = ty_negated.get_or_insert_with(|| ty.negate(self.db));
                if negated.is_subtype_of(self.db, element_type) {
                    // We add `ty` to the union. We just checked that `~ty` is a subtype of an
                    // existing `element`. This also means that `~ty | ty` is a subtype of
                    // `element | ty`, because both elements in the first union are subtypes of
                    // the corresponding elements in the second union. But `~ty | ty` is just
                    // `object`. Since `object` is a subtype of `element | ty`, we can only
                    // conclude that `element | ty` must be `object` (object has no other
                    // supertypes). This means we can simplify the whole union to just
                    // `object`, since all other potential elements would also be subtypes of
                    // `object`.
                    self.collapse_to_object();
                    return;
                }
            }
        }
        let mut to_remove = to_remove.into_iter();
        if let Some(first) = to_remove.next() {
            // Reuse the first removed slot for the new element.
            self.elements[first] = UnionElement::Type(ty);
            // We iterate in descending order to keep remaining indices valid after `swap_remove`.
            for index in to_remove.rev() {
                self.elements.swap_remove(index);
            }
        } else {
            self.elements.push(UnionElement::Type(ty));
        }
    }
    /// Build the final type, mapping an empty union to `Never`.
    pub(crate) fn build(self) -> Type<'db> {
        self.try_build().unwrap_or(Type::Never)
    }
    /// Build the final type: `None` for an empty union, the sole member for a
    /// single-element union, or a `Type::Union` otherwise.
    pub(crate) fn try_build(self) -> Option<Type<'db>> {
        let mut types = vec![];
        for element in self.elements {
            match element {
                UnionElement::IntLiterals(literals) => {
                    types.extend(literals.into_iter().map(Type::IntLiteral));
                }
                UnionElement::StringLiterals(literals) => {
                    types.extend(literals.into_iter().map(Type::StringLiteral));
                }
                UnionElement::BytesLiterals(literals) => {
                    types.extend(literals.into_iter().map(Type::BytesLiteral));
                }
                UnionElement::Type(ty) => types.push(ty),
            }
        }
        if self.order_elements {
            types.sort_unstable_by(|l, r| union_or_intersection_elements_ordering(self.db, l, r));
        }
        match types.len() {
            0 => None,
            1 => Some(types[0]),
            _ => Some(Type::Union(UnionType::new(
                self.db,
                types.into_boxed_slice(),
                self.recursively_defined,
            ))),
        }
    }
}
/// Builder for (simplified) intersection types, kept in disjunctive normal form.
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    /// Whether the built type's elements are sorted into a canonical order.
    order_elements: bool,
    db: &'db dyn Db,
}
impl<'db> IntersectionBuilder<'db> {
    /// Create a builder holding a single, empty intersection.
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            order_elements: false,
            intersections: vec![InnerIntersectionBuilder::default()],
        }
    }
    /// Create a builder with no intersections at all; used as the accumulator seed
    /// when distributing an intersection over a union (see `add_positive_impl`).
    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            order_elements: false,
            intersections: vec![],
        }
    }
    /// Add `ty` as a positive element of the intersection.
    pub(crate) fn add_positive(self, ty: Type<'db>) -> Self {
        self.add_positive_impl(ty, &mut vec![])
    }
pub(crate) fn add_positive_impl(
mut self,
ty: Type<'db>,
seen_aliases: &mut Vec<Type<'db>>,
) -> Self {
match ty {
Type::TypeAlias(alias) => {
if seen_aliases.contains(&ty) {
// Recursive alias, add it without expanding to avoid infinite recursion.
for inner in &mut self.intersections {
inner.positive.insert(ty);
}
return self;
}
seen_aliases.push(ty);
let value_type = alias.value_type(self.db);
self.add_positive_impl(value_type, seen_aliases)
}
Type::Union(union) => {
// Distribute ourself over this union: for each union element, clone ourself and
// intersect with that union element, then create a new union-of-intersections with all
// of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
// and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
// not in DNF), we distribute the union and get `(T1 & T3) | (T2 & T3) | (T1 & T4) |
// (T2 & T4)`. If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
// and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
// T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
union
.elements(self.db)
.iter()
.map(|elem| self.clone().add_positive_impl(*elem, seen_aliases))
.fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
builder.intersections.extend(sub.intersections);
builder
})
}
// `(A & B & ~C) & (D & E & ~F)` -> `A & B & D & E & ~C & ~F`
Type::Intersection(other) => {
let db = self.db;
for pos in other.positive(db) {
self = self.add_positive_impl(*pos, seen_aliases);
}
for neg in other.negative(db) {
self = self.add_negative_impl(*neg, seen_aliases);
}
self
}
Type::NominalInstance(instance)
if enum_metadata(self.db, instance.class_literal(self.db)).is_some() =>
{
let mut contains_enum_literal_as_negative_element = false;
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/class_base.rs | crates/ty_python_semantic/src/types/class_base.rs | use crate::Db;
use crate::types::class::CodeGeneratorKind;
use crate::types::generics::Specialization;
use crate::types::tuple::TupleType;
use crate::types::{
ApplyTypeMappingVisitor, ClassLiteral, ClassType, DynamicType, KnownClass, KnownInstanceType,
MaterializationKind, MroError, MroIterator, NormalizedVisitor, SpecialFormType, Type,
TypeContext, TypeMapping, todo_type,
};
/// Enumeration of the possible kinds of types we allow in class bases.
///
/// This is much more limited than the [`Type`] enum: all types that would be invalid to have as a
/// class base are transformed into [`ClassBase::unknown()`].
///
/// Note that a non-specialized generic class _cannot_ be a class base. When we see a
/// non-specialized generic class in any type expression (including the list of base classes), we
/// automatically construct the default specialization for that class.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, salsa::Update, get_size2::GetSize)]
pub enum ClassBase<'db> {
    /// A dynamic type (`Any`, `Unknown`, `@Todo`, ...) used as a base.
    Dynamic(DynamicType<'db>),
    /// An actual (possibly specialized) class.
    Class(ClassType<'db>),
    /// Although `Protocol` is not a class in typeshed's stubs, it is at runtime,
    /// and can appear in the MRO of a class.
    Protocol,
    /// Bare `Generic` cannot be subclassed directly in user code,
    /// but nonetheless appears in the MRO of classes that inherit from `Generic[T]`,
    /// `Protocol[T]`, or bare `Protocol`.
    Generic,
    /// `typing.TypedDict` used as a base of a TypedDict definition.
    TypedDict,
}
impl<'db> ClassBase<'db> {
    /// A `ClassBase` representing the dynamic type `Unknown`.
    pub(crate) const fn unknown() -> Self {
        Self::Dynamic(DynamicType::Unknown)
    }
    /// Normalize the base (and any class it wraps) to a canonical form.
    pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
        match self {
            Self::Dynamic(dynamic) => Self::Dynamic(dynamic.normalized()),
            Self::Class(class) => Self::Class(class.normalized_impl(db, visitor)),
            Self::Protocol | Self::Generic | Self::TypedDict => self,
        }
    }
    /// Recursive-type normalization; propagates `None` from the wrapped class's
    /// `recursive_type_normalized_impl`.
    pub(super) fn recursive_type_normalized_impl(
        self,
        db: &'db dyn Db,
        div: Type<'db>,
        nested: bool,
    ) -> Option<Self> {
        match self {
            Self::Dynamic(dynamic) => Some(Self::Dynamic(dynamic.recursive_type_normalized())),
            Self::Class(class) => Some(Self::Class(
                class.recursive_type_normalized_impl(db, div, nested)?,
            )),
            Self::Protocol | Self::Generic | Self::TypedDict => Some(self),
        }
    }
    /// The display name of this base (a class name, or a fixed label for the
    /// dynamic/special variants).
    pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
        match self {
            ClassBase::Class(class) => class.name(db),
            ClassBase::Dynamic(DynamicType::Any) => "Any",
            ClassBase::Dynamic(DynamicType::Unknown | DynamicType::UnknownGeneric(_)) => "Unknown",
            ClassBase::Dynamic(
                DynamicType::Todo(_) | DynamicType::TodoUnpack | DynamicType::TodoStarredExpression,
            ) => "@Todo",
            ClassBase::Dynamic(DynamicType::Divergent(_)) => "Divergent",
            ClassBase::Protocol => "Protocol",
            ClassBase::Generic => "Generic",
            ClassBase::TypedDict => "TypedDict",
        }
    }
    /// Return a `ClassBase` representing the class `builtins.object`
    pub(super) fn object(db: &'db dyn Db) -> Self {
        KnownClass::Object
            .to_class_literal(db)
            .to_class_type(db)
            .map_or(Self::unknown(), Self::Class)
    }
    /// Whether this base is `typing.TypedDict`.
    pub(super) const fn is_typed_dict(self) -> bool {
        matches!(self, ClassBase::TypedDict)
    }
    /// Attempt to resolve `ty` into a `ClassBase`.
    ///
    /// Return `None` if `ty` is not an acceptable type for a class base.
    pub(super) fn try_from_type(
        db: &'db dyn Db,
        ty: Type<'db>,
        subclass: ClassLiteral<'db>,
    ) -> Option<Self> {
        match ty {
            Type::Dynamic(dynamic) => Some(Self::Dynamic(dynamic)),
            Type::ClassLiteral(literal) => Some(Self::Class(literal.default_specialization(db))),
            Type::GenericAlias(generic) => Some(Self::Class(ClassType::Generic(generic))),
            Type::NominalInstance(instance)
                if instance.has_known_class(db, KnownClass::GenericAlias) =>
            {
                Self::try_from_type(db, todo_type!("GenericAlias instance"), subclass)
            }
            Type::SubclassOf(subclass_of) => subclass_of
                .subclass_of()
                .into_dynamic()
                .map(ClassBase::Dynamic),
            Type::Intersection(inter) => {
                // Accept the first positive element that is itself a valid base --
                // but only if the intersection can actually be a class
                // (i.e. it is not disjoint from `type`).
                let valid_element = inter
                    .positive(db)
                    .iter()
                    .find_map(|elem| ClassBase::try_from_type(db, *elem, subclass))?;
                if ty.is_disjoint_from(db, KnownClass::Type.to_instance(db)) {
                    None
                } else {
                    Some(valid_element)
                }
            }
            Type::Union(union) => {
                // We do not support full unions of MROs (yet). Until we do,
                // support the cases where one of the types in the union is
                // a dynamic type such as `Any` or `Unknown`, and all other
                // types *would be* valid class bases. In this case, we can
                // "fold" the other potential bases into the dynamic type,
                // and return `Any`/`Unknown` as the class base to prevent
                // invalid-base diagnostics and further downstream errors.
                let Some(Type::Dynamic(dynamic)) = union
                    .elements(db)
                    .iter()
                    .find(|elem| matches!(elem, Type::Dynamic(_)))
                else {
                    return None;
                };
                if union
                    .elements(db)
                    .iter()
                    .all(|elem| ClassBase::try_from_type(db, *elem, subclass).is_some())
                {
                    Some(ClassBase::Dynamic(*dynamic))
                } else {
                    None
                }
            }
            Type::NominalInstance(_) => None, // TODO -- handle `__mro_entries__`?
            // This likely means that we're in unreachable code,
            // in which case we want to treat `Never` in a forgiving way and silence diagnostics
            Type::Never => Some(ClassBase::unknown()),
            Type::TypeAlias(alias) => Self::try_from_type(db, alias.value_type(db), subclass),
            Type::NewTypeInstance(newtype) => {
                ClassBase::try_from_type(db, newtype.concrete_base_type(db), subclass)
            }
            // None of these types can ever be a valid class base.
            Type::PropertyInstance(_)
            | Type::BooleanLiteral(_)
            | Type::FunctionLiteral(_)
            | Type::Callable(..)
            | Type::BoundMethod(_)
            | Type::KnownBoundMethod(_)
            | Type::WrapperDescriptor(_)
            | Type::DataclassDecorator(_)
            | Type::DataclassTransformer(_)
            | Type::BytesLiteral(_)
            | Type::IntLiteral(_)
            | Type::EnumLiteral(_)
            | Type::StringLiteral(_)
            | Type::LiteralString
            | Type::ModuleLiteral(_)
            | Type::TypeVar(_)
            | Type::BoundSuper(_)
            | Type::ProtocolInstance(_)
            | Type::AlwaysFalsy
            | Type::AlwaysTruthy
            | Type::TypeIs(_)
            | Type::TypeGuard(_)
            | Type::TypedDict(_) => None,
            Type::KnownInstance(known_instance) => match known_instance {
                KnownInstanceType::SubscriptedGeneric(_) => Some(Self::Generic),
                KnownInstanceType::SubscriptedProtocol(_) => Some(Self::Protocol),
                KnownInstanceType::TypeAliasType(_)
                | KnownInstanceType::TypeVar(_)
                | KnownInstanceType::Deprecated(_)
                | KnownInstanceType::Field(_)
                | KnownInstanceType::ConstraintSet(_)
                | KnownInstanceType::Callable(_)
                | KnownInstanceType::GenericContext(_)
                | KnownInstanceType::Specialization(_)
                | KnownInstanceType::UnionType(_)
                | KnownInstanceType::Literal(_)
                | KnownInstanceType::LiteralStringAlias(_)
                // A class inheriting from a newtype would make intuitive sense, but newtype
                // wrappers are just identity callables at runtime, so this sort of inheritance
                // doesn't work and isn't allowed.
                | KnownInstanceType::NewType(_) => None,
                KnownInstanceType::TypeGenericAlias(_) => {
                    Self::try_from_type(db, KnownClass::Type.to_class_literal(db), subclass)
                }
                KnownInstanceType::Annotated(ty) => {
                    // Unions are not supported in this position, so we only need to support
                    // something like `class C(Annotated[Base, "metadata"]): ...`, which we
                    // can do by turning the instance type (`Base` in this example) back into
                    // a class.
                    let annotated_ty = ty.inner(db);
                    let instance_ty = annotated_ty.as_nominal_instance()?;
                    Some(Self::Class(instance_ty.class(db)))
                }
            },
            Type::SpecialForm(special_form) => match special_form {
                SpecialFormType::Annotated
                | SpecialFormType::Literal
                | SpecialFormType::LiteralString
                | SpecialFormType::Union
                | SpecialFormType::NoReturn
                | SpecialFormType::Never
                | SpecialFormType::Final
                | SpecialFormType::NotRequired
                | SpecialFormType::TypeGuard
                | SpecialFormType::TypeIs
                | SpecialFormType::TypingSelf
                | SpecialFormType::Unpack
                | SpecialFormType::ClassVar
                | SpecialFormType::Concatenate
                | SpecialFormType::Required
                | SpecialFormType::TypeAlias
                | SpecialFormType::ReadOnly
                | SpecialFormType::Optional
                | SpecialFormType::Not
                | SpecialFormType::Top
                | SpecialFormType::Bottom
                | SpecialFormType::Intersection
                | SpecialFormType::TypeOf
                | SpecialFormType::CallableTypeOf
                | SpecialFormType::AlwaysTruthy
                | SpecialFormType::AlwaysFalsy => None,
                SpecialFormType::Any => Some(Self::Dynamic(DynamicType::Any)),
                SpecialFormType::Unknown => Some(Self::unknown()),
                SpecialFormType::Protocol => Some(Self::Protocol),
                SpecialFormType::Generic => Some(Self::Generic),
                SpecialFormType::NamedTuple => {
                    // Build a tuple type from the subclass's own fields and use
                    // that tuple's class as the base.
                    let fields = subclass.own_fields(db, None, CodeGeneratorKind::NamedTuple);
                    Self::try_from_type(
                        db,
                        TupleType::heterogeneous(
                            db,
                            fields.values().map(|field| field.declared_ty),
                        )?
                        .to_class_type(db)
                        .into(),
                        subclass,
                    )
                }
                // TODO: Classes inheriting from `typing.Type` et al. also have `Generic` in their MRO
                SpecialFormType::Dict => {
                    Self::try_from_type(db, KnownClass::Dict.to_class_literal(db), subclass)
                }
                SpecialFormType::List => {
                    Self::try_from_type(db, KnownClass::List.to_class_literal(db), subclass)
                }
                SpecialFormType::Type => {
                    Self::try_from_type(db, KnownClass::Type.to_class_literal(db), subclass)
                }
                SpecialFormType::Tuple => {
                    Self::try_from_type(db, KnownClass::Tuple.to_class_literal(db), subclass)
                }
                SpecialFormType::Set => {
                    Self::try_from_type(db, KnownClass::Set.to_class_literal(db), subclass)
                }
                SpecialFormType::FrozenSet => {
                    Self::try_from_type(db, KnownClass::FrozenSet.to_class_literal(db), subclass)
                }
                SpecialFormType::ChainMap => {
                    Self::try_from_type(db, KnownClass::ChainMap.to_class_literal(db), subclass)
                }
                SpecialFormType::Counter => {
                    Self::try_from_type(db, KnownClass::Counter.to_class_literal(db), subclass)
                }
                SpecialFormType::DefaultDict => {
                    Self::try_from_type(db, KnownClass::DefaultDict.to_class_literal(db), subclass)
                }
                SpecialFormType::Deque => {
                    Self::try_from_type(db, KnownClass::Deque.to_class_literal(db), subclass)
                }
                SpecialFormType::OrderedDict => {
                    Self::try_from_type(db, KnownClass::OrderedDict.to_class_literal(db), subclass)
                }
                SpecialFormType::TypedDict => Some(Self::TypedDict),
                SpecialFormType::Callable => Self::try_from_type(
                    db,
                    todo_type!("Support for Callable as a base class"),
                    subclass,
                ),
            },
        }
    }
    /// Return the wrapped class, if this base is an actual class.
    pub(super) fn into_class(self) -> Option<ClassType<'db>> {
        match self {
            Self::Class(class) => Some(class),
            Self::Dynamic(_) | Self::Generic | Self::Protocol | Self::TypedDict => None,
        }
    }
    /// Apply a type mapping to the wrapped class; other variants are unaffected.
    fn apply_type_mapping_impl<'a>(
        self,
        db: &'db dyn Db,
        type_mapping: &TypeMapping<'a, 'db>,
        tcx: TypeContext<'db>,
        visitor: &ApplyTypeMappingVisitor<'db>,
    ) -> Self {
        match self {
            Self::Class(class) => {
                Self::Class(class.apply_type_mapping_impl(db, type_mapping, tcx, visitor))
            }
            Self::Dynamic(_) | Self::Generic | Self::Protocol | Self::TypedDict => self,
        }
    }
    /// Apply `specialization` (if any), then the specialization's materialization
    /// kind (if any).
    pub(crate) fn apply_optional_specialization(
        self,
        db: &'db dyn Db,
        specialization: Option<Specialization<'db>>,
    ) -> Self {
        if let Some(specialization) = specialization {
            let new_self = self.apply_type_mapping_impl(
                db,
                &TypeMapping::Specialization(specialization),
                TypeContext::default(),
                &ApplyTypeMappingVisitor::default(),
            );
            match specialization.materialization_kind(db) {
                None => new_self,
                Some(materialization_kind) => new_self.materialize(db, materialization_kind),
            }
        } else {
            self
        }
    }
    /// Materialize dynamic parts of the base according to `kind`.
    fn materialize(self, db: &'db dyn Db, kind: MaterializationKind) -> Self {
        self.apply_type_mapping_impl(
            db,
            &TypeMapping::Materialize(kind),
            TypeContext::default(),
            &ApplyTypeMappingVisitor::default(),
        )
    }
    /// Whether MRO resolution for this base fails due to a cycle.
    pub(super) fn has_cyclic_mro(self, db: &'db dyn Db) -> bool {
        match self {
            ClassBase::Class(class) => {
                let (class_literal, specialization) = class.class_literal(db);
                class_literal
                    .try_mro(db, specialization)
                    .is_err_and(MroError::is_cycle)
            }
            ClassBase::Dynamic(_)
            | ClassBase::Generic
            | ClassBase::Protocol
            | ClassBase::TypedDict => false,
        }
    }
    /// Iterate over the MRO of this base
    pub(super) fn mro(
        self,
        db: &'db dyn Db,
        additional_specialization: Option<Specialization<'db>>,
    ) -> impl Iterator<Item = ClassBase<'db>> {
        match self {
            // `Protocol`'s MRO also contains `Generic` (see the `Generic` variant docs).
            ClassBase::Protocol => ClassBaseMroIterator::length_3(db, self, ClassBase::Generic),
            ClassBase::Dynamic(_) | ClassBase::Generic | ClassBase::TypedDict => {
                ClassBaseMroIterator::length_2(db, self)
            }
            ClassBase::Class(class) => {
                ClassBaseMroIterator::from_class(db, class, additional_specialization)
            }
        }
    }
    /// Return a `Display` implementation rendering this base for diagnostics.
    pub(super) fn display(self, db: &'db dyn Db) -> impl std::fmt::Display {
        struct ClassBaseDisplay<'db> {
            db: &'db dyn Db,
            base: ClassBase<'db>,
        }
        impl std::fmt::Display for ClassBaseDisplay<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                match self.base {
                    ClassBase::Dynamic(dynamic) => dynamic.fmt(f),
                    ClassBase::Class(class) => Type::from(class).display(self.db).fmt(f),
                    ClassBase::Protocol => f.write_str("typing.Protocol"),
                    ClassBase::Generic => f.write_str("typing.Generic"),
                    ClassBase::TypedDict => f.write_str("typing.TypedDict"),
                }
            }
        }
        ClassBaseDisplay { db, base: self }
    }
}
impl<'db> From<ClassType<'db>> for ClassBase<'db> {
fn from(value: ClassType<'db>) -> Self {
ClassBase::Class(value)
}
}
impl<'db> From<ClassBase<'db>> for Type<'db> {
fn from(value: ClassBase<'db>) -> Self {
match value {
ClassBase::Dynamic(dynamic) => Type::Dynamic(dynamic),
ClassBase::Class(class) => class.into(),
ClassBase::Protocol => Type::SpecialForm(SpecialFormType::Protocol),
ClassBase::Generic => Type::SpecialForm(SpecialFormType::Generic),
ClassBase::TypedDict => Type::SpecialForm(SpecialFormType::TypedDict),
}
}
}
impl<'db> From<&ClassBase<'db>> for Type<'db> {
fn from(value: &ClassBase<'db>) -> Self {
Self::from(*value)
}
}
/// An iterator over the MRO of a class base.
enum ClassBaseMroIterator<'db> {
Length2(core::array::IntoIter<ClassBase<'db>, 2>),
Length3(core::array::IntoIter<ClassBase<'db>, 3>),
FromClass(MroIterator<'db>),
}
impl<'db> ClassBaseMroIterator<'db> {
/// Iterate over an MRO of length 2 that consists of `first_element` and then `object`.
fn length_2(db: &'db dyn Db, first_element: ClassBase<'db>) -> Self {
ClassBaseMroIterator::Length2([first_element, ClassBase::object(db)].into_iter())
}
/// Iterate over an MRO of length 3 that consists of `first_element`, then `second_element`, then `object`.
fn length_3(db: &'db dyn Db, element_1: ClassBase<'db>, element_2: ClassBase<'db>) -> Self {
ClassBaseMroIterator::Length3([element_1, element_2, ClassBase::object(db)].into_iter())
}
/// Iterate over the MRO of an arbitrary class. The MRO may be of any length.
fn from_class(
db: &'db dyn Db,
class: ClassType<'db>,
additional_specialization: Option<Specialization<'db>>,
) -> Self {
ClassBaseMroIterator::FromClass(class.iter_mro_specialized(db, additional_specialization))
}
}
impl<'db> Iterator for ClassBaseMroIterator<'db> {
type Item = ClassBase<'db>;
fn next(&mut self) -> Option<Self::Item> {
match self {
Self::Length2(iter) => iter.next(),
Self::Length3(iter) => iter.next(),
Self::FromClass(iter) => iter.next(),
}
}
}
impl std::iter::FusedIterator for ClassBaseMroIterator<'_> {}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/call.rs | crates/ty_python_semantic/src/types/call.rs | use super::context::InferContext;
use super::{Signature, Type, TypeContext};
use crate::Db;
use crate::types::call::bind::BindingError;
use crate::types::{MemberLookupPolicy, PropertyInstanceType};
use ruff_python_ast as ast;
mod arguments;
pub(crate) mod bind;
pub(super) use arguments::{Argument, CallArguments};
pub(super) use bind::{Binding, Bindings, CallableBinding, MatchedArgument};
impl<'db> Type<'db> {
pub(crate) fn try_call_bin_op(
db: &'db dyn Db,
left_ty: Type<'db>,
op: ast::Operator,
right_ty: Type<'db>,
) -> Result<Bindings<'db>, CallBinOpError> {
Self::try_call_bin_op_with_policy(db, left_ty, op, right_ty, MemberLookupPolicy::default())
}
pub(crate) fn try_call_bin_op_with_policy(
db: &'db dyn Db,
left_ty: Type<'db>,
op: ast::Operator,
right_ty: Type<'db>,
policy: MemberLookupPolicy,
) -> Result<Bindings<'db>, CallBinOpError> {
// We either want to call lhs.__op__ or rhs.__rop__. The full decision tree from
// the Python spec [1] is:
//
// - If rhs is a (proper) subclass of lhs, and it provides a different
// implementation of __rop__, use that.
// - Otherwise, if lhs implements __op__, use that.
// - Otherwise, if lhs and rhs are different types, and rhs implements __rop__,
// use that.
//
// [1] https://docs.python.org/3/reference/datamodel.html#object.__radd__
// Technically we don't have to check left_ty != right_ty here, since if the types
// are the same, they will trivially have the same implementation of the reflected
// dunder, and so we'll fail the inner check. But the type equality check will be
// faster for the common case, and allow us to skip the (two) class member lookups.
let left_class = left_ty.to_meta_type(db);
let right_class = right_ty.to_meta_type(db);
if left_ty != right_ty && right_ty.is_subtype_of(db, left_ty) {
let reflected_dunder = op.reflected_dunder();
let rhs_reflected = right_class.member(db, reflected_dunder).place;
// TODO: if `rhs_reflected` is possibly unbound, we should union the two possible
// Bindings together
if !rhs_reflected.is_undefined()
&& rhs_reflected != left_class.member(db, reflected_dunder).place
{
return Ok(right_ty
.try_call_dunder_with_policy(
db,
reflected_dunder,
&mut CallArguments::positional([left_ty]),
TypeContext::default(),
policy,
)
.or_else(|_| {
left_ty.try_call_dunder_with_policy(
db,
op.dunder(),
&mut CallArguments::positional([right_ty]),
TypeContext::default(),
policy,
)
})?);
}
}
let call_on_left_instance = left_ty.try_call_dunder_with_policy(
db,
op.dunder(),
&mut CallArguments::positional([right_ty]),
TypeContext::default(),
policy,
);
call_on_left_instance.or_else(|_| {
if left_ty == right_ty {
Err(CallBinOpError::NotSupported)
} else {
Ok(right_ty.try_call_dunder_with_policy(
db,
op.reflected_dunder(),
&mut CallArguments::positional([left_ty]),
TypeContext::default(),
policy,
)?)
}
})
}
}
/// Wraps a [`Bindings`] for an unsuccessful call with information about why the call was
/// unsuccessful.
///
/// The bindings are boxed so that we do not pass around large `Err` variants on the stack.
#[derive(Debug)]
pub(crate) struct CallError<'db>(pub(crate) CallErrorKind, pub(crate) Box<Bindings<'db>>);
impl<'db> CallError<'db> {
/// Returns `Some(property)` if the call error was caused by an attempt to set a property
/// that has no setter, and `None` otherwise.
pub(crate) fn as_attempt_to_set_property_with_no_setter(
&self,
) -> Option<PropertyInstanceType<'db>> {
if self.0 != CallErrorKind::BindingError {
return None;
}
self.1
.iter()
.flatten()
.flat_map(bind::Binding::errors)
.find_map(|error| match error {
BindingError::PropertyHasNoSetter(property) => Some(*property),
_ => None,
})
}
}
/// The reason why calling a type failed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum CallErrorKind {
/// The type is not callable. For a union type, _none_ of the union elements are callable.
NotCallable,
/// The type is not callable with the given arguments.
///
/// `BindingError` takes precedence over `PossiblyNotCallable`: for a union type, there might
/// be some union elements that are not callable at all, but the call arguments are not
/// compatible with at least one of the callable elements.
BindingError,
/// Not all of the elements of a union type are callable, but the call arguments are compatible
/// with all of the callable elements.
PossiblyNotCallable,
}
#[derive(Debug)]
pub(super) enum CallDunderError<'db> {
/// The dunder attribute exists but it can't be called with the given arguments.
///
/// This includes non-callable dunder attributes that are possibly unbound.
CallError(CallErrorKind, Box<Bindings<'db>>),
/// The type has the specified dunder method and it is callable
/// with the specified arguments without any binding errors
/// but it is possibly unbound.
PossiblyUnbound(Box<Bindings<'db>>),
/// The dunder method with the specified name is missing.
MethodNotAvailable,
}
impl<'db> CallDunderError<'db> {
pub(super) fn return_type(&self, db: &'db dyn Db) -> Option<Type<'db>> {
match self {
Self::MethodNotAvailable | Self::CallError(CallErrorKind::NotCallable, _) => None,
Self::CallError(_, bindings) => Some(bindings.return_type(db)),
Self::PossiblyUnbound(bindings) => Some(bindings.return_type(db)),
}
}
pub(super) fn fallback_return_type(&self, db: &'db dyn Db) -> Type<'db> {
self.return_type(db).unwrap_or(Type::unknown())
}
}
impl<'db> From<CallError<'db>> for CallDunderError<'db> {
fn from(CallError(kind, bindings): CallError<'db>) -> Self {
Self::CallError(kind, bindings)
}
}
#[derive(Debug)]
pub(crate) enum CallBinOpError {
/// The dunder attribute exists but it can't be called with the given arguments.
///
/// This includes non-callable dunder attributes that are possibly unbound.
CallError,
NotSupported,
}
impl From<CallDunderError<'_>> for CallBinOpError {
fn from(value: CallDunderError<'_>) -> Self {
match value {
CallDunderError::CallError(_, _) => Self::CallError,
CallDunderError::MethodNotAvailable | CallDunderError::PossiblyUnbound(_) => {
CallBinOpError::NotSupported
}
}
}
}
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | false |
astral-sh/ruff | https://github.com/astral-sh/ruff/blob/8464aca795bc3580ca15fcb52b21616939cea9a9/crates/ty_python_semantic/src/types/signatures.rs | crates/ty_python_semantic/src/types/signatures.rs | //! _Signatures_ describe the expected parameters and return type of a function or other callable.
//! Overloads and unions add complexity to this simple description.
//!
//! In a call expression, the type of the callable might be a union of several types. The call must
//! be compatible with _all_ of these types, since at runtime the callable might be an instance of
//! any of them.
//!
//! Each of the atomic types in the union must be callable. Each callable might be _overloaded_,
//! containing multiple _overload signatures_, each of which describes a different combination of
//! argument types and return types. For each callable type in the union, the call expression's
//! arguments must match _at least one_ overload.
use std::slice::Iter;
use itertools::{EitherOrBoth, Itertools};
use rustc_hash::FxHashMap;
use smallvec::{SmallVec, smallvec_inline};
use super::{DynamicType, Type, TypeVarVariance, definition_expression_type, semantic_index};
use crate::semantic_index::definition::Definition;
use crate::types::constraints::{
ConstraintSet, IteratorConstraintsExtension, OptionConstraintsExtension,
};
use crate::types::generics::{GenericContext, InferableTypeVars, walk_generic_context};
use crate::types::infer::{infer_deferred_types, infer_scope_types};
use crate::types::{
ApplyTypeMappingVisitor, BindingContext, BoundTypeVarInstance, CallableType, CallableTypeKind,
FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor,
KnownClass, MaterializationKind, NormalizedVisitor, ParamSpecAttrKind, TypeContext,
TypeMapping, TypeRelation, VarianceInferable, todo_type,
};
use crate::{Db, FxOrderSet};
use ruff_python_ast::{self as ast, name::Name};
/// Infer the type of a parameter or return annotation in a function signature.
///
/// This is very similar to [`definition_expression_type`], but knows that `TypeInferenceBuilder`
/// will always infer the parameters and return of a function in its PEP-695 typevar scope, if
/// there is one; otherwise they will be inferred in the function definition scope, but will always
/// be deferred. (This prevents spurious salsa cycles when we need the signature of the function
/// while in the middle of inferring its definition scope — for instance, when applying
/// decorators.)
fn function_signature_expression_type<'db>(
db: &'db dyn Db,
definition: Definition<'db>,
expression: &ast::Expr,
) -> Type<'db> {
let file = definition.file(db);
let index = semantic_index(db, file);
let file_scope = index.expression_scope_id(expression);
let scope = file_scope.to_scope_id(db, file);
if scope == definition.scope(db) {
// expression is in the function definition scope, but always deferred
infer_deferred_types(db, definition).expression_type(expression)
} else {
// expression is in the PEP-695 type params sub-scope
infer_scope_types(db, scope).expression_type(expression)
}
}
/// The signature of a single callable. If the callable is overloaded, there is a separate
/// [`Signature`] for each overload.
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)]
pub struct CallableSignature<'db> {
/// The signatures of each overload of this callable. Will be empty if the type is not
/// callable.
pub(crate) overloads: SmallVec<[Signature<'db>; 1]>,
}
impl<'db> CallableSignature<'db> {
pub(crate) fn single(signature: Signature<'db>) -> Self {
Self {
overloads: smallvec_inline![signature],
}
}
pub(crate) fn bottom() -> Self {
Self::single(Signature::bottom())
}
/// Creates a new `CallableSignature` from an iterator of [`Signature`]s. Returns a
/// non-callable signature if the iterator is empty.
pub(crate) fn from_overloads<I>(overloads: I) -> Self
where
I: IntoIterator<Item = Signature<'db>>,
{
Self {
overloads: overloads.into_iter().collect(),
}
}
pub(crate) fn iter(&self) -> std::slice::Iter<'_, Signature<'db>> {
self.overloads.iter()
}
pub(crate) fn with_inherited_generic_context(
&self,
db: &'db dyn Db,
inherited_generic_context: GenericContext<'db>,
) -> Self {
Self::from_overloads(self.overloads.iter().map(|signature| {
signature
.clone()
.with_inherited_generic_context(db, inherited_generic_context)
}))
}
pub(crate) fn normalized_impl(
&self,
db: &'db dyn Db,
visitor: &NormalizedVisitor<'db>,
) -> Self {
Self::from_overloads(
self.overloads
.iter()
.map(|signature| signature.normalized_impl(db, visitor)),
)
}
pub(super) fn recursive_type_normalized_impl(
&self,
db: &'db dyn Db,
div: Type<'db>,
nested: bool,
) -> Option<Self> {
Some(Self {
overloads: self
.overloads
.iter()
.map(|signature| signature.recursive_type_normalized_impl(db, div, nested))
.collect::<Option<SmallVec<_>>>()?,
})
}
pub(crate) fn apply_type_mapping_impl<'a>(
&self,
db: &'db dyn Db,
type_mapping: &TypeMapping<'a, 'db>,
tcx: TypeContext<'db>,
visitor: &ApplyTypeMappingVisitor<'db>,
) -> Self {
fn try_apply_type_mapping_for_paramspec<'db>(
db: &'db dyn Db,
self_signature: &Signature<'db>,
prefix_parameters: &[Parameter<'db>],
paramspec_value: Type<'db>,
type_mapping: &TypeMapping<'_, 'db>,
tcx: TypeContext<'db>,
visitor: &ApplyTypeMappingVisitor<'db>,
) -> Option<CallableSignature<'db>> {
match paramspec_value {
Type::TypeVar(typevar) if typevar.is_paramspec(db) => {
Some(CallableSignature::single(Signature {
generic_context: self_signature.generic_context.map(|context| {
type_mapping.update_signature_generic_context(db, context)
}),
definition: self_signature.definition,
parameters: Parameters::new(
db,
prefix_parameters
.iter()
.map(|param| {
param.apply_type_mapping_impl(db, type_mapping, tcx, visitor)
})
.chain([
Parameter::variadic(Name::new_static("args"))
.with_annotated_type(Type::TypeVar(
typevar
.with_paramspec_attr(db, ParamSpecAttrKind::Args),
)),
Parameter::keyword_variadic(Name::new_static("kwargs"))
.with_annotated_type(Type::TypeVar(
typevar
.with_paramspec_attr(db, ParamSpecAttrKind::Kwargs),
)),
]),
),
return_ty: self_signature
.return_ty
.map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor)),
}))
}
Type::Callable(callable)
if matches!(callable.kind(db), CallableTypeKind::ParamSpecValue) =>
{
Some(CallableSignature::from_overloads(
callable.signatures(db).iter().map(|signature| Signature {
generic_context: self_signature.generic_context.map(|context| {
type_mapping.update_signature_generic_context(db, context)
}),
definition: signature.definition,
parameters: if signature.parameters().is_top() {
signature.parameters().clone()
} else {
Parameters::new(
db,
prefix_parameters
.iter()
.map(|param| {
param.apply_type_mapping_impl(
db,
type_mapping,
tcx,
visitor,
)
})
.chain(signature.parameters().iter().cloned()),
)
},
return_ty: self_signature.return_ty.map(|ty| {
ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor)
}),
}),
))
}
_ => None,
}
}
match type_mapping {
TypeMapping::Specialization(specialization) => {
if let [self_signature] = self.overloads.as_slice()
&& let Some((prefix_parameters, paramspec)) = self_signature
.parameters
.find_paramspec_from_args_kwargs(db)
&& let Some(paramspec_value) = specialization.get(db, paramspec)
&& let Some(result) = try_apply_type_mapping_for_paramspec(
db,
self_signature,
prefix_parameters,
paramspec_value,
type_mapping,
tcx,
visitor,
)
{
return result;
}
}
TypeMapping::PartialSpecialization(partial) => {
if let [self_signature] = self.overloads.as_slice()
&& let Some((prefix_parameters, paramspec)) = self_signature
.parameters
.find_paramspec_from_args_kwargs(db)
&& let Some(paramspec_value) = partial.get(db, paramspec)
&& let Some(result) = try_apply_type_mapping_for_paramspec(
db,
self_signature,
prefix_parameters,
paramspec_value,
type_mapping,
tcx,
visitor,
)
{
return result;
}
}
_ => {}
}
Self::from_overloads(
self.overloads
.iter()
.map(|signature| signature.apply_type_mapping_impl(db, type_mapping, tcx, visitor)),
)
}
pub(crate) fn find_legacy_typevars_impl(
&self,
db: &'db dyn Db,
binding_context: Option<Definition<'db>>,
typevars: &mut FxOrderSet<BoundTypeVarInstance<'db>>,
visitor: &FindLegacyTypeVarsVisitor<'db>,
) {
for signature in &self.overloads {
signature.find_legacy_typevars_impl(db, binding_context, typevars, visitor);
}
}
/// Binds the first (presumably `self`) parameter of this signature. If a `self_type` is
/// provided, we will replace any occurrences of `typing.Self` in the parameter and return
/// annotations with that type.
pub(crate) fn bind_self(&self, db: &'db dyn Db, self_type: Option<Type<'db>>) -> Self {
Self {
overloads: self
.overloads
.iter()
.map(|signature| signature.bind_self(db, self_type))
.collect(),
}
}
/// Replaces any occurrences of `typing.Self` in the parameter and return annotations with the
/// given type. (Does not bind the `self` parameter; to do that, use
/// [`bind_self`][Self::bind_self].)
pub(crate) fn apply_self(&self, db: &'db dyn Db, self_type: Type<'db>) -> Self {
Self {
overloads: self
.overloads
.iter()
.map(|signature| signature.apply_self(db, self_type))
.collect(),
}
}
fn is_subtype_of_impl(
&self,
db: &'db dyn Db,
other: &Self,
inferable: InferableTypeVars<'_, 'db>,
) -> ConstraintSet<'db> {
self.has_relation_to_impl(
db,
other,
inferable,
TypeRelation::Subtyping,
&HasRelationToVisitor::default(),
&IsDisjointVisitor::default(),
)
}
pub(crate) fn has_relation_to_impl(
&self,
db: &'db dyn Db,
other: &Self,
inferable: InferableTypeVars<'_, 'db>,
relation: TypeRelation<'db>,
relation_visitor: &HasRelationToVisitor<'db>,
disjointness_visitor: &IsDisjointVisitor<'db>,
) -> ConstraintSet<'db> {
Self::has_relation_to_inner(
db,
&self.overloads,
&other.overloads,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
}
pub(crate) fn is_single_paramspec(
&self,
) -> Option<(BoundTypeVarInstance<'db>, Option<Type<'db>>)> {
Self::signatures_is_single_paramspec(&self.overloads)
}
/// Checks whether the given slice contains a single signature, and that signature is a
/// `ParamSpec` signature. If so, returns the [`BoundTypeVarInstance`] for the `ParamSpec`,
/// along with the return type of the signature.
fn signatures_is_single_paramspec(
signatures: &[Signature<'db>],
) -> Option<(BoundTypeVarInstance<'db>, Option<Type<'db>>)> {
// TODO: This might need updating once we support `Concatenate`
let [signature] = signatures else {
return None;
};
signature
.parameters
.as_paramspec()
.map(|bound_typevar| (bound_typevar, signature.return_ty))
}
pub(crate) fn when_constraint_set_assignable_to(
&self,
db: &'db dyn Db,
other: &Self,
inferable: InferableTypeVars<'_, 'db>,
) -> ConstraintSet<'db> {
self.has_relation_to_impl(
db,
other,
inferable,
TypeRelation::ConstraintSetAssignability,
&HasRelationToVisitor::default(),
&IsDisjointVisitor::default(),
)
}
/// Implementation of subtyping and assignability between two, possible overloaded, callable
/// types.
fn has_relation_to_inner(
db: &'db dyn Db,
self_signatures: &[Signature<'db>],
other_signatures: &[Signature<'db>],
inferable: InferableTypeVars<'_, 'db>,
relation: TypeRelation<'db>,
relation_visitor: &HasRelationToVisitor<'db>,
disjointness_visitor: &IsDisjointVisitor<'db>,
) -> ConstraintSet<'db> {
if relation.is_constraint_set_assignability() {
// TODO: Oof, maybe ParamSpec needs to live at CallableSignature, not Signature?
let self_is_single_paramspec = Self::signatures_is_single_paramspec(self_signatures);
let other_is_single_paramspec = Self::signatures_is_single_paramspec(other_signatures);
// If either callable is a ParamSpec, the constraint set should bind the ParamSpec to
// the other callable's signature. We also need to compare the return types — for
// instance, to verify in `Callable[P, int]` that the return type is assignable to
// `int`, or in `Callable[P, T]` to bind `T` to the return type of the other callable.
match (self_is_single_paramspec, other_is_single_paramspec) {
(
Some((self_bound_typevar, self_return_type)),
Some((other_bound_typevar, other_return_type)),
) => {
let param_spec_matches = ConstraintSet::constrain_typevar(
db,
self_bound_typevar,
Type::TypeVar(other_bound_typevar),
Type::TypeVar(other_bound_typevar),
);
let return_types_match = self_return_type.zip(other_return_type).when_some_and(
|(self_return_type, other_return_type)| {
self_return_type.has_relation_to_impl(
db,
other_return_type,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
},
);
return param_spec_matches.and(db, || return_types_match);
}
(Some((self_bound_typevar, self_return_type)), None) => {
let upper =
Type::Callable(CallableType::new(
db,
CallableSignature::from_overloads(other_signatures.iter().map(
|signature| Signature::new(signature.parameters().clone(), None),
)),
CallableTypeKind::ParamSpecValue,
));
let param_spec_matches = ConstraintSet::constrain_typevar(
db,
self_bound_typevar,
Type::Never,
upper,
);
let return_types_match = self_return_type.when_some_and(|self_return_type| {
other_signatures
.iter()
.filter_map(|signature| signature.return_ty)
.when_any(db, |other_return_type| {
self_return_type.has_relation_to_impl(
db,
other_return_type,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
})
});
return param_spec_matches.and(db, || return_types_match);
}
(None, Some((other_bound_typevar, other_return_type))) => {
let lower =
Type::Callable(CallableType::new(
db,
CallableSignature::from_overloads(self_signatures.iter().map(
|signature| Signature::new(signature.parameters().clone(), None),
)),
CallableTypeKind::ParamSpecValue,
));
let param_spec_matches = ConstraintSet::constrain_typevar(
db,
other_bound_typevar,
lower,
Type::object(),
);
let return_types_match = other_return_type.when_some_and(|other_return_type| {
self_signatures
.iter()
.filter_map(|signature| signature.return_ty)
.when_any(db, |self_return_type| {
self_return_type.has_relation_to_impl(
db,
other_return_type,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
})
});
return param_spec_matches.and(db, || return_types_match);
}
(None, None) => {}
}
}
match (self_signatures, other_signatures) {
([self_signature], [other_signature]) => {
// Base case: both callable types contain a single signature.
self_signature.has_relation_to_impl(
db,
other_signature,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
}
// `self` is possibly overloaded while `other` is definitely not overloaded.
(_, [_]) => self_signatures.iter().when_any(db, |self_signature| {
Self::has_relation_to_inner(
db,
std::slice::from_ref(self_signature),
other_signatures,
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
}),
// `self` is definitely not overloaded while `other` is possibly overloaded.
([_], _) => other_signatures.iter().when_all(db, |other_signature| {
Self::has_relation_to_inner(
db,
self_signatures,
std::slice::from_ref(other_signature),
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
}),
// `self` is definitely overloaded while `other` is possibly overloaded.
(_, _) => other_signatures.iter().when_all(db, |other_signature| {
Self::has_relation_to_inner(
db,
self_signatures,
std::slice::from_ref(other_signature),
inferable,
relation,
relation_visitor,
disjointness_visitor,
)
}),
}
}
/// Check whether this callable type is equivalent to another callable type.
///
/// See [`Type::is_equivalent_to`] for more details.
pub(crate) fn is_equivalent_to_impl(
&self,
db: &'db dyn Db,
other: &Self,
inferable: InferableTypeVars<'_, 'db>,
visitor: &IsEquivalentVisitor<'db>,
) -> ConstraintSet<'db> {
match (self.overloads.as_slice(), other.overloads.as_slice()) {
([self_signature], [other_signature]) => {
// Common case: both callable types contain a single signature, use the custom
// equivalence check instead of delegating it to the subtype check.
self_signature.is_equivalent_to_impl(db, other_signature, inferable, visitor)
}
(_, _) => {
if self == other {
return ConstraintSet::from(true);
}
self.is_subtype_of_impl(db, other, inferable)
.and(db, || other.is_subtype_of_impl(db, self, inferable))
}
}
}
}
impl<'a, 'db> IntoIterator for &'a CallableSignature<'db> {
type Item = &'a Signature<'db>;
type IntoIter = std::slice::Iter<'a, Signature<'db>>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'db> VarianceInferable<'db> for &CallableSignature<'db> {
// TODO: possibly need to replace self
fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance {
self.overloads
.iter()
.map(|signature| signature.variance_of(db, typevar))
.collect()
}
}
/// The signature of one of the overloads of a callable.
#[derive(Clone, Debug, salsa::Update, get_size2::GetSize, PartialEq, Eq, Hash)]
pub struct Signature<'db> {
/// The generic context for this overload, if it is generic.
pub(crate) generic_context: Option<GenericContext<'db>>,
/// The original definition associated with this function, if available.
/// This is useful for locating and extracting docstring information for the signature.
pub(crate) definition: Option<Definition<'db>>,
/// Parameters, in source order.
///
/// The ordering of parameters in a valid signature must be: first positional-only parameters,
/// then positional-or-keyword, then optionally the variadic parameter, then keyword-only
/// parameters, and last, optionally the variadic keywords parameter. Parameters with defaults
/// must come after parameters without defaults.
///
/// We may get invalid signatures, though, and need to handle them without panicking.
parameters: Parameters<'db>,
/// Annotated return type, if any.
pub(crate) return_ty: Option<Type<'db>>,
}
pub(super) fn walk_signature<'db, V: super::visitor::TypeVisitor<'db> + ?Sized>(
db: &'db dyn Db,
signature: &Signature<'db>,
visitor: &V,
) {
if let Some(generic_context) = &signature.generic_context {
walk_generic_context(db, *generic_context, visitor);
}
// By default we usually don't visit the type of the default value,
// as it isn't relevant to most things
for parameter in &signature.parameters {
if let Some(ty) = parameter.annotated_type() {
visitor.visit_type(db, ty);
}
}
if let Some(return_ty) = &signature.return_ty {
visitor.visit_type(db, *return_ty);
}
}
impl<'db> Signature<'db> {
pub(crate) fn new(parameters: Parameters<'db>, return_ty: Option<Type<'db>>) -> Self {
Self {
generic_context: None,
definition: None,
parameters,
return_ty,
}
}
pub(crate) fn new_generic(
generic_context: Option<GenericContext<'db>>,
parameters: Parameters<'db>,
return_ty: Option<Type<'db>>,
) -> Self {
Self {
generic_context,
definition: None,
parameters,
return_ty,
}
}
/// Return a signature for a dynamic callable
pub(crate) fn dynamic(signature_type: Type<'db>) -> Self {
Signature {
generic_context: None,
definition: None,
parameters: Parameters::gradual_form(),
return_ty: Some(signature_type),
}
}
/// Return a todo signature: (*args: Todo, **kwargs: Todo) -> Todo
#[allow(unused_variables)] // 'reason' only unused in debug builds
pub(crate) fn todo(reason: &'static str) -> Self {
let signature_type = todo_type!(reason);
Signature {
generic_context: None,
definition: None,
parameters: Parameters::todo(),
return_ty: Some(signature_type),
}
}
/// Return a typed signature from a function definition.
pub(super) fn from_function(
db: &'db dyn Db,
pep695_generic_context: Option<GenericContext<'db>>,
definition: Definition<'db>,
function_node: &ast::StmtFunctionDef,
has_implicitly_positional_first_parameter: bool,
) -> Self {
let parameters = Parameters::from_parameters(
db,
definition,
function_node.parameters.as_ref(),
has_implicitly_positional_first_parameter,
);
let return_ty = function_node
.returns
.as_ref()
.map(|returns| function_signature_expression_type(db, definition, returns.as_ref()));
let legacy_generic_context =
GenericContext::from_function_params(db, definition, ¶meters, return_ty);
let full_generic_context = GenericContext::merge_pep695_and_legacy(
db,
pep695_generic_context,
legacy_generic_context,
);
Self {
generic_context: full_generic_context,
definition: Some(definition),
parameters,
return_ty,
}
}
pub(super) fn wrap_coroutine_return_type(self, db: &'db dyn Db) -> Self {
let return_ty = self.return_ty.map(|return_ty| {
KnownClass::CoroutineType
.to_specialized_instance(db, [Type::any(), Type::any(), return_ty])
});
Self { return_ty, ..self }
}
/// Returns the signature which accepts any parameters and returns an `Unknown` type.
pub(crate) fn unknown() -> Self {
Self::new(Parameters::unknown(), Some(Type::unknown()))
}
/// Return the "bottom" signature, subtype of all other fully-static signatures.
pub(crate) fn bottom() -> Self {
Self::new(Parameters::bottom(), Some(Type::Never))
}
pub(crate) fn with_inherited_generic_context(
mut self,
db: &'db dyn Db,
inherited_generic_context: GenericContext<'db>,
) -> Self {
match self.generic_context.as_mut() {
Some(generic_context) => {
*generic_context = generic_context.merge(db, inherited_generic_context);
}
None => {
self.generic_context = Some(inherited_generic_context);
}
}
self
}
pub(crate) fn normalized_impl(
&self,
db: &'db dyn Db,
visitor: &NormalizedVisitor<'db>,
) -> Self {
Self {
generic_context: self
.generic_context
.map(|ctx| ctx.normalized_impl(db, visitor)),
// Discard the definition when normalizing, so that two equivalent signatures
// with different `Definition`s share the same Salsa ID when normalized
definition: None,
parameters: Parameters::new(
db,
self.parameters
.iter()
.map(|param| param.normalized_impl(db, visitor)),
),
return_ty: self
.return_ty
.map(|return_ty| return_ty.normalized_impl(db, visitor)),
}
}
pub(super) fn recursive_type_normalized_impl(
&self,
db: &'db dyn Db,
div: Type<'db>,
nested: bool,
) -> Option<Self> {
let return_ty = match self.return_ty {
Some(return_ty) if nested => {
Some(return_ty.recursive_type_normalized_impl(db, div, true)?)
}
Some(return_ty) => Some(
return_ty
.recursive_type_normalized_impl(db, div, true)
.unwrap_or(div),
),
None => None,
};
let parameters = {
let mut parameters = Vec::with_capacity(self.parameters.len());
for param in &self.parameters {
parameters.push(param.recursive_type_normalized_impl(db, div, nested)?);
}
Parameters::new(db, parameters)
};
Some(Self {
generic_context: self.generic_context,
definition: self.definition,
parameters,
return_ty,
})
}
pub(crate) fn apply_type_mapping_impl<'a>(
&self,
db: &'db dyn Db,
type_mapping: &TypeMapping<'a, 'db>,
tcx: TypeContext<'db>,
visitor: &ApplyTypeMappingVisitor<'db>,
) -> Self {
Self {
generic_context: self
.generic_context
.map(|context| type_mapping.update_signature_generic_context(db, context)),
definition: self.definition,
parameters: self
.parameters
.apply_type_mapping_impl(db, type_mapping, tcx, visitor),
return_ty: self
.return_ty
.map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor)),
}
}
pub(crate) fn find_legacy_typevars_impl(
&self,
| rust | MIT | 8464aca795bc3580ca15fcb52b21616939cea9a9 | 2026-01-04T15:31:59.413821Z | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.